{
  "cells": [
    {
      "attachments": {},
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "# `accuracy_analysis_from_labels_table`\n",
        "\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 2,
      "metadata": {
        "tags": [
          "hide_input"
        ]
      },
      "outputs": [
        {
          "data": {
            "text/html": [
              "\n",
              "<style>\n",
              "  #altair-viz-ce4898d725784c8bb6493b3db56fd8fe.vega-embed {\n",
              "    width: 100%;\n",
              "    display: flex;\n",
              "  }\n",
              "\n",
              "  #altair-viz-ce4898d725784c8bb6493b3db56fd8fe.vega-embed details,\n",
              "  #altair-viz-ce4898d725784c8bb6493b3db56fd8fe.vega-embed details summary {\n",
              "    position: relative;\n",
              "  }\n",
              "</style>\n",
              "<div id=\"altair-viz-ce4898d725784c8bb6493b3db56fd8fe\"></div>\n",
              "<script type=\"text/javascript\">\n",
              "  var VEGA_DEBUG = (typeof VEGA_DEBUG == \"undefined\") ? {} : VEGA_DEBUG;\n",
              "  (function(spec, embedOpt){\n",
              "    let outputDiv = document.currentScript.previousElementSibling;\n",
              "    if (outputDiv.id !== \"altair-viz-ce4898d725784c8bb6493b3db56fd8fe\") {\n",
              "      outputDiv = document.getElementById(\"altair-viz-ce4898d725784c8bb6493b3db56fd8fe\");\n",
              "    }\n",
              "    const paths = {\n",
              "      \"vega\": \"https://cdn.jsdelivr.net/npm/vega@5?noext\",\n",
              "      \"vega-lib\": \"https://cdn.jsdelivr.net/npm/vega-lib?noext\",\n",
              "      \"vega-lite\": \"https://cdn.jsdelivr.net/npm/vega-lite@5.17.0?noext\",\n",
              "      \"vega-embed\": \"https://cdn.jsdelivr.net/npm/vega-embed@6?noext\",\n",
              "    };\n",
              "\n",
              "    function maybeLoadScript(lib, version) {\n",
              "      var key = `${lib.replace(\"-\", \"\")}_version`;\n",
              "      return (VEGA_DEBUG[key] == version) ?\n",
              "        Promise.resolve(paths[lib]) :\n",
              "        new Promise(function(resolve, reject) {\n",
              "          var s = document.createElement('script');\n",
              "          document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
              "          s.async = true;\n",
              "          s.onload = () => {\n",
              "            VEGA_DEBUG[key] = version;\n",
              "            return resolve(paths[lib]);\n",
              "          };\n",
              "          s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
              "          s.src = paths[lib];\n",
              "        });\n",
              "    }\n",
              "\n",
              "    function showError(err) {\n",
              "      outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
              "      throw err;\n",
              "    }\n",
              "\n",
              "    function displayChart(vegaEmbed) {\n",
              "      vegaEmbed(outputDiv, spec, embedOpt)\n",
              "        .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
              "    }\n",
              "\n",
              "    if(typeof define === \"function\" && define.amd) {\n",
              "      requirejs.config({paths});\n",
              "      require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
              "    } else {\n",
              "      maybeLoadScript(\"vega\", \"5\")\n",
              "        .then(() => maybeLoadScript(\"vega-lite\", \"5.17.0\"))\n",
              "        .then(() => maybeLoadScript(\"vega-embed\", \"6\"))\n",
              "        .catch(showError)\n",
              "        .then(() => displayChart(vegaEmbed));\n",
              "    }\n",
              "  })({\"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300}}, \"layer\": [{\"layer\": [{\"mark\": \"point\", \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"point_selection\", \"value\": 1, \"empty\": false}, \"value\": 0}, \"tooltip\": [{\"field\": \"truth_threshold\", \"format\": \".3f\", \"title\": \"Match weight\", \"type\": \"quantitative\"}, {\"field\": \"match_probability\", \"format\": \".3%\", \"title\": \"Match probability\", \"type\": \"quantitative\"}, {\"field\": \"tp\", \"format\": \",.0f\", \"title\": \"TP\", \"type\": \"quantitative\"}, {\"field\": \"tn\", \"format\": \",.0f\", \"title\": \"TN\", \"type\": \"quantitative\"}, {\"field\": \"fp\", \"format\": \",.0f\", \"title\": \"FP\", \"type\": \"quantitative\"}, {\"field\": \"fn\", \"format\": \",.0f\", \"title\": \"FN\", \"type\": \"quantitative\"}, {\"field\": \"precision\", \"format\": \".4f\", \"title\": \"Precision\", \"type\": \"quantitative\"}, {\"field\": \"recall\", \"format\": \".4f\", \"title\": \"Recall (TPR)\", \"type\": \"quantitative\"}, {\"field\": \"fp_rate\", \"format\": \".4f\", \"title\": \"FPR\", \"type\": \"quantitative\"}]}, \"params\": [{\"name\": \"metric\", \"select\": {\"type\": \"point\", \"fields\": [\"metric\"]}, \"bind\": \"legend\", \"value\": [{\"metric\": \"precision\"}, {\"metric\": \"recall\"}]}, {\"name\": \"point_selection\", \"select\": {\"type\": \"point\", \"encodings\": [\"x\"], \"fields\": [\"truth_threshold\"], \"nearest\": true, \"on\": \"mouseover\"}}], \"transform\": [{\"filter\": {\"param\": \"metric\", \"empty\": true}}]}, {\"mark\": \"line\", \"encoding\": {\"opacity\": {\"condition\": {\"param\": \"metric\", \"value\": 1}, \"value\": 0.1}}}], \"encoding\": {\"color\": {\"field\": \"metric\", \"type\": \"nominal\", \"sort\": [\"precision\", \"recall\", \"f1\"], \"title\": \"Metric\", \"legend\": {\"labelExpr\": \"{'precision': 'Precision (PPV)', 'recall': 'Recall (TPR)', 'specificity': 'Specificity 
(TNR)', 'accuracy': 'Accuracy', 'npv': 'NPV', 'f1': 'F1', 'f2': 'F2', 'f0_5': 'F0.5', 'p4': 'P4', 'phi': '\\u03c6 (MCC)'}[datum.value]\"}}, \"x\": {\"type\": \"quantitative\", \"field\": \"truth_threshold\", \"axis\": {\"labelFontSize\": 12, \"title\": \"Match weight threshold\", \"titleFontSize\": 16, \"titlePadding\": 10}}, \"y\": {\"field\": \"value\", \"type\": \"quantitative\", \"axis\": {\"labelFontSize\": 12, \"title\": \"Score\", \"titleFontSize\": 18, \"titlePadding\": 10}}}}, {\"layer\": [{\"mark\": {\"type\": \"rule\", \"color\": \"gray\"}, \"encoding\": {\"x\": {\"field\": \"truth_threshold\", \"type\": \"quantitative\"}}}, {\"layer\": [{\"mark\": {\"type\": \"text\", \"align\": \"right\", \"baseline\": \"middle\", \"fontSize\": 14, \"x2\": 200, \"xOffset\": -10, \"y\": 100, \"y2\": 200}, \"transform\": [{\"filter\": {\"param\": \"metric\", \"empty\": true}}]}], \"encoding\": {\"color\": {\"field\": \"metric\", \"sort\": [\"precision\", \"recall\", \"f1\"]}, \"text\": {\"field\": \"y_text\"}, \"y\": {\"field\": \"score_index\", \"type\": \"quantitative\"}}}, {\"mark\": {\"type\": \"text\", \"color\": \"black\", \"fontSize\": 14, \"xOffset\": 25, \"y\": \"height\", \"yOffset\": -20}, \"encoding\": {\"text\": {\"condition\": {\"param\": \"point_selection\", \"aggregate\": \"min\", \"empty\": false, \"field\": \"truth_threshold\", \"format\": \"+.2f\", \"type\": \"nominal\"}, \"value\": \" \"}, \"x\": {\"field\": \"truth_threshold\", \"type\": \"quantitative\"}}}], \"transform\": [{\"filter\": {\"param\": \"point_selection\", \"empty\": false}}]}], \"data\": {\"name\": \"data-88481be63707845e366cd370703ce8b4\"}, \"height\": 400, \"title\": {\"text\": \"Link Quality Evaluation\", \"fontSize\": 20, \"subtitle\": [\"Click a legend value to show a specific evaluation metric\", \"Shift + Click to show multiple metrics\"]}, \"transform\": [{\"fold\": [\"precision\", \"recall\", \"f1\"], \"as\": [\"metric\", \"value\"]}, {\"calculate\": \"0.275 - 
0.05*indexof(['precision', 'recall', 'f1'], datum.metric)\", \"as\": \"score_index\"}, {\"calculate\": \"{'precision': 'Precision (PPV)', 'recall': 'Recall (TPR)', 'specificity': 'Specificity (TNR)', 'accuracy': 'Accuracy', 'npv': 'NPV', 'f1': 'F1', 'f2': 'F2', 'f0_5': 'F0.5', 'p4': 'P4', 'phi': '\\u03c6 (MCC)'}[datum.metric]\", \"as\": \"metric_text\"}, {\"calculate\": \"datum.metric_text + ' = ' + format(datum.value, ',.3f')\", \"as\": \"y_text\"}], \"width\": 400, \"$schema\": \"https://vega.github.io/schema/vega-lite/v5.14.1.json\", \"datasets\": {\"data-88481be63707845e366cd370703ce8b4\": [{\"truth_threshold\": -23.800000354647636, \"match_probability\": 6.846773588489456e-08, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1055.0, \"fp\": 90.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9213973799126638, \"fp_rate\": 0.07860262008733625, \"fn_rate\": 0.2880354505169867, \"precision\": 0.94140625, \"recall\": 0.7119645494830132, \"specificity\": 0.9213973799126638, \"npv\": 0.6432926829268293, \"accuracy\": 0.7874685138539043, \"f1\": 0.8107653490328006, \"f2\": 0.7484472049689441, \"f0_5\": 0.8844036697247707, \"p4\": 0.7832976799979975, \"phi\": 0.6085442007563051}, {\"truth_threshold\": -22.70000033825636, \"match_probability\": 1.467637948991862e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1077.0, \"fp\": 68.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9406113537117904, \"fp_rate\": 0.059388646288209605, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9550858652575958, \"recall\": 0.7119645494830132, \"specificity\": 0.9406113537117904, \"npv\": 0.648014440433213, \"accuracy\": 0.7943954659949622, \"f1\": 0.8157968970380818, \"f2\": 0.750155633948952, \"f0_5\": 0.8940274514653147, \"p4\": 
0.7908413564901972, \"phi\": 0.6273505612520337}, {\"truth_threshold\": -21.700000323355198, \"match_probability\": 2.9352754975091214e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1083.0, \"fp\": 62.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9458515283842794, \"fp_rate\": 0.05414847161572053, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9588859416445623, \"recall\": 0.7119645494830132, \"specificity\": 0.9458515283842794, \"npv\": 0.6492805755395683, \"accuracy\": 0.7962846347607053, \"f1\": 0.817179994348686, \"f2\": 0.7506229235880398, \"f0_5\": 0.896688577452561, \"p4\": 0.792886883910619, \"phi\": 0.6325043185815227}, {\"truth_threshold\": -21.600000321865082, \"match_probability\": 3.1459503204353755e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1088.0, \"fp\": 57.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9502183406113537, \"fp_rate\": 0.04978165938864629, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9620758483033932, \"recall\": 0.7119645494830132, \"specificity\": 0.9502183406113537, \"npv\": 0.6503287507471608, \"accuracy\": 0.7978589420654912, \"f1\": 0.8183361629881154, \"f2\": 0.7510127765659084, \"f0_5\": 0.8989183140619172, \"p4\": 0.7945877557823284, \"phi\": 0.6368075433805553}, {\"truth_threshold\": -20.60000030696392, \"match_probability\": 6.29189872645777e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1094.0, \"fp\": 51.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9554585152838428, \"fp_rate\": 0.0445414847161572, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9659318637274549, \"recall\": 0.7119645494830132, \"specificity\": 
0.9554585152838428, \"npv\": 0.6515783204288267, \"accuracy\": 0.7997481108312342, \"f1\": 0.8197278911564626, \"f2\": 0.75148113501715, \"f0_5\": 0.9016086793864572, \"p4\": 0.7966244062798371, \"phi\": 0.6419817284271657}, {\"truth_threshold\": -19.000000283122063, \"match_probability\": 1.907344620533969e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1096.0, \"fp\": 49.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9572052401746725, \"fp_rate\": 0.04279475982532751, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9672240802675586, \"recall\": 0.7119645494830132, \"specificity\": 0.9572052401746725, \"npv\": 0.6519928613920285, \"accuracy\": 0.8003778337531486, \"f1\": 0.8201928530913216, \"f2\": 0.7516373843434868, \"f0_5\": 0.9025090500561728, \"p4\": 0.7973022397990062, \"phi\": 0.6437089952787838}, {\"truth_threshold\": -17.900000266730785, \"match_probability\": 4.088473825324779e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1097.0, \"fp\": 48.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9580786026200874, \"fp_rate\": 0.04192139737991266, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9678714859437751, \"recall\": 0.7119645494830132, \"specificity\": 0.9580786026200874, \"npv\": 0.6521997621878716, \"accuracy\": 0.8006926952141058, \"f1\": 0.8204255319148936, \"f2\": 0.751715533374922, \"f0_5\": 0.9029599100786811, \"p4\": 0.7976409617025867, \"phi\": 0.6445731096055997}, {\"truth_threshold\": -17.600000262260437, \"match_probability\": 5.03349696795731e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1100.0, \"fp\": 45.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 
0.9606986899563319, \"fp_rate\": 0.039301310043668124, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9698189134808853, \"recall\": 0.7119645494830132, \"specificity\": 0.9606986899563319, \"npv\": 0.6528189910979229, \"accuracy\": 0.8016372795969773, \"f1\": 0.8211243611584327, \"f2\": 0.7519500780031201, \"f0_5\": 0.9043151969981238, \"p4\": 0.7986563533248476, \"phi\": 0.6471673893914208}, {\"truth_threshold\": -17.50000026077032, \"match_probability\": 5.394766530610173e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1108.0, \"fp\": 37.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9676855895196507, \"fp_rate\": 0.032314410480349345, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9750505731625084, \"recall\": 0.7119645494830132, \"specificity\": 0.9676855895196507, \"npv\": 0.6544595392793857, \"accuracy\": 0.8041561712846348, \"f1\": 0.8229937393284007, \"f2\": 0.7525762464869367, \"f0_5\": 0.9079492653522542, \"p4\": 0.8013584652743565, \"phi\": 0.6540998665530485}, {\"truth_threshold\": -16.900000251829624, \"match_probability\": 8.176914304005986e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1110.0, \"fp\": 35.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9694323144104804, \"fp_rate\": 0.03056768558951965, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9763673193787981, \"recall\": 0.7119645494830132, \"specificity\": 0.9694323144104804, \"npv\": 0.6548672566371682, \"accuracy\": 0.8047858942065491, \"f1\": 0.8234624145785877, \"f2\": 0.7527329515877147, \"f0_5\": 0.9088623507228158, \"p4\": 0.8020327397013851, \"phi\": 0.6558363061606292}, {\"truth_threshold\": -16.50000024586916, \"match_probability\": 1.0789474965962542e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 
1145.0, \"tp\": 1446.0, \"tn\": 1111.0, \"fp\": 34.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9703056768558952, \"fp_rate\": 0.029694323144104803, \"fn_rate\": 0.2880354505169867, \"precision\": 0.977027027027027, \"recall\": 0.7119645494830132, \"specificity\": 0.9703056768558952, \"npv\": 0.6550707547169812, \"accuracy\": 0.8051007556675063, \"f1\": 0.8236969524352037, \"f2\": 0.7528113286130779, \"f0_5\": 0.90931958244246, \"p4\": 0.8023696912661274, \"phi\": 0.6567050301458078}, {\"truth_threshold\": -15.800000235438347, \"match_probability\": 1.7527435818536736e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1120.0, \"fp\": 25.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9781659388646288, \"fp_rate\": 0.021834061135371178, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9830047586675731, \"recall\": 0.7119645494830132, \"specificity\": 0.9781659388646288, \"npv\": 0.656891495601173, \"accuracy\": 0.8079345088161209, \"f1\": 0.8258138206739006, \"f2\": 0.7535174570088587, \"f0_5\": 0.9134554643082754, \"p4\": 0.8053967630362511, \"phi\": 0.6645388735433893}, {\"truth_threshold\": -15.400000229477882, \"match_probability\": 2.312746079632102e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1121.0, \"fp\": 24.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9790393013100437, \"fp_rate\": 0.02096069868995633, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9836734693877551, \"recall\": 0.7119645494830132, \"specificity\": 0.9790393013100437, \"npv\": 0.6570926143024619, \"accuracy\": 0.8082493702770781, \"f1\": 0.8260497000856898, \"f2\": 0.7535959974984365, \"f0_5\": 0.9139173302995829, \"p4\": 0.8057325018854461, 
\"phi\": 0.6654110243207023}, {\"truth_threshold\": -15.20000022649765, \"match_probability\": 2.6566384864664307e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1126.0, \"fp\": 19.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9834061135371179, \"fp_rate\": 0.016593886462882096, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9870307167235495, \"recall\": 0.7119645494830132, \"specificity\": 0.9834061135371179, \"npv\": 0.6580946814728229, \"accuracy\": 0.809823677581864, \"f1\": 0.8272311212814645, \"f2\": 0.75398894566691, \"f0_5\": 0.9162336839437334, \"p4\": 0.8074094203617235, \"phi\": 0.6697770344487317}, {\"truth_threshold\": -14.300000213086605, \"match_probability\": 4.957348695121048e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1126.0, \"fp\": 19.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9834061135371179, \"fp_rate\": 0.016593886462882096, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9870218579234973, \"recall\": 0.7114721811915312, \"specificity\": 0.9834061135371179, \"npv\": 0.6577102803738317, \"accuracy\": 0.8095088161209067, \"f1\": 0.82689556509299, \"f2\": 0.7535460992907801, \"f0_5\": 0.9160644097882592, \"p4\": 0.8071048961802441, \"phi\": 0.6693357668739984}, {\"truth_threshold\": -13.200000196695328, \"match_probability\": 0.00010625707305470121, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1135.0, \"fp\": 10.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9912663755458515, \"fp_rate\": 0.008733624454148471, \"fn_rate\": 0.28852781880846873, \"precision\": 0.993127147766323, \"recall\": 0.7114721811915312, \"specificity\": 0.9912663755458515, 
\"npv\": 0.6595002905287624, \"accuracy\": 0.8123425692695214, \"f1\": 0.8290304073436604, \"f2\": 0.7542540975049588, \"f0_5\": 0.9202649344032607, \"p4\": 0.8101156090778194, \"phi\": 0.6772196571827369}, {\"truth_threshold\": -12.800000190734863, \"match_probability\": 0.00014020228918616167, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1137.0, \"fp\": 8.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9930131004366812, \"fp_rate\": 0.0069868995633187774, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9944941500344116, \"recall\": 0.7114721811915312, \"specificity\": 0.9930131004366812, \"npv\": 0.6598955310504934, \"accuracy\": 0.8129722921914357, \"f1\": 0.8295063145809415, \"f2\": 0.7544116111517176, \"f0_5\": 0.9212036210633686, \"p4\": 0.8107834022242488, \"phi\": 0.6789756245799222}, {\"truth_threshold\": -12.400000184774399, \"match_probability\": 0.00018498974370122882, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1138.0, \"fp\": 7.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.993886462882096, \"fp_rate\": 0.00611353711790393, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9951790633608816, \"recall\": 0.7114721811915312, \"specificity\": 0.993886462882096, \"npv\": 0.660092807424594, \"accuracy\": 0.8132871536523929, \"f1\": 0.8297444731553258, \"f2\": 0.7544903926482874, \"f0_5\": 0.9216736828677127, \"p4\": 0.8111171302195401, \"phi\": 0.6798541595642643}, {\"truth_threshold\": -11.700000174343586, \"match_probability\": 0.0003004820136373637, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1139.0, \"fp\": 6.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.994759825327511, 
\"fp_rate\": 0.005240174672489083, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9958649207443143, \"recall\": 0.7114721811915312, \"specificity\": 0.994759825327511, \"npv\": 0.6602898550724637, \"accuracy\": 0.8136020151133502, \"f1\": 0.8299827685238369, \"f2\": 0.7545691906005222, \"f0_5\": 0.9221442246330568, \"p4\": 0.8114507463687338, \"phi\": 0.680733063624895}, {\"truth_threshold\": -11.300000168383121, \"match_probability\": 0.00039645033391533577, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1142.0, \"fp\": 3.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9973799126637555, \"fp_rate\": 0.0026200873362445414, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9979281767955801, \"recall\": 0.7114721811915312, \"specificity\": 0.9973799126637555, \"npv\": 0.6608796296296297, \"accuracy\": 0.8145465994962217, \"f1\": 0.8306984765737281, \"f2\": 0.7548056832427915, \"f0_5\": 0.9235587370573949, \"p4\": 0.81245092776752, \"phi\": 0.683372001937977}, {\"truth_threshold\": -10.900000162422657, \"match_probability\": 0.0005230530993675534, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1143.0, \"fp\": 2.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9986178299930891, \"recall\": 0.7114721811915312, \"specificity\": 0.9982532751091703, \"npv\": 0.6610757663389243, \"accuracy\": 0.8148614609571788, \"f1\": 0.8309373202990225, \"f2\": 0.7548845470692718, \"f0_5\": 0.9240312060365775, \"p4\": 0.8127841005555416, \"phi\": 0.6842523939858662}, {\"truth_threshold\": -10.700000159442425, \"match_probability\": 0.0006007835088396779, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1444.0, 
\"tn\": 1143.0, \"fp\": 2.0, \"fn\": 587.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7109798129000492, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.28902018709995075, \"precision\": 0.9986168741355463, \"recall\": 0.7109798129000492, \"specificity\": 0.9982532751091703, \"npv\": 0.6606936416184971, \"accuracy\": 0.8145465994962217, \"f1\": 0.8306010928961749, \"f2\": 0.754440961337513, \"f0_5\": 0.9238643634037108, \"p4\": 0.8124788095466353, \"phi\": 0.6838163737767555}, {\"truth_threshold\": -10.200000151991844, \"match_probability\": 0.0008494248089972806, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1443.0, \"tn\": 1143.0, \"fp\": 2.0, \"fn\": 588.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7104874446085672, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.2895125553914328, \"precision\": 0.9986159169550173, \"recall\": 0.7104874446085672, \"specificity\": 0.9982532751091703, \"npv\": 0.6603119584055459, \"accuracy\": 0.8142317380352645, \"f1\": 0.830264672036824, \"f2\": 0.7539972828926743, \"f0_5\": 0.9236973498911791, \"p4\": 0.8121735251016357, \"phi\": 0.6833805796370901}, {\"truth_threshold\": -9.100000135600567, \"match_probability\": 0.0018190150448253225, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1443.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 588.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7104874446085672, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 0.0008733624454148472, \"fn_rate\": 0.2895125553914328, \"precision\": 0.9993074792243767, \"recall\": 0.7104874446085672, \"specificity\": 0.9991266375545852, \"npv\": 0.6605080831408776, \"accuracy\": 0.8145465994962217, \"f1\": 0.8305035971223022, \"f2\": 0.7540760869565217, \"f0_5\": 0.9241706161137441, \"p4\": 0.8125064984715595, \"phi\": 
0.6842619488798015}, {\"truth_threshold\": -8.600000128149986, \"match_probability\": 0.0025705389597152823, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1440.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 591.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7090103397341211, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 0.0008733624454148472, \"fn_rate\": 0.29098966026587886, \"precision\": 0.9993060374739764, \"recall\": 0.7090103397341211, \"specificity\": 0.9991266375545852, \"npv\": 0.6593659942363113, \"accuracy\": 0.8136020151133502, \"f1\": 0.8294930875576036, \"f2\": 0.7527443805541035, \"f0_5\": 0.9236690186016677, \"p4\": 0.811590547208778, \"phi\": 0.6829568226176045}, {\"truth_threshold\": -8.400000125169754, \"match_probability\": 0.0029516456585356845, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1439.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 592.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7085179714426391, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 0.0008733624454148472, \"fn_rate\": 0.2914820285573609, \"precision\": 0.9993055555555556, \"recall\": 0.7085179714426391, \"specificity\": 0.9991266375545852, \"npv\": 0.6589861751152074, \"accuracy\": 0.8132871536523929, \"f1\": 0.829155862863728, \"f2\": 0.7523002927645337, \"f0_5\": 0.923501476062123, \"p4\": 0.8112852415850365, \"phi\": 0.6825222299358593}, {\"truth_threshold\": -7.600000113248825, \"match_probability\": 0.0051278965144870335, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1439.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 592.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7085179714426391, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2914820285573609, \"precision\": 1.0, \"recall\": 0.7085179714426391, \"specificity\": 1.0, \"npv\": 0.6591824985607369, \"accuracy\": 0.8136020151133502, \"f1\": 
0.8293948126801153, \"f2\": 0.7523789605772248, \"f0_5\": 0.9239758571978939, \"p4\": 0.8116179257342173, \"phi\": 0.6834051848579609}, {\"truth_threshold\": -7.400000110268593, \"match_probability\": 0.005885918232687788, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1437.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 594.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.707533234859675, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29246676514032494, \"precision\": 1.0, \"recall\": 0.707533234859675, \"specificity\": 1.0, \"npv\": 0.6584243818286372, \"accuracy\": 0.8129722921914357, \"f1\": 0.828719723183391, \"f2\": 0.7514904298713524, \"f0_5\": 0.9236405707674509, \"p4\": 0.811007239776182, \"phi\": 0.6825372757481436}, {\"truth_threshold\": -6.800000101327896, \"match_probability\": 0.00889438522932807, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1436.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 595.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.707040866568193, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.292959133431807, \"precision\": 1.0, \"recall\": 0.707040866568193, \"specificity\": 1.0, \"npv\": 0.6580459770114943, \"accuracy\": 0.8126574307304786, \"f1\": 0.8283818863570811, \"f2\": 0.7510460251046025, \"f0_5\": 0.9234726688102894, \"p4\": 0.8107019041426428, \"phi\": 0.6821036562194343}, {\"truth_threshold\": -6.70000009983778, \"match_probability\": 0.009526684411466419, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1428.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 603.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7031019202363368, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2968980797636632, \"precision\": 1.0, \"recall\": 0.7031019202363368, \"specificity\": 1.0, \"npv\": 0.6550343249427918, \"accuracy\": 0.8101385390428212, \"f1\": 0.8256721595836947, \"f2\": 
0.7474874371859297, \"f0_5\": 0.9221232080588919, \"p4\": 0.8082593661032169, \"phi\": 0.6786426833673147}, {\"truth_threshold\": -6.400000095367432, \"match_probability\": 0.011702953955477532, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1424.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 607.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7011324470704087, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29886755292959133, \"precision\": 1.0, \"recall\": 0.7011324470704087, \"specificity\": 1.0, \"npv\": 0.6535388127853882, \"accuracy\": 0.8088790931989924, \"f1\": 0.8243125904486251, \"f2\": 0.7457059069962296, \"f0_5\": 0.9214442862689272, \"p4\": 0.8070381719383619, \"phi\": 0.6769174743376838}, {\"truth_threshold\": -6.3000000938773155, \"match_probability\": 0.012532388771145032, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1423.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 608.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7006400787789266, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29935992122107336, \"precision\": 1.0, \"recall\": 0.7006400787789266, \"specificity\": 1.0, \"npv\": 0.6531660011409013, \"accuracy\": 0.8085642317380353, \"f1\": 0.8239722061378112, \"f2\": 0.74526029119095, \"f0_5\": 0.9212741162760585, \"p4\": 0.8067328787708493, \"phi\": 0.6764867171608602}, {\"truth_threshold\": -5.700000084936619, \"match_probability\": 0.01887356650421064, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1422.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 609.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7001477104874446, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2998522895125554, \"precision\": 1.0, \"recall\": 0.7001477104874446, \"specificity\": 1.0, \"npv\": 0.6527936145952109, \"accuracy\": 0.8082493702770781, \"f1\": 0.8236316246741964, \"f2\": 0.7448145820238844, 
\"f0_5\": 0.9211037699183832, \"p4\": 0.8064275873033679, \"phi\": 0.676056177162564}, {\"truth_threshold\": -5.600000083446503, \"match_probability\": 0.02020082327925431, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1405.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 626.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6917774495322502, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3082225504677499, \"precision\": 1.0, \"recall\": 0.6917774495322502, \"specificity\": 1.0, \"npv\": 0.6465273856578204, \"accuracy\": 0.802896725440806, \"f1\": 0.8178114086146682, \"f2\": 0.737223213348725, \"f0_5\": 0.9181806299830088, \"p4\": 0.8012376730750075, \"phi\": 0.6687698153349331}, {\"truth_threshold\": -5.300000078976154, \"match_probability\": 0.024754544222716376, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1393.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 638.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6858690300344658, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31413096996553425, \"precision\": 1.0, \"recall\": 0.6858690300344658, \"specificity\": 1.0, \"npv\": 0.6421761076836792, \"accuracy\": 0.7991183879093199, \"f1\": 0.8136682242990654, \"f2\": 0.7318482715141326, \"f0_5\": 0.9160857556227805, \"p4\": 0.7975738525329583, \"phi\": 0.6636630953189379}, {\"truth_threshold\": -4.90000007301569, \"match_probability\": 0.032407497325934585, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1391.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 640.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6848842934515017, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3151157065484983, \"precision\": 1.0, \"recall\": 0.6848842934515017, \"specificity\": 1.0, \"npv\": 0.6414565826330533, \"accuracy\": 0.7984886649874056, \"f1\": 0.8129748684979544, \"f2\": 0.7309511297950604, \"f0_5\": 
0.9157340355497038, \"p4\": 0.7969631540364932, \"phi\": 0.6628148598035907}, {\"truth_threshold\": -4.800000071525574, \"match_probability\": 0.03465289308554322, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1390.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 641.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6843919251600197, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3156080748399803, \"precision\": 1.0, \"recall\": 0.6843919251600197, \"specificity\": 1.0, \"npv\": 0.641097424412094, \"accuracy\": 0.7981738035264484, \"f1\": 0.8126278865828706, \"f2\": 0.7305024174900148, \"f0_5\": 0.9155578975102094, \"p4\": 0.7966577964206586, \"phi\": 0.6623910480286727}, {\"truth_threshold\": -4.500000067055225, \"match_probability\": 0.04232371044088178, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1389.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 642.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6838995568685377, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31610044313146235, \"precision\": 1.0, \"recall\": 0.6838995568685377, \"specificity\": 1.0, \"npv\": 0.6407386681589256, \"accuracy\": 0.7978589420654912, \"f1\": 0.8122807017543859, \"f2\": 0.7300536108483129, \"f0_5\": 0.9153815737445631, \"p4\": 0.7963524329131265, \"phi\": 0.6619674396995868}, {\"truth_threshold\": -4.300000064074993, \"match_probability\": 0.048313119674570026, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1388.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 643.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6834071885770556, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3165928114229444, \"precision\": 1.0, \"recall\": 0.6834071885770556, \"specificity\": 1.0, \"npv\": 0.6403803131991052, \"accuracy\": 0.797544080604534, \"f1\": 0.8119333138344546, \"f2\": 0.7296047098402019, \"f0_5\": 0.9152050639588554, 
\"p4\": 0.7960470632785187, \"phi\": 0.6615440344100268}, {\"truth_threshold\": -4.200000062584877, \"match_probability\": 0.05160178526561565, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1382.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 649.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6804529788281635, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31954702117183653, \"precision\": 1.0, \"recall\": 0.6804529788281635, \"specificity\": 1.0, \"npv\": 0.6382385730211817, \"accuracy\": 0.7956549118387909, \"f1\": 0.8098447113975974, \"f2\": 0.7269093204292026, \"f0_5\": 0.9141420822860167, \"p4\": 0.7942147035798486, \"phi\": 0.6590078438192518}, {\"truth_threshold\": -3.8000000566244125, \"match_probability\": 0.06698457743861425, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1379.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 652.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6789758739537174, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3210241260462826, \"precision\": 1.0, \"recall\": 0.6789758739537174, \"specificity\": 1.0, \"npv\": 0.6371730662214803, \"accuracy\": 0.7947103274559194, \"f1\": 0.8087976539589443, \"f2\": 0.7255603493633589, \"f0_5\": 0.9136080561812641, \"p4\": 0.7932984218488349, \"phi\": 0.6577424568153551}, {\"truth_threshold\": -3.6000000536441803, \"match_probability\": 0.0761862214703254, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1377.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 654.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6779911373707533, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.32200886262924666, \"precision\": 1.0, \"recall\": 0.6779911373707533, \"specificity\": 1.0, \"npv\": 0.6364647026125625, \"accuracy\": 0.7940806045340051, \"f1\": 0.8080985915492958, \"f2\": 0.7246605620461004, \"f0_5\": 0.9132510943095902, \"p4\": 
0.7926875252637993, \"phi\": 0.6568998611817706}, {\"truth_threshold\": -3.300000049173832, \"match_probability\": 0.09217307161544283, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1375.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 656.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6770064007877893, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3229935992122107, \"precision\": 1.0, \"recall\": 0.6770064007877893, \"specificity\": 1.0, \"npv\": 0.6357579122709606, \"accuracy\": 0.7934508816120907, \"f1\": 0.8073987081620669, \"f2\": 0.7237603958311402, \"f0_5\": 0.9128933740539105, \"p4\": 0.7920765927688658, \"phi\": 0.6560580583751121}, {\"truth_threshold\": -3.200000047683716, \"match_probability\": 0.09813940308831819, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1374.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 657.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6765140324963073, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.32348596750369274, \"precision\": 1.0, \"recall\": 0.6765140324963073, \"specificity\": 1.0, \"npv\": 0.6354051054384018, \"accuracy\": 0.7931360201511335, \"f1\": 0.8070484581497798, \"f2\": 0.7233101705622236, \"f0_5\": 0.912714228776405, \"p4\": 0.7917711124557005, \"phi\": 0.6556374532841107}, {\"truth_threshold\": -3.1000000461935997, \"match_probability\": 0.10444750015659417, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1370.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 661.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6745445593303792, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3254554406696209, \"precision\": 1.0, \"recall\": 0.6745445593303792, \"specificity\": 1.0, \"npv\": 0.6339977851605758, \"accuracy\": 0.7918765743073047, \"f1\": 0.8056453984122317, \"f2\": 0.7215083210448704, \"f0_5\": 0.9119957395819465, \"p4\": 0.7905490918185237, 
\"phi\": 0.6539569990508374}, {\"truth_threshold\": -2.7000000402331352, \"match_probability\": 0.13336855415354743, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1363.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 668.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.671097981290005, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3289020187099951, \"precision\": 1.0, \"recall\": 0.671097981290005, \"specificity\": 1.0, \"npv\": 0.631549917264203, \"accuracy\": 0.7896725440806045, \"f1\": 0.803182086034178, \"f2\": 0.7183514282702645, \"f0_5\": 0.9107309902445543, \"p4\": 0.7884101358393227, \"phi\": 0.6510237127477586}, {\"truth_threshold\": -2.600000038743019, \"match_probability\": 0.1415855743659812, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1356.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 675.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6676514032496307, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.33234859675036926, \"precision\": 1.0, \"recall\": 0.6676514032496307, \"specificity\": 1.0, \"npv\": 0.6291208791208791, \"accuracy\": 0.7874685138539043, \"f1\": 0.8007085916740478, \"f2\": 0.7151898734177216, \"f0_5\": 0.9094567404426559, \"p4\": 0.7862705739141664, \"phi\": 0.6480998671182523}, {\"truth_threshold\": -2.500000037252903, \"match_probability\": 0.15022110152606716, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1355.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 676.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6671590349581487, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3328409650418513, \"precision\": 1.0, \"recall\": 0.6671590349581487, \"specificity\": 1.0, \"npv\": 0.628775398132894, \"accuracy\": 0.7871536523929471, \"f1\": 0.8003544004725339, \"f2\": 0.7147378415444667, \"f0_5\": 0.9092739229633606, \"p4\": 0.7859648678428045, \"phi\": 
0.6476829377278417}, {\"truth_threshold\": -2.400000035762787, \"match_probability\": 0.1592855907727143, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1351.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 680.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6651895617922206, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3348104382077794, \"precision\": 1.0, \"recall\": 0.6651895617922206, \"specificity\": 1.0, \"npv\": 0.6273972602739726, \"accuracy\": 0.7858942065491183, \"f1\": 0.7989355410999409, \"f2\": 0.7129287598944591, \"f0_5\": 0.9085406859448554, \"p4\": 0.7847418977635621, \"phi\": 0.6460171117170842}, {\"truth_threshold\": -2.3000000342726707, \"match_probability\": 0.16878839957195682, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1346.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 685.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6627277203348104, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3372722796651896, \"precision\": 1.0, \"recall\": 0.6627277203348104, \"specificity\": 1.0, \"npv\": 0.6256830601092896, \"accuracy\": 0.7843198992443325, \"f1\": 0.7971572401539828, \"f2\": 0.7106652587117213, \"f0_5\": 0.9076196898179366, \"p4\": 0.7832128384966869, \"phi\": 0.6439390561833764}, {\"truth_threshold\": -2.2000000327825546, \"match_probability\": 0.1787376058900962, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1344.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 687.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6617429837518464, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.33825701624815363, \"precision\": 1.0, \"recall\": 0.6617429837518464, \"specificity\": 1.0, \"npv\": 0.625, \"accuracy\": 0.7836901763224181, \"f1\": 0.7964444444444444, \"f2\": 0.7097591888466414, \"f0_5\": 0.907249898744431, \"p4\": 0.7826011005327976, \"phi\": 0.6431091391396206}, 
{\"truth_threshold\": -2.1000000312924385, \"match_probability\": 0.18913982061899084, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1340.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 691.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6597735105859183, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34022648941408173, \"precision\": 1.0, \"recall\": 0.6597735105859183, \"specificity\": 1.0, \"npv\": 0.6236383442265795, \"accuracy\": 0.7824307304785895, \"f1\": 0.7950163156333432, \"f2\": 0.7079459002535926, \"f0_5\": 0.9065079150317954, \"p4\": 0.7813774176935412, \"phi\": 0.6414515256091918}, {\"truth_threshold\": -2.0000000298023224, \"match_probability\": 0.19999999669481672, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1338.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 693.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6587887740029542, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3412112259970458, \"precision\": 1.0, \"recall\": 0.6587887740029542, \"specificity\": 1.0, \"npv\": 0.6229597388465724, \"accuracy\": 0.781801007556675, \"f1\": 0.7943009795191451, \"f2\": 0.7070386810399493, \"f0_5\": 0.9061357171881349, \"p4\": 0.7807654687830268, \"phi\": 0.6406238230099892}, {\"truth_threshold\": -1.700000025331974, \"match_probability\": 0.2353489599091234, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1335.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 696.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6573116691285081, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34268833087149186, \"precision\": 1.0, \"recall\": 0.6573116691285081, \"specificity\": 1.0, \"npv\": 0.6219445953286258, \"accuracy\": 0.7808564231738035, \"f1\": 0.7932263814616756, \"f2\": 0.7056771328893118, \"f0_5\": 0.9055759055759056, \"p4\": 0.7798474053553527, \"phi\": 0.6393836407517114}, 
{\"truth_threshold\": -1.600000023841858, \"match_probability\": 0.24805074388621665, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1333.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 698.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6563269325455441, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3436730674544559, \"precision\": 1.0, \"recall\": 0.6563269325455441, \"specificity\": 1.0, \"npv\": 0.6212696690179056, \"accuracy\": 0.7802267002518891, \"f1\": 0.7925089179548157, \"f2\": 0.7047689542138099, \"f0_5\": 0.9052016840961564, \"p4\": 0.7792352667284765, \"phi\": 0.6385577625791636}, {\"truth_threshold\": -1.4000000208616257, \"match_probability\": 0.2747995717943022, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1332.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 699.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6558345642540621, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34416543574593794, \"precision\": 1.0, \"recall\": 0.6558345642540621, \"specificity\": 1.0, \"npv\": 0.6209327548806941, \"accuracy\": 0.779911838790932, \"f1\": 0.792149866190901, \"f2\": 0.7043147208121827, \"f0_5\": 0.9050142682429678, \"p4\": 0.778929167747172, \"phi\": 0.6381450953570468}, {\"truth_threshold\": -1.3000000193715096, \"match_probability\": 0.2888262766358852, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1327.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 704.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6533727227966519, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3466272772033481, \"precision\": 1.0, \"recall\": 0.6533727227966519, \"specificity\": 1.0, \"npv\": 0.6192536506219578, \"accuracy\": 0.7783375314861462, \"f1\": 0.7903513996426444, \"f2\": 0.7020421119458259, \"f0_5\": 0.904074124540128, \"p4\": 0.7773983659139201, \"phi\": 0.6360844627945531}, {\"truth_threshold\": 
-1.2000000178813934, \"match_probability\": 0.3032695424040186, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1322.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 709.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6509108813392418, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34908911866075826, \"precision\": 1.0, \"recall\": 0.6509108813392418, \"specificity\": 1.0, \"npv\": 0.6175836030204962, \"accuracy\": 0.7767632241813602, \"f1\": 0.7885475693408888, \"f2\": 0.6997670971839932, \"f0_5\": 0.9031288427380789, \"p4\": 0.775867028931697, \"phi\": 0.6340283016890773}, {\"truth_threshold\": -1.1000000163912773, \"match_probability\": 0.318111997717226, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1320.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 711.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6499261447562777, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3500738552437223, \"precision\": 1.0, \"recall\": 0.6499261447562777, \"specificity\": 1.0, \"npv\": 0.6169181034482759, \"accuracy\": 0.7761335012594458, \"f1\": 0.7878245299910475, \"f2\": 0.6988564167725541, \"f0_5\": 0.9027492819039803, \"p4\": 0.7752543370502095, \"phi\": 0.6332070787700438}, {\"truth_threshold\": -1.0000000149011612, \"match_probability\": 0.33333333103806717, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1319.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 712.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6494337764647957, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35056622353520434, \"precision\": 1.0, \"recall\": 0.6494337764647957, \"specificity\": 1.0, \"npv\": 0.6165858912224017, \"accuracy\": 0.7758186397984886, \"f1\": 0.7874626865671642, \"f2\": 0.6984009319072328, \"f0_5\": 0.9025591898179828, \"p4\": 0.7749479564070448, \"phi\": 0.6327967318590355}, {\"truth_threshold\": -0.9000000134110451, 
\"match_probability\": 0.34891031813411577, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1316.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 715.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6479566715903495, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3520433284096504, \"precision\": 1.0, \"recall\": 0.6479566715903495, \"specificity\": 1.0, \"npv\": 0.6155913978494624, \"accuracy\": 0.7748740554156172, \"f1\": 0.7863758589781894, \"f2\": 0.6970338983050848, \"f0_5\": 0.9019876627827279, \"p4\": 0.774028672532505, \"phi\": 0.6315667448577293}, {\"truth_threshold\": -0.7000000104308128, \"match_probability\": 0.38102425962470177, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1314.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 717.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6469719350073855, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35302806499261447, \"precision\": 1.0, \"recall\": 0.6469719350073855, \"specificity\": 1.0, \"npv\": 0.6149301825993555, \"accuracy\": 0.7742443324937027, \"f1\": 0.7856502242152467, \"f2\": 0.6961220597584233, \"f0_5\": 0.901605599011939, \"p4\": 0.7734156957074743, \"phi\": 0.6307476279232052}, {\"truth_threshold\": -0.6000000089406967, \"match_probability\": 0.3975010577814427, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1309.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 722.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6445100935499753, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3554899064500246, \"precision\": 1.0, \"recall\": 0.6445100935499753, \"specificity\": 1.0, \"npv\": 0.6132833422603107, \"accuracy\": 0.7726700251889169, \"f1\": 0.7838323353293413, \"f2\": 0.6938407717587194, \"f0_5\": 0.9006467593229668, \"p4\": 0.7718828151071816, \"phi\": 0.6287028744111437}, {\"truth_threshold\": -0.5000000074505806, 
\"match_probability\": 0.41421356112001384, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1306.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 725.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6430329886755293, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35696701132447073, \"precision\": 1.0, \"recall\": 0.6430329886755293, \"specificity\": 1.0, \"npv\": 0.6122994652406417, \"accuracy\": 0.7717254408060453, \"f1\": 0.7827389871141744, \"f2\": 0.6924708377518558, \"f0_5\": 0.9000689179875948, \"p4\": 0.7709627754494935, \"phi\": 0.627478091329186}, {\"truth_threshold\": -0.4000000059604645, \"match_probability\": 0.4311259267559445, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1300.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 731.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6400787789266371, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3599212210733629, \"precision\": 1.0, \"recall\": 0.6400787789266371, \"specificity\": 1.0, \"npv\": 0.6103411513859275, \"accuracy\": 0.7698362720403022, \"f1\": 0.7805463824677275, \"f2\": 0.6897283531409168, \"f0_5\": 0.8989074816761167, \"p4\": 0.7691219621523272, \"phi\": 0.6250331342479231}, {\"truth_threshold\": -0.30000000447034836, \"match_probability\": 0.4482004805735527, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1297.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 734.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.638601674052191, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36139832594780896, \"precision\": 1.0, \"recall\": 0.638601674052191, \"specificity\": 1.0, \"npv\": 0.6093666844065992, \"accuracy\": 0.7688916876574308, \"f1\": 0.7794471153846154, \"f2\": 0.6883558008703959, \"f0_5\": 0.8983238675716858, \"p4\": 0.7682011740290052, \"phi\": 0.6238129405308033}, {\"truth_threshold\": -0.20000000298023224, 
\"match_probability\": 0.4653980381052749, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1284.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 747.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6322008862629247, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36779911373707536, \"precision\": 1.0, \"recall\": 0.6322008862629247, \"specificity\": 1.0, \"npv\": 0.6051797040169133, \"accuracy\": 0.7647984886649875, \"f1\": 0.7746606334841629, \"f2\": 0.6823979591836735, \"f0_5\": 0.895772289660946, \"p4\": 0.7642079467115986, \"phi\": 0.6185427594175095}, {\"truth_threshold\": 0.0, \"match_probability\": 0.5, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1283.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 748.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6317085179714427, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3682914820285574, \"precision\": 1.0, \"recall\": 0.6317085179714427, \"specificity\": 1.0, \"npv\": 0.6048600105652404, \"accuracy\": 0.7644836272040302, \"f1\": 0.7742908871454436, \"f2\": 0.6819389816094398, \"f0_5\": 0.8955744799664945, \"p4\": 0.7639005528137026, \"phi\": 0.6181385126768588}, {\"truth_threshold\": 0.10000000149011612, \"match_probability\": 0.5173217450900928, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1279.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 752.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6297390448055146, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3702609551944855, \"precision\": 1.0, \"recall\": 0.6297390448055146, \"specificity\": 1.0, \"npv\": 0.6035846072746441, \"accuracy\": 0.7632241813602015, \"f1\": 0.7728096676737161, \"f2\": 0.6801020950760396, \"f0_5\": 0.8947810270043375, \"p4\": 0.7626706427097226, \"phi\": 0.6165231496419628}, {\"truth_threshold\": 0.20000000298023224, \"match_probability\": 0.5346019618947252, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1272.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 759.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6262924667651403, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37370753323485967, \"precision\": 1.0, \"recall\": 0.6262924667651403, \"specificity\": 1.0, \"npv\": 0.6013655462184874, \"accuracy\": 0.7610201511335013, \"f1\": 0.7702089009990918, \"f2\": 0.6768837803320562, \"f0_5\": 0.8933839022334598, \"p4\": 0.7605169691954441, \"phi\": 0.6137024615957984}, {\"truth_threshold\": 0.30000000447034836, \"match_probability\": 0.5517995194264473, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1271.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 760.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6258000984736583, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3741999015263417, \"precision\": 1.0, \"recall\": 0.6258000984736583, \"specificity\": 1.0, \"npv\": 0.6010498687664042, \"accuracy\": 0.760705289672544, \"f1\": 0.7698364627498486, \"f2\": 0.6764236295902075, \"f0_5\": 0.893183415319747, \"p4\": 0.7602091587940459, \"phi\": 0.6133001443515198}, {\"truth_threshold\": 0.5000000074505806, \"match_probability\": 0.5857864388799862, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1268.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 763.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6243229935992122, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37567700640078777, \"precision\": 1.0, \"recall\": 0.6243229935992122, \"specificity\": 1.0, \"npv\": 0.600104821802935, \"accuracy\": 0.7597607052896725, \"f1\": 0.7687177932706881, \"f2\": 0.6750425894378195, \"f0_5\": 0.892580599746586, \"p4\": 0.7592855072439135, \"phi\": 0.6120941421230318}, {\"truth_threshold\": 0.6000000089406967, \"match_probability\": 0.6024989422185573, \"total_clerical_labels\": 
3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1266.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 765.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6233382570162481, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3766617429837518, \"precision\": 1.0, \"recall\": 0.6233382570162481, \"specificity\": 1.0, \"npv\": 0.599476439790576, \"accuracy\": 0.7591309823677582, \"f1\": 0.7679708826205641, \"f2\": 0.6741214057507987, \"f0_5\": 0.8921775898520085, \"p4\": 0.7586695530830421, \"phi\": 0.6112909283650163}, {\"truth_threshold\": 0.7000000104308128, \"match_probability\": 0.6189757403752982, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1255.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 776.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6179222058099458, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.38207779419005417, \"precision\": 1.0, \"recall\": 0.6179222058099458, \"specificity\": 1.0, \"npv\": 0.5960437272254034, \"accuracy\": 0.7556675062972292, \"f1\": 0.7638466220328667, \"f2\": 0.6690478729075594, \"f0_5\": 0.8899446886966388, \"p4\": 0.7552790297360157, \"phi\": 0.6068843832941353}, {\"truth_threshold\": 0.800000011920929, \"match_probability\": 0.6351831076021942, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1249.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 782.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6149679960610537, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3850320039389463, \"precision\": 1.0, \"recall\": 0.6149679960610537, \"specificity\": 1.0, \"npv\": 0.5941878567721848, \"accuracy\": 0.7537783375314862, \"f1\": 0.7615853658536585, \"f2\": 0.6662754721007148, \"f0_5\": 0.8887149565959869, \"p4\": 0.7534275803790252, \"phi\": 0.6044886397303119}, {\"truth_threshold\": 0.9000000134110451, \"match_probability\": 0.6510896818658842, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, 
\"n\": 1145.0, \"tp\": 1246.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 785.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6134908911866076, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3865091088133924, \"precision\": 1.0, \"recall\": 0.6134908911866076, \"specificity\": 1.0, \"npv\": 0.5932642487046632, \"accuracy\": 0.7528337531486146, \"f1\": 0.7604516325907843, \"f2\": 0.6648879402347919, \"f0_5\": 0.8880969351389879, \"p4\": 0.7525012807216611, \"phi\": 0.6032928083832734}, {\"truth_threshold\": 1.0000000149011612, \"match_probability\": 0.6666666689619328, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1241.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 790.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6110290497291975, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3889709502708026, \"precision\": 1.0, \"recall\": 0.6110290497291975, \"specificity\": 1.0, \"npv\": 0.5917312661498708, \"accuracy\": 0.7512594458438288, \"f1\": 0.758557457212714, \"f2\": 0.6625734116390817, \"f0_5\": 0.8870621872766261, \"p4\": 0.7509565686139796, \"phi\": 0.6013027467512604}, {\"truth_threshold\": 1.1000000163912773, \"match_probability\": 0.681888002282774, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1238.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 793.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6095519448547514, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.39044805514524866, \"precision\": 1.0, \"recall\": 0.6095519448547514, \"specificity\": 1.0, \"npv\": 0.5908152734778122, \"accuracy\": 0.7503148614609572, \"f1\": 0.757418170694402, \"f2\": 0.6611835077974791, \"f0_5\": 0.8864384934841758, \"p4\": 0.7500292006663175, \"phi\": 0.6001104889920623}, {\"truth_threshold\": 1.2000000178813934, \"match_probability\": 0.6967304575959814, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 
1232.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 799.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6065977351058592, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3934022648941408, \"precision\": 1.0, \"recall\": 0.6065977351058592, \"specificity\": 1.0, \"npv\": 0.5889917695473251, \"accuracy\": 0.7484256926952141, \"f1\": 0.7551333129022372, \"f2\": 0.6584010260795211, \"f0_5\": 0.8851846529673804, \"p4\": 0.748173210499427, \"phi\": 0.5977299335012423}, {\"truth_threshold\": 1.3000000193715096, \"match_probability\": 0.7111737233641148, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1217.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 814.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5992122107336287, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.40078778926637126, \"precision\": 1.0, \"recall\": 0.5992122107336287, \"specificity\": 1.0, \"npv\": 0.5844818785094436, \"accuracy\": 0.7437027707808564, \"f1\": 0.749384236453202, \"f2\": 0.6514291831709667, \"f0_5\": 0.8820118857805479, \"p4\": 0.7435255360881731, \"phi\": 0.5918012154054669}, {\"truth_threshold\": 1.5000000223517418, \"match_probability\": 0.7387961280260511, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1216.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 815.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5987198424421467, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4012801575578533, \"precision\": 1.0, \"recall\": 0.5987198424421467, \"specificity\": 1.0, \"npv\": 0.5841836734693877, \"accuracy\": 0.7433879093198993, \"f1\": 0.7489990760702187, \"f2\": 0.6509635974304069, \"f0_5\": 0.8817984046410442, \"p4\": 0.7432152820546354, \"phi\": 0.5914070991600171}, {\"truth_threshold\": 1.600000023841858, \"match_probability\": 0.7519492561137834, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1213.0, \"tn\": 1145.0, 
\"fp\": 0.0, \"fn\": 818.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5972427375677006, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.40275726243229937, \"precision\": 1.0, \"recall\": 0.5972427375677006, \"specificity\": 1.0, \"npv\": 0.5832908813041263, \"accuracy\": 0.7424433249370277, \"f1\": 0.747842170160296, \"f2\": 0.6495662418335654, \"f0_5\": 0.8811564724684005, \"p4\": 0.742284201345094, \"phi\": 0.590225586321326}, {\"truth_threshold\": 1.700000025331974, \"match_probability\": 0.7646510400908766, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1200.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 831.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5908419497784343, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4091580502215657, \"precision\": 1.0, \"recall\": 0.5908419497784343, \"specificity\": 1.0, \"npv\": 0.5794534412955465, \"accuracy\": 0.7383501259445844, \"f1\": 0.7428040854224698, \"f2\": 0.6435006435006435, \"f0_5\": 0.8783487044356609, \"p4\": 0.7382438098538624, \"phi\": 0.5851199886013844}, {\"truth_threshold\": 1.8000000268220901, \"match_probability\": 0.7768953900182098, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1195.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 836.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5883801083210242, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4116198916789759, \"precision\": 1.0, \"recall\": 0.5883801083210242, \"specificity\": 1.0, \"npv\": 0.5779909136799596, \"accuracy\": 0.7367758186397985, \"f1\": 0.7408555486670799, \"f2\": 0.6411632149372251, \"f0_5\": 0.8772573777712523, \"p4\": 0.7366872427429624, \"phi\": 0.5831623756721471}, {\"truth_threshold\": 1.9000000283122063, \"match_probability\": 0.7886787621992872, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1190.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 841.0, 
\"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.585918266863614, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.414081733136386, \"precision\": 1.0, \"recall\": 0.585918266863614, \"specificity\": 1.0, \"npv\": 0.5765357502517623, \"accuracy\": 0.7352015113350125, \"f1\": 0.7389009624340267, \"f2\": 0.6388232767876315, \"f0_5\": 0.8761596230304816, \"p4\": 0.7351291857832581, \"phi\": 0.5812080759697219}, {\"truth_threshold\": 2.0000000298023224, \"match_probability\": 0.8000000033051833, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1189.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 842.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.585425898572132, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.41457410142786805, \"precision\": 1.0, \"recall\": 0.585425898572132, \"specificity\": 1.0, \"npv\": 0.5762455963764469, \"accuracy\": 0.7348866498740554, \"f1\": 0.7385093167701864, \"f2\": 0.6383549876516698, \"f0_5\": 0.8759392957123914, \"p4\": 0.7348173920177556, \"phi\": 0.5808176099748659}, {\"truth_threshold\": 2.1000000312924385, \"match_probability\": 0.8108601793810092, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1183.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 848.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5824716888232397, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4175283111767602, \"precision\": 1.0, \"recall\": 0.5824716888232397, \"specificity\": 1.0, \"npv\": 0.57451078775715, \"accuracy\": 0.7329974811083123, \"f1\": 0.7361543248288737, \"f2\": 0.6355431395723649, \"f0_5\": 0.8746118586426143, \"p4\": 0.732945325152551, \"phi\": 0.578477543896111}, {\"truth_threshold\": 2.2000000327825546, \"match_probability\": 0.8212623941099038, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1179.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 852.0, \"P_rate\": 0.6394836272040302, 
\"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5805022156573116, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4194977843426883, \"precision\": 1.0, \"recall\": 0.5805022156573116, \"specificity\": 1.0, \"npv\": 0.5733600400600901, \"accuracy\": 0.7317380352644837, \"f1\": 0.7345794392523365, \"f2\": 0.6336665591744598, \"f0_5\": 0.8737216540684749, \"p4\": 0.7316960140750485, \"phi\": 0.5769200755947459}, {\"truth_threshold\": 2.3000000342726707, \"match_probability\": 0.8312116004280432, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1164.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 867.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5731166912850812, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.42688330871491875, \"precision\": 1.0, \"recall\": 0.5731166912850812, \"specificity\": 1.0, \"npv\": 0.5690854870775348, \"accuracy\": 0.7270151133501259, \"f1\": 0.7286384976525822, \"f2\": 0.6266149870801033, \"f0_5\": 0.8703454463885151, \"p4\": 0.7270016744799519, \"phi\": 0.571097532311457}, {\"truth_threshold\": 2.400000035762787, \"match_probability\": 0.8407144092272857, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1151.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 880.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5667159034958149, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43328409650418515, \"precision\": 1.0, \"recall\": 0.5667159034958149, \"specificity\": 1.0, \"npv\": 0.5654320987654321, \"accuracy\": 0.7229219143576826, \"f1\": 0.7234443746071653, \"f2\": 0.6204851752021563, \"f0_5\": 0.8673700075357951, \"p4\": 0.7229205464573797, \"phi\": 0.5660736371863528}, {\"truth_threshold\": 2.500000037252903, \"match_probability\": 0.8497788984739328, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1148.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 883.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.5652387986213688, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4347612013786312, \"precision\": 1.0, \"recall\": 0.5652387986213688, \"specificity\": 1.0, \"npv\": 0.564595660749507, \"accuracy\": 0.7219773299748111, \"f1\": 0.7222396980182447, \"f2\": 0.6190681622088007, \"f0_5\": 0.8666767325985203, \"p4\": 0.7219769863862414, \"phi\": 0.5649171381617742}, {\"truth_threshold\": 2.600000038743019, \"match_probability\": 0.8584144256340188, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1147.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 884.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5647464303298868, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43525356967011325, \"precision\": 1.0, \"recall\": 0.5647464303298868, \"specificity\": 1.0, \"npv\": 0.5643173977328734, \"accuracy\": 0.7216624685138538, \"f1\": 0.7218376337319069, \"f2\": 0.6185956207528853, \"f0_5\": 0.8664450823387219, \"p4\": 0.7216623155682867, \"phi\": 0.5645318732743893}, {\"truth_threshold\": 2.7000000402331352, \"match_probability\": 0.8666314458464526, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1144.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 887.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5632693254554406, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4367306745445593, \"precision\": 1.0, \"recall\": 0.5632693254554406, \"specificity\": 1.0, \"npv\": 0.5634842519685039, \"accuracy\": 0.7207178841309824, \"f1\": 0.7206299212598425, \"f2\": 0.6171773845489857, \"f0_5\": 0.8657484486151051, \"p4\": 0.7207178457145617, \"phi\": 0.5633767784627467}, {\"truth_threshold\": 2.8000000417232513, \"match_probability\": 0.8744413378412453, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1136.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 895.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, 
\"tp_rate\": 0.5593303791235844, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.44066962087641554, \"precision\": 1.0, \"recall\": 0.5593303791235844, \"specificity\": 1.0, \"npv\": 0.5612745098039216, \"accuracy\": 0.718198992443325, \"f1\": 0.7173981686138301, \"f2\": 0.6133909287257019, \"f0_5\": 0.8638783269961977, \"p4\": 0.7181958416012424, \"phi\": 0.560301601247963}, {\"truth_threshold\": 2.9000000432133675, \"match_probability\": 0.8818562391739494, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1135.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 896.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5588380108321024, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.44116198916789756, \"precision\": 1.0, \"recall\": 0.5588380108321024, \"specificity\": 1.0, \"npv\": 0.560999510044096, \"accuracy\": 0.7178841309823678, \"f1\": 0.7169930511686671, \"f2\": 0.612917161680527, \"f0_5\": 0.8636432810835489, \"p4\": 0.717880234989325, \"phi\": 0.5599177174110734}, {\"truth_threshold\": 3.1000000461935997, \"match_probability\": 0.8955524998434058, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1109.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 922.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5460364352535697, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45396356474643035, \"precision\": 1.0, \"recall\": 0.5460364352535697, \"specificity\": 1.0, \"npv\": 0.5539429124334785, \"accuracy\": 0.7096977329974811, \"f1\": 0.7063694267515923, \"f2\": 0.6005631972273368, \"f0_5\": 0.8574300293799288, \"p4\": 0.7096451676361052, \"phi\": 0.5499754660338557}, {\"truth_threshold\": 3.200000047683716, \"match_probability\": 0.9018605969116819, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1105.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 926.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 
0.5440669620876416, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45593303791235845, \"precision\": 1.0, \"recall\": 0.5440669620876416, \"specificity\": 1.0, \"npv\": 0.5528730082085949, \"accuracy\": 0.7084382871536524, \"f1\": 0.704719387755102, \"f2\": 0.5986564091450861, \"f0_5\": 0.8564563633545187, \"p4\": 0.7083729914338502, \"phi\": 0.5484523115060287}, {\"truth_threshold\": 3.300000049173832, \"match_probability\": 0.9078269283845571, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1103.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 928.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5430822255046776, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4569177744953225, \"precision\": 1.0, \"recall\": 0.5430822255046776, \"specificity\": 1.0, \"npv\": 0.5523396044380126, \"accuracy\": 0.707808564231738, \"f1\": 0.7038927887683472, \"f2\": 0.597702395144684, \"f0_5\": 0.8559677169020643, \"p4\": 0.7077363543593591, \"phi\": 0.5476913561601727}, {\"truth_threshold\": 3.400000050663948, \"match_probability\": 0.9134653434169965, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1101.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 930.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5420974889217134, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45790251107828656, \"precision\": 1.0, \"recall\": 0.5420974889217134, \"specificity\": 1.0, \"npv\": 0.5518072289156627, \"accuracy\": 0.7071788413098237, \"f1\": 0.7030651340996169, \"f2\": 0.5967479674796748, \"f0_5\": 0.8554778554778555, \"p4\": 0.7070993471091068, \"phi\": 0.5469308120448416}, {\"truth_threshold\": 3.500000052154064, \"match_probability\": 0.9187896995557598, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1094.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 937.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5386509108813392, 
\"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46134908911866074, \"precision\": 1.0, \"recall\": 0.5386509108813392, \"specificity\": 1.0, \"npv\": 0.5499519692603266, \"accuracy\": 0.7049748110831234, \"f1\": 0.70016, \"f2\": 0.5934042091559991, \"f0_5\": 0.8537537068830966, \"p4\": 0.7048668608108392, \"phi\": 0.544272109503198}, {\"truth_threshold\": 3.6000000536441803, \"match_probability\": 0.9238137785296746, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1093.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 938.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5381585425898572, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46184145741014276, \"precision\": 1.0, \"recall\": 0.5381585425898572, \"specificity\": 1.0, \"npv\": 0.5496879500720115, \"accuracy\": 0.7046599496221663, \"f1\": 0.6997439180537772, \"f2\": 0.5929261147878919, \"f0_5\": 0.8535061689832891, \"p4\": 0.7045475528853299, \"phi\": 0.5438926972206558}, {\"truth_threshold\": 3.7000000551342964, \"match_probability\": 0.9285512128432143, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1089.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 942.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5361890694239291, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4638109305760709, \"precision\": 1.0, \"recall\": 0.5361890694239291, \"specificity\": 1.0, \"npv\": 0.5486344034499281, \"accuracy\": 0.7034005037783375, \"f1\": 0.698076923076923, \"f2\": 0.5910126994464344, \"f0_5\": 0.8525129168623767, \"p4\": 0.7032693518148777, \"phi\": 0.5423760413585481}, {\"truth_threshold\": 3.8000000566244125, \"match_probability\": 0.9330154225613858, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1084.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 947.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.533727227966519, \"tn_rate\": 1.0, \"fp_rate\": 0.0, 
\"fn_rate\": 0.46627277203348105, \"precision\": 1.0, \"recall\": 0.533727227966519, \"specificity\": 1.0, \"npv\": 0.5473231357552581, \"accuracy\": 0.7018261964735516, \"f1\": 0.6959871589085073, \"f2\": 0.5886185925282363, \"f0_5\": 0.8512643317103816, \"p4\": 0.701669388939399, \"phi\": 0.5404824326919393}, {\"truth_threshold\": 3.9000000581145287, \"match_probability\": 0.9372195616099515, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1074.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 957.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5288035450516987, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4711964549483013, \"precision\": 1.0, \"recall\": 0.5288035450516987, \"specificity\": 1.0, \"npv\": 0.5447193149381542, \"accuracy\": 0.6986775818639799, \"f1\": 0.6917874396135266, \"f2\": 0.58382257012394, \"f0_5\": 0.8487434803224277, \"p4\": 0.698461896288862, \"phi\": 0.5367024359898404}, {\"truth_threshold\": 4.000000059604645, \"match_probability\": 0.9411764728755594, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1067.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 964.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5253569670113245, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.47464303298867555, \"precision\": 1.0, \"recall\": 0.5253569670113245, \"specificity\": 1.0, \"npv\": 0.5429113323850165, \"accuracy\": 0.6964735516372796, \"f1\": 0.6888315041962556, \"f2\": 0.5804591448155805, \"f0_5\": 0.8469598348944277, \"p4\": 0.6962104664501566, \"phi\": 0.5340620291107292}, {\"truth_threshold\": 4.200000062584877, \"match_probability\": 0.9483982147343843, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1030.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1001.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5071393402264894, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.49286065977351057, \"precision\": 1.0, \"recall\": 0.5071393402264894, \"specificity\": 1.0, \"npv\": 0.5335507921714818, \"accuracy\": 0.684823677581864, \"f1\": 0.6729826853969291, \"f2\": 0.5625955866287962, \"f0_5\": 0.8372622337831247, \"p4\": 0.6842191143036372, \"phi\": 0.5201774665622936}, {\"truth_threshold\": 4.300000064074993, \"match_probability\": 0.9516868803254299, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1025.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1006.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5046774987690793, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.49532250123092075, \"precision\": 1.0, \"recall\": 0.5046774987690793, \"specificity\": 1.0, \"npv\": 0.5323105532310554, \"accuracy\": 0.6832493702770781, \"f1\": 0.6708115183246073, \"f2\": 0.5601705104382992, \"f0_5\": 0.8359158375468928, \"p4\": 0.6825861647803277, \"phi\": 0.518309905918297}, {\"truth_threshold\": 4.400000065565109, \"match_probability\": 0.9547759482410569, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1020.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1011.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5022156573116692, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4977843426883309, \"precision\": 1.0, \"recall\": 0.5022156573116692, \"specificity\": 1.0, \"npv\": 0.5310760667903525, \"accuracy\": 0.6816750629722922, \"f1\": 0.6686332350049164, \"f2\": 0.5577427821522309, \"f0_5\": 0.8345606283750614, \"p4\": 0.6809500591436524, \"phi\": 0.5164443009324556}, {\"truth_threshold\": 4.500000067055225, \"match_probability\": 0.9576762895591182, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1014.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1017.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.49926144756277696, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5007385524372231, 
\"precision\": 1.0, \"recall\": 0.49926144756277696, \"specificity\": 1.0, \"npv\": 0.5296022201665125, \"accuracy\": 0.6797858942065491, \"f1\": 0.6660098522167488, \"f2\": 0.5548260013131976, \"f0_5\": 0.8329226219812715, \"p4\": 0.6789824824414316, \"phi\": 0.5142081009404593}, {\"truth_threshold\": 4.6000000685453415, \"match_probability\": 0.9603983391922627, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1010.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1021.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.49729197439684886, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5027080256031512, \"precision\": 1.0, \"recall\": 0.49729197439684886, \"specificity\": 1.0, \"npv\": 0.528624192059095, \"accuracy\": 0.6785264483627204, \"f1\": 0.6642551792173627, \"f2\": 0.5528793518721261, \"f0_5\": 0.8318234228298468, \"p4\": 0.6776681406756905, \"phi\": 0.5127188003018871}, {\"truth_threshold\": 4.700000070035458, \"match_probability\": 0.9629520927573305, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1005.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1026.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4948301329394387, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5051698670605613, \"precision\": 1.0, \"recall\": 0.4948301329394387, \"specificity\": 1.0, \"npv\": 0.5274067250115154, \"accuracy\": 0.6769521410579346, \"f1\": 0.6620553359683794, \"f2\": 0.5504436411436083, \"f0_5\": 0.8304412493802678, \"p4\": 0.6760222065562214, \"phi\": 0.5108588257538497}, {\"truth_threshold\": 4.800000071525574, \"match_probability\": 0.9653471069144568, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 998.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1033.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4913835548990645, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5086164451009355, \"precision\": 1.0, 
\"recall\": 0.4913835548990645, \"specificity\": 1.0, \"npv\": 0.5257116620752984, \"accuracy\": 0.6747481108312342, \"f1\": 0.6589633542423242, \"f2\": 0.5470291602718702, \"f0_5\": 0.8284907853229287, \"p4\": 0.6737121749549234, \"phi\": 0.508257872897662}, {\"truth_threshold\": 4.90000007301569, \"match_probability\": 0.9675925026740654, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 995.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1036.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4899064500246184, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5100935499753816, \"precision\": 1.0, \"recall\": 0.4899064500246184, \"specificity\": 1.0, \"npv\": 0.5249885373681797, \"accuracy\": 0.6738035264483627, \"f1\": 0.6576338400528751, \"f2\": 0.5455642066016011, \"f0_5\": 0.8276493095990684, \"p4\": 0.6727200795968197, \"phi\": 0.5071442306145872}, {\"truth_threshold\": 5.000000074505806, \"match_probability\": 0.969696971214501, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 986.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1045.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4854751354012802, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5145248645987198, \"precision\": 1.0, \"recall\": 0.4854751354012802, \"specificity\": 1.0, \"npv\": 0.5228310502283106, \"accuracy\": 0.6709697732997482, \"f1\": 0.6536294332117998, \"f2\": 0.5411635565312843, \"f0_5\": 0.8251046025104602, \"p4\": 0.6697361249633867, \"phi\": 0.5038069817912239}, {\"truth_threshold\": 5.100000075995922, \"match_probability\": 0.9716687817966767, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 979.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1052.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.48202855736090594, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.517971442639094, \"precision\": 1.0, \"recall\": 
0.48202855736090594, \"specificity\": 1.0, \"npv\": 0.5211652253072372, \"accuracy\": 0.6687657430730478, \"f1\": 0.6504983388704318, \"f2\": 0.5377348126991102, \"f0_5\": 0.8231040860938288, \"p4\": 0.6674071352914174, \"phi\": 0.5012150453662769}, {\"truth_threshold\": 5.200000077486038, \"match_probability\": 0.9735157914041783, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 974.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1057.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4795667159034958, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5204332840965041, \"precision\": 1.0, \"recall\": 0.4795667159034958, \"specificity\": 1.0, \"npv\": 0.5199818346957311, \"accuracy\": 0.667191435768262, \"f1\": 0.648252911813644, \"f2\": 0.5352824796658606, \"f0_5\": 0.8216635734773072, \"p4\": 0.6657391023001616, \"phi\": 0.4993655783036174}, {\"truth_threshold\": 5.300000078976154, \"match_probability\": 0.9752454557772836, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 965.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1066.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4751354012801576, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5248645987198425, \"precision\": 1.0, \"recall\": 0.4751354012801576, \"specificity\": 1.0, \"npv\": 0.5178652193577566, \"accuracy\": 0.6643576826196473, \"f1\": 0.644192256341789, \"f2\": 0.5308614809109913, \"f0_5\": 0.8190460023765065, \"p4\": 0.6627270219044649, \"phi\": 0.4960404205390772}, {\"truth_threshold\": 5.4000000804662704, \"match_probability\": 0.9768648415470134, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 961.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1070.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4731659281142294, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5268340718857706, \"precision\": 1.0, \"recall\": 0.4731659281142294, 
\"specificity\": 1.0, \"npv\": 0.5169300225733634, \"accuracy\": 0.6630982367758187, \"f1\": 0.642379679144385, \"f2\": 0.5288937809576224, \"f0_5\": 0.8178723404255319, \"p4\": 0.661384263989902, \"phi\": 0.49456412516582227}, {\"truth_threshold\": 5.500000081956387, \"match_probability\": 0.9783806392104205, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 946.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1085.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.465780403741999, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.534219596258001, \"precision\": 1.0, \"recall\": 0.465780403741999, \"specificity\": 1.0, \"npv\": 0.5134529147982063, \"accuracy\": 0.658375314861461, \"f1\": 0.6355391333557272, \"f2\": 0.5214994487320838, \"f0_5\": 0.8134135855546002, \"p4\": 0.656325954359785, \"phi\": 0.48903609882831217}, {\"truth_threshold\": 5.600000083446503, \"match_probability\": 0.9797991767207457, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 927.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1104.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4564254062038405, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5435745937961596, \"precision\": 1.0, \"recall\": 0.4564254062038405, \"specificity\": 1.0, \"npv\": 0.509115162294353, \"accuracy\": 0.6523929471032746, \"f1\": 0.6267748478701826, \"f2\": 0.5120981107059993, \"f0_5\": 0.8076319916361735, \"p4\": 0.64986435329492, \"phi\": 0.482050925478558}, {\"truth_threshold\": 5.700000084936619, \"match_probability\": 0.9811264334957893, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 926.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1105.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.45593303791235845, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5440669620876416, \"precision\": 1.0, \"recall\": 0.45593303791235845, \"specificity\": 1.0, \"npv\": 
0.5088888888888888, \"accuracy\": 0.6520780856423174, \"f1\": 0.6263104497801826, \"f2\": 0.5116022099447514, \"f0_5\": 0.8073234524847428, \"p4\": 0.6495225157687339, \"phi\": 0.48168377289561637}, {\"truth_threshold\": 5.800000086426735, \"match_probability\": 0.9823680546749124, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 924.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1107.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4549483013293944, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5450516986706057, \"precision\": 1.0, \"recall\": 0.4549483013293944, \"specificity\": 1.0, \"npv\": 0.5084369449378331, \"accuracy\": 0.6514483627204031, \"f1\": 0.6253807106598985, \"f2\": 0.5106100795755968, \"f0_5\": 0.8067050811943426, \"p4\": 0.6488383014404575, \"phi\": 0.4809496069575002}, {\"truth_threshold\": 5.900000087916851, \"match_probability\": 0.9835293654795508, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 917.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1114.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.45150172328902016, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5484982767109798, \"precision\": 1.0, \"recall\": 0.45150172328902016, \"specificity\": 1.0, \"npv\": 0.5068614431164232, \"accuracy\": 0.6492443324937027, \"f1\": 0.6221166892808684, \"f2\": 0.507134166574494, \"f0_5\": 0.8045271100193017, \"p4\": 0.6464378241531998, \"phi\": 0.4783814534822862}, {\"truth_threshold\": 6.000000089406967, \"match_probability\": 0.9846153855541349, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 909.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1122.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.44756277695716395, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.552437223042836, \"precision\": 1.0, \"recall\": 0.44756277695716395, \"specificity\": 1.0, \"npv\": 0.5050727834142038, 
\"accuracy\": 0.6467254408060453, \"f1\": 0.6183673469387755, \"f2\": 0.503155097974095, \"f0_5\": 0.8020116463737427, \"p4\": 0.6436833004319238, \"phi\": 0.4754490272472384}, {\"truth_threshold\": 6.100000090897083, \"match_probability\": 0.985630843183972, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 900.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1131.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4431314623338257, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5568685376661743, \"precision\": 1.0, \"recall\": 0.4431314623338257, \"specificity\": 1.0, \"npv\": 0.5030755711775043, \"accuracy\": 0.6438916876574308, \"f1\": 0.6141248720573184, \"f2\": 0.49867021276595747, \"f0_5\": 0.7991475759190197, \"p4\": 0.6405698927488327, \"phi\": 0.4721531674364921}, {\"truth_threshold\": 6.200000092387199, \"match_probability\": 0.9865801893041345, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 894.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1137.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4401772525849335, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5598227474150664, \"precision\": 1.0, \"recall\": 0.4401772525849335, \"specificity\": 1.0, \"npv\": 0.5017528483786152, \"accuracy\": 0.6420025188916877, \"f1\": 0.6112820512820513, \"f2\": 0.49567531603459747, \"f0_5\": 0.7972177635098984, \"p4\": 0.6384854939205781, \"phi\": 0.4699576473215044}, {\"truth_threshold\": 6.3000000938773155, \"match_probability\": 0.987467611228855, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 892.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1139.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.43919251600196946, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5608074839980305, \"precision\": 1.0, \"recall\": 0.43919251600196946, \"specificity\": 1.0, \"npv\": 0.5013134851138353, \"accuracy\": 
0.6413727959697733, \"f1\": 0.61033185083818, \"f2\": 0.49467613132209404, \"f0_5\": 0.7965708162171816, \"p4\": 0.6377891010276633, \"phi\": 0.46922609777468816}, {\"truth_threshold\": 6.400000095367432, \"match_probability\": 0.9882970460445225, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 887.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1144.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4367306745445593, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5632693254554406, \"precision\": 1.0, \"recall\": 0.4367306745445593, \"specificity\": 1.0, \"npv\": 0.5002184359982526, \"accuracy\": 0.6397984886649875, \"f1\": 0.6079506511309116, \"f2\": 0.4921762290533792, \"f0_5\": 0.7949453307044273, \"p4\": 0.6360445825846852, \"phi\": 0.46739783372748034}, {\"truth_threshold\": 6.500000096857548, \"match_probability\": 0.9890721936212699, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 884.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1147.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.43525356967011325, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5647464303298868, \"precision\": 1.0, \"recall\": 0.43525356967011325, \"specificity\": 1.0, \"npv\": 0.49956369982547993, \"accuracy\": 0.6388539042821159, \"f1\": 0.6065180102915952, \"f2\": 0.49067495559502666, \"f0_5\": 0.793964433267469, \"p4\": 0.63499541952982, \"phi\": 0.4663012798895679}, {\"truth_threshold\": 6.600000098347664, \"match_probability\": 0.9897965292084853, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 876.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1155.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.43131462333825704, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.568685376661743, \"precision\": 1.0, \"recall\": 0.43131462333825704, \"specificity\": 1.0, \"npv\": 0.49782608695652175, \"accuracy\": 0.6363350125944585, 
\"f1\": 0.6026831785345718, \"f2\": 0.4866666666666667, \"f0_5\": 0.7913279132791328, \"p4\": 0.6321884993464733, \"phi\": 0.46337853983930954}, {\"truth_threshold\": 6.70000009983778, \"match_probability\": 0.9904733155885336, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 866.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1165.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4263909404234367, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5736090595765633, \"precision\": 1.0, \"recall\": 0.4263909404234367, \"specificity\": 1.0, \"npv\": 0.49567099567099565, \"accuracy\": 0.6331863979848866, \"f1\": 0.597859855022437, \"f2\": 0.48164627363737483, \"f0_5\": 0.7879890809827116, \"p4\": 0.628660687467854, \"phi\": 0.45972776942966703}, {\"truth_threshold\": 6.800000101327896, \"match_probability\": 0.9911056147706719, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 856.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1175.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.42146725750861647, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5785327424913835, \"precision\": 1.0, \"recall\": 0.42146725750861647, \"specificity\": 1.0, \"npv\": 0.49353448275862066, \"accuracy\": 0.6300377833753149, \"f1\": 0.5930031174229303, \"f2\": 0.4766146993318486, \"f0_5\": 0.7846012832263978, \"p4\": 0.6251109156992851, \"phi\": 0.45607962565127746}, {\"truth_threshold\": 6.900000102818012, \"match_probability\": 0.9916962992137202, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 851.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1180.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4190054160512063, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5809945839487937, \"precision\": 1.0, \"recall\": 0.4190054160512063, \"specificity\": 1.0, \"npv\": 0.4924731182795699, \"accuracy\": 0.628463476070529, \"f1\": 
0.5905621096460791, \"f2\": 0.4740947075208914, \"f0_5\": 0.7828886844526219, \"p4\": 0.6233275653061159, \"phi\": 0.45425642958439905}, {\"truth_threshold\": 7.000000104308128, \"match_probability\": 0.9922480625716311, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 847.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1184.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4170359428852782, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5829640571147218, \"precision\": 1.0, \"recall\": 0.4170359428852782, \"specificity\": 1.0, \"npv\": 0.49162730785744957, \"accuracy\": 0.6272040302267002, \"f1\": 0.5886031966643502, \"f2\": 0.4720766915616988, \"f0_5\": 0.78150950359845, \"p4\": 0.621896736471326, \"phi\": 0.4527982529565263}, {\"truth_threshold\": 7.1000001057982445, \"match_probability\": 0.9927634299608046, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 834.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1197.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.41063515509601184, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5893648449039882, \"precision\": 1.0, \"recall\": 0.41063515509601184, \"specificity\": 1.0, \"npv\": 0.4888983774551665, \"accuracy\": 0.6231108312342569, \"f1\": 0.5821989528795811, \"f2\": 0.4655056932350971, \"f0_5\": 0.7769703745108999, \"p4\": 0.6172204525656357, \"phi\": 0.44806122466967707}, {\"truth_threshold\": 7.200000107288361, \"match_probability\": 0.9932447677519157, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 831.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1200.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4091580502215657, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5908419497784343, \"precision\": 1.0, \"recall\": 0.4091580502215657, \"specificity\": 1.0, \"npv\": 0.488272921108742, \"accuracy\": 0.6221662468513854, \"f1\": 0.5807127882599581, 
\"f2\": 0.4639865996649916, \"f0_5\": 0.7759103641456583, \"p4\": 0.6161355181490583, \"phi\": 0.44696845120974843}, {\"truth_threshold\": 7.300000108778477, \"match_probability\": 0.9936942928922654, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 829.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1202.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.40817331363860165, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5918266863613983, \"precision\": 1.0, \"recall\": 0.40817331363860165, \"specificity\": 1.0, \"npv\": 0.4878568385172561, \"accuracy\": 0.621536523929471, \"f1\": 0.5797202797202797, \"f2\": 0.4629733050374176, \"f0_5\": 0.7752010473162521, \"p4\": 0.6154109979823547, \"phi\": 0.44624000533215374}, {\"truth_threshold\": 7.400000110268593, \"match_probability\": 0.9941140817673122, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 827.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1204.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4071885770556376, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5928114229443624, \"precision\": 1.0, \"recall\": 0.4071885770556376, \"specificity\": 1.0, \"npv\": 0.4874414644529587, \"accuracy\": 0.6209068010075567, \"f1\": 0.5787263820853744, \"f2\": 0.46195955759133056, \"f0_5\": 0.7744896047949054, \"p4\": 0.6146854858236389, \"phi\": 0.4455116118672065}, {\"truth_threshold\": 7.500000111758709, \"match_probability\": 0.9945060786121668, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 817.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1214.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.40226489414081734, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5977351058591827, \"precision\": 1.0, \"recall\": 0.40226489414081734, \"specificity\": 1.0, \"npv\": 0.4853751589656634, \"accuracy\": 0.6177581863979849, \"f1\": 0.5737359550561798, \"f2\": 
0.4568840174477128, \"f0_5\": 0.7709001698433666, \"p4\": 0.611042815748838, \"phi\": 0.4418703281958464}, {\"truth_threshold\": 7.600000113248825, \"match_probability\": 0.9948721034855129, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 805.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1226.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.396356474643033, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.603643525356967, \"precision\": 1.0, \"recall\": 0.396356474643033, \"specificity\": 1.0, \"npv\": 0.48291859974694223, \"accuracy\": 0.6139798488664987, \"f1\": 0.5677009873060649, \"f2\": 0.45077836263859333, \"f0_5\": 0.7665206627309084, \"p4\": 0.6066374884822956, \"phi\": 0.43750190140758005}, {\"truth_threshold\": 7.700000114738941, \"match_probability\": 0.9952138598197071, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 803.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1228.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3953717380600689, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.604628261939931, \"precision\": 1.0, \"recall\": 0.3953717380600689, \"specificity\": 1.0, \"npv\": 0.48251158870627897, \"accuracy\": 0.6133501259445844, \"f1\": 0.5666901905434015, \"f2\": 0.4497591576117397, \"f0_5\": 0.7657829486934961, \"p4\": 0.6058995526108901, \"phi\": 0.436773906570581}, {\"truth_threshold\": 7.800000116229057, \"match_probability\": 0.9955329415617687, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 796.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1235.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.39192516001969474, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6080748399803053, \"precision\": 1.0, \"recall\": 0.39192516001969474, \"specificity\": 1.0, \"npv\": 0.4810924369747899, \"accuracy\": 0.6111460957178841, \"f1\": 0.5631411390166254, \"f2\": 0.4461883408071749, 
\"f0_5\": 0.763183125599233, \"p4\": 0.6033082263812478, \"phi\": 0.4342260129766634}, {\"truth_threshold\": 7.900000117719173, \"match_probability\": 0.99583083992065, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 794.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1237.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3909404234367307, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6090595765632694, \"precision\": 1.0, \"recall\": 0.3909404234367307, \"specificity\": 1.0, \"npv\": 0.480688497061293, \"accuracy\": 0.6105163727959698, \"f1\": 0.5621238938053097, \"f2\": 0.44516707782013903, \"f0_5\": 0.7624351834069522, \"p4\": 0.6025653750424809, \"phi\": 0.43349805603059816}, {\"truth_threshold\": 8.00000011920929, \"match_probability\": 0.9961089497366072, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 785.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1246.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3865091088133924, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6134908911866076, \"precision\": 1.0, \"recall\": 0.3865091088133924, \"specificity\": 1.0, \"npv\": 0.47887913007109995, \"accuracy\": 0.6076826196473551, \"f1\": 0.5575284090909091, \"f2\": 0.4405657200583679, \"f0_5\": 0.7590408044865596, \"p4\": 0.5992086772176372, \"phi\": 0.4302222051371983}, {\"truth_threshold\": 8.100000120699406, \"match_probability\": 0.9963685754887298, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 772.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1259.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.38010832102412606, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6198916789758739, \"precision\": 1.0, \"recall\": 0.38010832102412606, \"specificity\": 1.0, \"npv\": 0.4762895174708819, \"accuracy\": 0.6035894206549118, \"f1\": 0.5508383874420264, \"f2\": 0.43390287769784175, \"f0_5\": 
0.7540535260793124, \"p4\": 0.5943189937980553, \"phi\": 0.4254898457157915}, {\"truth_threshold\": 8.200000122189522, \"match_probability\": 0.9966109369567457, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 765.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1266.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3766617429837518, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6233382570162481, \"precision\": 1.0, \"recall\": 0.3766617429837518, \"specificity\": 1.0, \"npv\": 0.47490667772708417, \"accuracy\": 0.6013853904282116, \"f1\": 0.5472103004291845, \"f2\": 0.43030712116098546, \"f0_5\": 0.7513258691809075, \"p4\": 0.591665315716949, \"phi\": 0.42294110344976693}, {\"truth_threshold\": 8.300000123679638, \"match_probability\": 0.9968371745531442, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 753.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1278.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3707533234859675, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6292466765140325, \"precision\": 1.0, \"recall\": 0.3707533234859675, \"specificity\": 1.0, \"npv\": 0.4725546842756913, \"accuracy\": 0.5976070528967254, \"f1\": 0.540948275862069, \"f2\": 0.42412977357215276, \"f0_5\": 0.7465794170136824, \"p4\": 0.5870811305491775, \"phi\": 0.41857044774335733}, {\"truth_threshold\": 8.400000125169754, \"match_probability\": 0.9970483543414643, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 749.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1282.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3687838503200394, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6312161496799606, \"precision\": 1.0, \"recall\": 0.3687838503200394, \"specificity\": 1.0, \"npv\": 0.4717758549649773, \"accuracy\": 0.5963476070528967, \"f1\": 0.5388489208633094, \"f2\": 0.4220669446635862, \"f0_5\": 0.7449771235329222, 
\"p4\": 0.5855429896383528, \"phi\": 0.41711307373662215}, {\"truth_threshold\": 8.50000012665987, \"match_probability\": 0.997245472756309, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 745.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1286.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.36681437715411125, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6331856228458888, \"precision\": 1.0, \"recall\": 0.36681437715411125, \"specificity\": 1.0, \"npv\": 0.4709995886466475, \"accuracy\": 0.595088161209068, \"f1\": 0.5367435158501441, \"f2\": 0.4200022550456647, \"f0_5\": 0.7433645978846538, \"p4\": 0.5839996987663111, \"phi\": 0.415655411066983}, {\"truth_threshold\": 8.600000128149986, \"match_probability\": 0.9974294610402847, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 736.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1295.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.362383062530773, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.637616937469227, \"precision\": 1.0, \"recall\": 0.362383062530773, \"specificity\": 1.0, \"npv\": 0.4692622950819672, \"accuracy\": 0.5922544080604534, \"f1\": 0.5319840983014095, \"f2\": 0.4153498871331828, \"f0_5\": 0.7396984924623116, \"p4\": 0.5805081133675749, \"phi\": 0.4123744749884776}, {\"truth_threshold\": 8.700000129640102, \"match_probability\": 0.997601189412643, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 732.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1299.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3604135893648449, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6395864106351551, \"precision\": 1.0, \"recall\": 0.3604135893648449, \"specificity\": 1.0, \"npv\": 0.468494271685761, \"accuracy\": 0.5909949622166247, \"f1\": 0.5298588490770901, \"f2\": 0.4132791327913279, \"f0_5\": 0.7380520266182699, \"p4\": 0.5789476138201, \"phi\": 
0.41091568728284633}, {\"truth_threshold\": 8.800000131130219, \"match_probability\": 0.997761470983937, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 718.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1313.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3535204332840965, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6464795667159035, \"precision\": 1.0, \"recall\": 0.3535204332840965, \"specificity\": 1.0, \"npv\": 0.46582587469487385, \"accuracy\": 0.5865869017632241, \"f1\": 0.5223717715532921, \"f2\": 0.4060167382945035, \"f0_5\": 0.7322047725882113, \"p4\": 0.5734425262692389, \"phi\": 0.40580656113113184}, {\"truth_threshold\": 8.900000132620335, \"match_probability\": 0.9979110654305032, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 708.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1323.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.34859675036927623, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6514032496307238, \"precision\": 1.0, \"recall\": 0.34859675036927623, \"specificity\": 1.0, \"npv\": 0.4639384116693679, \"accuracy\": 0.5834382871536524, \"f1\": 0.5169769989047097, \"f2\": 0.4008152173913043, \"f0_5\": 0.7279457125231339, \"p4\": 0.5694677651256314, \"phi\": 0.4021534814960889}, {\"truth_threshold\": 9.00000013411045, \"match_probability\": 0.9980506824420605, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 704.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1327.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3466272772033481, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6533727227966519, \"precision\": 1.0, \"recall\": 0.3466272772033481, \"specificity\": 1.0, \"npv\": 0.46318770226537215, \"accuracy\": 0.5821788413098237, \"f1\": 0.5148080438756856, \"f2\": 0.3987313094698686, \"f0_5\": 0.7262224056117186, \"p4\": 0.5678676238912578, \"phi\": 0.4006912677739821}, 
{\"truth_threshold\": 9.100000135600567, \"match_probability\": 0.9981809849551747, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 700.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1331.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.34465780403741997, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.65534219596258, \"precision\": 1.0, \"recall\": 0.34465780403741997, \"specificity\": 1.0, \"npv\": 0.4624394184168013, \"accuracy\": 0.5809193954659949, \"f1\": 0.5126327352618089, \"f2\": 0.39664551223934724, \"f0_5\": 0.724487683709377, \"p4\": 0.5662615170898467, \"phi\": 0.39922844895106907}, {\"truth_threshold\": 9.200000137090683, \"match_probability\": 0.9983025921847976, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 690.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1341.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3397341211225997, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6602658788774003, \"precision\": 1.0, \"recall\": 0.3397341211225997, \"specificity\": 1.0, \"npv\": 0.46057924376508447, \"accuracy\": 0.5777707808564232, \"f1\": 0.5071664829106945, \"f2\": 0.39142273655547993, \"f0_5\": 0.720100187852223, \"p4\": 0.5622196307198002, \"phi\": 0.3955685586441907}, {\"truth_threshold\": 9.300000138580799, \"match_probability\": 0.9984160824655384, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 684.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1347.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.33677991137370755, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6632200886262924, \"precision\": 1.0, \"recall\": 0.33677991137370755, \"specificity\": 1.0, \"npv\": 0.45947030497592295, \"accuracy\": 0.5758816120906801, \"f1\": 0.5038674033149171, \"f2\": 0.388283378746594, \"f0_5\": 0.7174323473882945, \"p4\": 0.5597758409315445, \"phi\": 0.3933705232838903}, {\"truth_threshold\": 
9.400000140070915, \"match_probability\": 0.9985219959137808, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 661.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1370.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3254554406696209, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6745445593303792, \"precision\": 1.0, \"recall\": 0.3254554406696209, \"specificity\": 1.0, \"npv\": 0.4552683896620278, \"accuracy\": 0.568639798488665, \"f1\": 0.49108469539375926, \"f2\": 0.3762094479225953, \"f0_5\": 0.706951871657754, \"p4\": 0.550272647956958, \"phi\": 0.38492801194561554}, {\"truth_threshold\": 9.500000141561031, \"match_probability\": 0.9986208369212233, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 655.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1376.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3225012309207287, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6774987690792713, \"precision\": 1.0, \"recall\": 0.3225012309207287, \"specificity\": 1.0, \"npv\": 0.45418484728282427, \"accuracy\": 0.5667506297229219, \"f1\": 0.48771407297096053, \"f2\": 0.3730493222462695, \"f0_5\": 0.7041496452375833, \"p4\": 0.5477568608833787, \"phi\": 0.38272074978272863}, {\"truth_threshold\": 9.600000143051147, \"match_probability\": 0.9987130764898899, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 641.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1390.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3156080748399803, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6843919251600197, \"precision\": 1.0, \"recall\": 0.3156080748399803, \"specificity\": 1.0, \"npv\": 0.4516765285996055, \"accuracy\": 0.5623425692695214, \"f1\": 0.47979041916167664, \"f2\": 0.3656588705077011, \"f0_5\": 0.6974972796517954, \"p4\": 0.5418247722541304, \"phi\": 0.3775615971490305}, {\"truth_threshold\": 9.700000144541264, 
\"match_probability\": 0.9987991544181472, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 627.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1404.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3087149187592319, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.691285081240768, \"precision\": 1.0, \"recall\": 0.3087149187592319, \"specificity\": 1.0, \"npv\": 0.4491957630443311, \"accuracy\": 0.5579345088161209, \"f1\": 0.4717832957110609, \"f2\": 0.35824477202605415, \"f0_5\": 0.6906807666886979, \"p4\": 0.535802646044032, \"phi\": 0.3723888203144958}, {\"truth_threshold\": 9.80000014603138, \"match_probability\": 0.9988794813467569, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 625.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1406.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.30773018217626785, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6922698178237322, \"precision\": 1.0, \"recall\": 0.30773018217626785, \"specificity\": 1.0, \"npv\": 0.448843590748726, \"accuracy\": 0.5573047858942065, \"f1\": 0.47063253012048195, \"f2\": 0.35718367813464397, \"f0_5\": 0.689693224453763, \"p4\": 0.5349347426333179, \"phi\": 0.37164865121476715}, {\"truth_threshold\": 9.900000147521496, \"match_probability\": 0.9989544406735176, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 620.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1411.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3052683407188577, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6947316592811423, \"precision\": 1.0, \"recall\": 0.3052683407188577, \"specificity\": 1.0, \"npv\": 0.44796557120500785, \"accuracy\": 0.5557304785894207, \"f1\": 0.46774801961523954, \"f2\": 0.3545288197621226, \"f0_5\": 0.6872090445577477, \"p4\": 0.5327564744189275, \"phi\": 0.36979684506621746}, {\"truth_threshold\": 10.000000149011612, \"match_probability\": 
0.9990243903445719, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 616.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1415.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3032988675529296, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6967011324470704, \"precision\": 1.0, \"recall\": 0.3032988675529296, \"specificity\": 1.0, \"npv\": 0.447265625, \"accuracy\": 0.554471032745592, \"f1\": 0.4654325651681148, \"f2\": 0.3524027459954233, \"f0_5\": 0.6852057842046718, \"p4\": 0.5310049980284561, \"phi\": 0.36831393885903}, {\"truth_threshold\": 10.100000150501728, \"match_probability\": 0.9990896645300149, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 614.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1417.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3023141309699655, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6976858690300345, \"precision\": 1.0, \"recall\": 0.3023141309699655, \"specificity\": 1.0, \"npv\": 0.44691647150663544, \"accuracy\": 0.5538413098236776, \"f1\": 0.46427221172022687, \"f2\": 0.3513389791714351, \"f0_5\": 0.6841987965232895, \"p4\": 0.5301262691052692, \"phi\": 0.367571985738429}, {\"truth_threshold\": 10.200000151991844, \"match_probability\": 0.9991505751910027, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 600.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1431.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.29542097488921715, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7045790251107829, \"precision\": 1.0, \"recall\": 0.29542097488921715, \"specificity\": 1.0, \"npv\": 0.44448757763975155, \"accuracy\": 0.549433249370277, \"f1\": 0.45610034207525657, \"f2\": 0.343878954607978, \"f0_5\": 0.6770480704129993, \"p4\": 0.5239180605880351, \"phi\": 0.3623685327294328}, {\"truth_threshold\": 10.30000015348196, \"match_probability\": 0.9992074135451509, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 592.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1439.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2914820285573609, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7085179714426391, \"precision\": 1.0, \"recall\": 0.2914820285573609, \"specificity\": 1.0, \"npv\": 0.44311145510835914, \"accuracy\": 0.5469143576826196, \"f1\": 0.45139153640869234, \"f2\": 0.33960532354290957, \"f0_5\": 0.6728802000454649, \"p4\": 0.5203244044266416, \"phi\": 0.3593870139723867}, {\"truth_threshold\": 10.400000154972076, \"match_probability\": 0.9992604514366183, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 590.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1441.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.29049729197439683, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7095027080256031, \"precision\": 1.0, \"recall\": 0.29049729197439683, \"specificity\": 1.0, \"npv\": 0.44276875483372, \"accuracy\": 0.5462846347607053, \"f1\": 0.45020984357115607, \"f2\": 0.33853568969474407, \"f0_5\": 0.6718287406057846, \"p4\": 0.5194206063238911, \"phi\": 0.3586406617354916}, {\"truth_threshold\": 10.500000156462193, \"match_probability\": 0.9993099426168967, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 570.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1461.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.28064992614475626, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7193500738552437, \"precision\": 1.0, \"recall\": 0.28064992614475626, \"specificity\": 1.0, \"npv\": 0.43937068303914045, \"accuracy\": 0.5399874055415617, \"f1\": 0.43829296424452135, \"f2\": 0.3278122843340235, \"f0_5\": 0.6610995128740431, \"p4\": 0.5102600262107828, \"phi\": 0.35115431044642736}, {\"truth_threshold\": 10.600000157952309, \"match_probability\": 0.9993561239419685, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 565.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1466.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2781880846873461, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7218119153126539, \"precision\": 1.0, \"recall\": 0.2781880846873461, \"specificity\": 1.0, \"npv\": 0.4385292991191114, \"accuracy\": 0.5384130982367759, \"f1\": 0.4352850539291217, \"f2\": 0.32512371964552883, \"f0_5\": 0.6583546958750874, \"p4\": 0.5079339530793631, \"phi\": 0.3492758591732757}, {\"truth_threshold\": 10.700000159442425, \"match_probability\": 0.9993992164911604, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 543.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1488.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2673559822747415, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7326440177252584, \"precision\": 1.0, \"recall\": 0.2673559822747415, \"specificity\": 1.0, \"npv\": 0.4348651728066844, \"accuracy\": 0.5314861460957179, \"f1\": 0.4219114219114219, \"f2\": 0.31325718241606093, \"f0_5\": 0.6459671663097787, \"p4\": 0.49751894698684695, \"phi\": 0.3409747870925449}, {\"truth_threshold\": 10.800000160932541, \"match_probability\": 0.9994394266126935, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 530.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1501.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.26095519448547516, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7390448055145249, \"precision\": 1.0, \"recall\": 0.26095519448547516, \"specificity\": 1.0, \"npv\": 0.4327286470143613, \"accuracy\": 0.5273929471032746, \"f1\": 0.4139008199921905, \"f2\": 0.30621677836838457, \"f0_5\": 0.6384003854492893, \"p4\": 0.49121990458507664, \"phi\": 0.3360398610895279}, {\"truth_threshold\": 10.900000162422657, \"match_probability\": 0.9994769469006325, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 513.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1518.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.25258493353028066, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7474150664697193, \"precision\": 1.0, \"recall\": 0.25258493353028066, \"specificity\": 1.0, \"npv\": 0.42996620352985354, \"accuracy\": 0.5220403022670025, \"f1\": 0.4033018867924528, \"f2\": 0.29697811740187563, \"f0_5\": 0.6282145481263777, \"p4\": 0.4828102344574534, \"phi\": 0.32954966991161616}, {\"truth_threshold\": 11.000000163912773, \"match_probability\": 0.9995119571076428, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 508.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1523.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.25012309207287053, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7498769079271295, \"precision\": 1.0, \"recall\": 0.25012309207287053, \"specificity\": 1.0, \"npv\": 0.42916041979010494, \"accuracy\": 0.5204659949622166, \"f1\": 0.40015754233950374, \"f2\": 0.2942539388322521, \"f0_5\": 0.6251538272212651, \"p4\": 0.48029802489967816, \"phi\": 0.32763231097251716}, {\"truth_threshold\": 11.10000016540289, \"match_probability\": 0.9995446249976983, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 502.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1529.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.24716888232397832, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7528311176760216, \"precision\": 1.0, \"recall\": 0.24716888232397832, \"specificity\": 1.0, \"npv\": 0.4281974569932685, \"accuracy\": 0.5185768261964736, \"f1\": 0.3963679431504145, \"f2\": 0.2909807558543937, \"f0_5\": 0.6214409507303789, \"p4\": 0.4772593147830282, \"phi\": 0.3253261238495857}, {\"truth_threshold\": 11.200000166893005, \"match_probability\": 0.9995751071426191, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 499.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1532.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.24569177744953224, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7543082225504677, \"precision\": 1.0, \"recall\": 0.24569177744953224, \"specificity\": 1.0, \"npv\": 0.42771759432200224, \"accuracy\": 0.517632241813602, \"f1\": 0.39446640316205533, \"f2\": 0.2893424562217326, \"f0_5\": 0.6195679165631984, \"p4\": 0.47572994995033335, \"phi\": 0.32417078214331824}, {\"truth_threshold\": 11.300000168383121, \"match_probability\": 0.9996035496660847, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 493.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1538.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2427375677006401, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.75726243229936, \"precision\": 1.0, \"recall\": 0.2427375677006401, \"specificity\": 1.0, \"npv\": 0.42676108833395454, \"accuracy\": 0.5157430730478589, \"f1\": 0.3906497622820919, \"f2\": 0.28606243472206105, \"f0_5\": 0.6157881588808394, \"p4\": 0.47265083362918403, \"phi\": 0.32185547777140927}, {\"truth_threshold\": 11.400000169873238, \"match_probability\": 0.99963008893853, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 487.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1544.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2397833579517479, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7602166420482521, \"precision\": 1.0, \"recall\": 0.2397833579517479, \"specificity\": 1.0, \"npv\": 0.42580885087393083, \"accuracy\": 0.5138539042821159, \"f1\": 0.3868149324861001, \"f2\": 0.2827778422947393, \"f0_5\": 0.6119628047248052, \"p4\": 0.46954403414712603, \"phi\": 0.31953384188239936}, {\"truth_threshold\": 11.500000171363354, \"match_probability\": 0.999654852226126, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 482.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1549.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.23732151649433778, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7626784835056623, \"precision\": 1.0, \"recall\": 0.23732151649433778, \"specificity\": 1.0, \"npv\": 0.425018559762435, \"accuracy\": 0.5122795969773299, \"f1\": 0.3836052526860326, \"f2\": 0.2800371833604462, \"f0_5\": 0.6087395807021975, \"p4\": 0.46693344428033184, \"phi\": 0.3175941579139333}, {\"truth_threshold\": 11.60000017285347, \"match_probability\": 0.9996779582968373, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 478.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1553.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.23535204332840964, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7646479566715904, \"precision\": 1.0, \"recall\": 0.23535204332840964, \"specificity\": 1.0, \"npv\": 0.42438843587842845, \"accuracy\": 0.5110201511335013, \"f1\": 0.3810282981267437, \"f2\": 0.27784236224133924, \"f0_5\": 0.6061374587877251, \"p4\": 0.4648305783799129, \"phi\": 0.31603905699918783}, {\"truth_threshold\": 11.700000174343586, \"match_probability\": 0.9996995179863626, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 471.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1560.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.23190546528803546, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7680945347119645, \"precision\": 1.0, \"recall\": 0.23190546528803546, \"specificity\": 1.0, \"npv\": 0.4232902033271719, \"accuracy\": 0.5088161209068011, \"f1\": 0.3764988009592326, \"f2\": 0.27399650959860383, \"f0_5\": 0.6015325670498084, \"p4\": 0.46111916274416753, \"phi\": 0.3133102480839957}, {\"truth_threshold\": 11.800000175833702, \"match_probability\": 0.9997196347265854, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 463.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1568.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.22796651895617923, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7720334810438207, \"precision\": 1.0, \"recall\": 0.22796651895617923, \"specificity\": 1.0, \"npv\": 0.4220420199041651, \"accuracy\": 0.5062972292191436, \"f1\": 0.37129109863672816, \"f2\": 0.2695935716781181, \"f0_5\": 0.5961885140355395, \"p4\": 0.45682744333981634, \"phi\": 0.3101797061878598}, {\"truth_threshold\": 11.900000177323818, \"match_probability\": 0.9997384050389891, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 445.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2191038897095027, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7808961102904973, \"precision\": 1.0, \"recall\": 0.2191038897095027, \"specificity\": 1.0, \"npv\": 0.4192603441962651, \"accuracy\": 0.5006297229219143, \"f1\": 0.3594507269789984, \"f2\": 0.25965690278912357, \"f0_5\": 0.583836263447914, \"p4\": 0.44696743745394574, \"phi\": 0.3030867404132794}, {\"truth_threshold\": 12.000000178813934, \"match_probability\": 0.9997559189953416, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 416.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1615.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2048252092565239, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7951747907434761, \"precision\": 1.0, \"recall\": 0.2048252092565239, \"specificity\": 1.0, \"npv\": 0.4148550724637681, \"accuracy\": 0.49149874055415615, \"f1\": 0.340008173273396, \"f2\": 0.24355971896955503, \"f0_5\": 0.5629228687415426, \"p4\": 0.43044577914486043, \"phi\": 0.29150090399263207}, {\"truth_threshold\": 12.200000181794167, \"match_probability\": 0.9997875084304283, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 409.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1622.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.20137863121614968, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7986213687838504, \"precision\": 1.0, \"recall\": 0.20137863121614968, \"specificity\": 1.0, \"npv\": 0.4138055655945067, \"accuracy\": 0.4892947103274559, \"f1\": 0.33524590163934426, \"f2\": 0.23965779913277863, \"f0_5\": 0.5576765748568312, \"p4\": 0.4263317832731881, \"phi\": 0.28867212956751886}, {\"truth_threshold\": 12.300000183284283, \"match_probability\": 0.9998017355340825, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 400.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1631.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.19694731659281142, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8030526834071886, \"precision\": 1.0, \"recall\": 0.19694731659281142, \"specificity\": 1.0, \"npv\": 0.41246397694524495, \"accuracy\": 0.4864609571788413, \"f1\": 0.3290826820238585, \"f2\": 0.2346316283435007, \"f0_5\": 0.5508124483613329, \"p4\": 0.42096603893246504, \"phi\": 0.2850152161737426}, {\"truth_threshold\": 12.400000184774399, \"match_probability\": 0.9998150102562988, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 399.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1632.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1964549483013294, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8035450516986706, \"precision\": 1.0, \"recall\": 0.1964549483013294, \"specificity\": 1.0, \"npv\": 0.41231544832553113, \"accuracy\": 0.48614609571788414, \"f1\": 0.32839506172839505, \"f2\": 0.23407250967969026, \"f0_5\": 0.5500413564929694, \"p4\": 0.42036442533034846, \"phi\": 0.28460746667055603}, {\"truth_threshold\": 12.500000186264515, \"match_probability\": 0.9998273963279586, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 392.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1639.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1930083702609552, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8069916297390448, \"precision\": 1.0, \"recall\": 0.1930083702609552, \"specificity\": 1.0, \"npv\": 0.4112787356321839, \"accuracy\": 0.4839420654911839, \"f1\": 0.32356582748658685, \"f2\": 0.23015500234852043, \"f0_5\": 0.5445957210336204, \"p4\": 0.4161220886855334, \"phi\": 0.2817449883979377}, {\"truth_threshold\": 12.600000187754631, \"match_probability\": 0.9998389532181915, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 384.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1647.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18906942392909898, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.810930576070901, \"precision\": 1.0, \"recall\": 0.18906942392909898, \"specificity\": 1.0, \"npv\": 0.4101002865329513, \"accuracy\": 0.48142317380352645, \"f1\": 0.31801242236024846, \"f2\": 0.22566995768688294, \"f0_5\": 0.5382674516400336, \"p4\": 0.41120574947340216, \"phi\": 0.27845542718349653}, {\"truth_threshold\": 12.700000189244747, \"match_probability\": 0.9998497364189812, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 383.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1648.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18857705563761692, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.811422944362383, \"precision\": 1.0, \"recall\": 0.18857705563761692, \"specificity\": 1.0, \"npv\": 0.40995345506623704, \"accuracy\": 0.4811083123425693, \"f1\": 0.31731565865782935, \"f2\": 0.22510873398377806, \"f0_5\": 0.5374684254841425, \"p4\": 0.4105860003688893, \"phi\": 0.2780428303424835}, {\"truth_threshold\": 12.800000190734863, \"match_probability\": 0.9998597977108138, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 379.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1652.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18660758247168882, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8133924175283112, \"precision\": 1.0, \"recall\": 0.18660758247168882, \"specificity\": 1.0, \"npv\": 0.40936717912048626, \"accuracy\": 0.47984886649874053, \"f1\": 0.3145228215767635, \"f2\": 0.22286251911090205, \"f0_5\": 0.5342542994079503, \"p4\": 0.40809522958677485, \"phi\": 0.27638925384126056}, {\"truth_threshold\": 12.90000019222498, \"match_probability\": 0.9998691854106266, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 373.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1658.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18365337272279667, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8163466272772033, \"precision\": 1.0, \"recall\": 0.18365337272279667, \"specificity\": 1.0, \"npv\": 0.4084909026043525, \"accuracy\": 0.47795969773299746, \"f1\": 0.3103161397670549, \"f2\": 0.21948923149346827, \"f0_5\": 0.529378370706784, \"p4\": 0.40432322000651333, \"phi\": 0.27389912739888156}, {\"truth_threshold\": 13.000000193715096, \"match_probability\": 0.9998779446032292, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 370.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1661.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18217626784835056, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8178237321516494, \"precision\": 1.0, \"recall\": 0.18217626784835056, \"specificity\": 1.0, \"npv\": 0.4080541696364932, \"accuracy\": 0.4770151133501259, \"f1\": 0.3082049146189088, \"f2\": 0.21780080056510479, \"f0_5\": 0.5269154087154657, \"p4\": 0.40242079403659214, \"phi\": 0.2726495657512296}, {\"truth_threshold\": 13.100000195205212, \"match_probability\": 0.9998861173572945, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 369.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1662.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18168389955686853, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8183161004431314, \"precision\": 1.0, \"recall\": 0.18168389955686853, \"specificity\": 1.0, \"npv\": 0.4079087994299964, \"accuracy\": 0.47670025188916876, \"f1\": 0.3075, \"f2\": 0.21723772518544684, \"f0_5\": 0.5260906757912746, \"p4\": 0.40178418596158894, \"phi\": 0.27223236645190135}, {\"truth_threshold\": 13.200000196695328, \"match_probability\": 0.9998937429269453, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 354.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1677.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.17429837518463812, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8257016248153619, \"precision\": 1.0, \"recall\": 0.17429837518463812, \"specificity\": 1.0, \"npv\": 0.4057406094968108, \"accuracy\": 0.47197732997481107, \"f1\": 0.2968553459119497, \"f2\": 0.20877565463552725, \"f0_5\": 0.5134899912967799, \"p4\": 0.3920831758418028, \"phi\": 0.2659321886904984}, {\"truth_threshold\": 13.300000198185444, \"match_probability\": 0.9999008579398913, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 346.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1685.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.17035942885278188, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8296405711472181, \"precision\": 1.0, \"recall\": 0.17035942885278188, \"specificity\": 1.0, \"npv\": 0.4045936395759717, \"accuracy\": 0.46945843828715367, \"f1\": 0.29112326461926796, \"f2\": 0.20425029515938606, \"f0_5\": 0.5065885797950219, \"p4\": 0.3867889182734259, \"phi\": 0.262538266459636}, {\"truth_threshold\": 13.40000019967556, \"match_probability\": 0.999907496573012, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 335.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1696.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.16494337764647957, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8350566223535204, \"precision\": 1.0, \"recall\": 0.16494337764647957, \"specificity\": 1.0, \"npv\": 0.40302710313269974, \"accuracy\": 0.4659949622166247, \"f1\": 0.28317836010143704, \"f2\": 0.19801394963943728, \"f0_5\": 0.4968851972708395, \"p4\": 0.3793659349517108, \"phi\": 0.2578306647274206}, {\"truth_threshold\": 13.500000201165676, \"match_probability\": 0.9999136907162209, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 325.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1706.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.16001969473165928, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8399803052683407, \"precision\": 1.0, \"recall\": 0.16001969473165928, \"specificity\": 1.0, \"npv\": 0.4016134689582603, \"accuracy\": 0.4628463476070529, \"f1\": 0.2758913412563667, \"f2\": 0.1923304533080838, \"f0_5\": 0.48784148904232966, \"p4\": 0.37246767025663613, \"phi\": 0.2535075239570288}, {\"truth_threshold\": 13.600000202655792, \"match_probability\": 0.9999194701253888, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 324.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1707.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.15952732644017725, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8404726735598228, \"precision\": 1.0, \"recall\": 0.15952732644017725, \"specificity\": 1.0, \"npv\": 0.4014726507713885, \"accuracy\": 0.46253148614609574, \"f1\": 0.2751592356687898, \"f2\": 0.19176136363636365, \"f0_5\": 0.48692515779981965, \"p4\": 0.3717697318528885, \"phi\": 0.25307283263205194}, {\"truth_threshold\": 13.700000204145908, \"match_probability\": 0.9999248625650565, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 322.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1709.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1585425898572132, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8414574101427869, \"precision\": 1.0, \"recall\": 0.1585425898572132, \"specificity\": 1.0, \"npv\": 0.40119131044148565, \"accuracy\": 0.46190176322418136, \"f1\": 0.27369315767105823, \"f2\": 0.1906227800142079, \"f0_5\": 0.4850858692377222, \"p4\": 0.37036934721259873, \"phi\": 0.2522021201052885}, {\"truth_threshold\": 13.800000205636024, \"match_probability\": 0.999929893941616, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 315.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1716.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.155096011816839, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.844903988183161, \"precision\": 1.0, \"recall\": 0.155096011816839, \"specificity\": 1.0, \"npv\": 0.400209716882209, \"accuracy\": 0.4596977329974811, \"f1\": 0.26854219948849106, \"f2\": 0.18663348738002133, \"f0_5\": 0.4785779398359161, \"p4\": 0.36541997841978086, \"phi\": 0.2491403840784887}, {\"truth_threshold\": 13.90000020712614, \"match_probability\": 0.9999345884275949, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 313.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1718.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.15411127523387494, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8458887247661251, \"precision\": 1.0, \"recall\": 0.15411127523387494, \"specificity\": 1.0, \"npv\": 0.39993014320642684, \"accuracy\": 0.45906801007556675, \"f1\": 0.26706484641638223, \"f2\": 0.18549247362806684, \"f0_5\": 0.4766981419433445, \"p4\": 0.3639919317161557, \"phi\": 0.24826144359124447}, {\"truth_threshold\": 14.100000210106373, \"match_probability\": 0.9999430554367367, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 307.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1724.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.15115706548498276, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8488429345150172, \"precision\": 1.0, \"recall\": 0.15115706548498276, \"specificity\": 1.0, \"npv\": 0.399093760892297, \"accuracy\": 0.45717884130982367, \"f1\": 0.262617621899059, \"f2\": 0.18206618431977226, \"f0_5\": 0.4710033752684873, \"p4\": 0.35966979322171594, \"phi\": 0.24561319539032303}, {\"truth_threshold\": 14.200000211596489, \"match_probability\": 0.9999468686412301, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 306.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1725.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.15066469719350073, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8493353028064993, \"precision\": 1.0, \"recall\": 0.15066469719350073, \"specificity\": 1.0, \"npv\": 0.3989547038327526, \"accuracy\": 0.4568639798488665, \"f1\": 0.26187419768934533, \"f2\": 0.18149466192170818, \"f0_5\": 0.4700460829493088, \"p4\": 0.35894382186502344, \"phi\": 0.24517012388723966}, {\"truth_threshold\": 14.300000213086605, \"match_probability\": 0.9999504265130488, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 303.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1728.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14918759231905465, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8508124076809453, \"precision\": 1.0, \"recall\": 0.14918759231905465, \"specificity\": 1.0, \"npv\": 0.39853811347024015, \"accuracy\": 0.45591939546599497, \"f1\": 0.2596401028277635, \"f2\": 0.17977928088287648, \"f0_5\": 0.4671600370027752, \"p4\": 0.3567561397717773, \"phi\": 0.24383794125607963}, {\"truth_threshold\": 14.400000214576721, \"match_probability\": 0.9999537461476637, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 300.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1731.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14771048744460857, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8522895125553914, \"precision\": 1.0, \"recall\": 0.14771048744460857, \"specificity\": 1.0, \"npv\": 0.3981223922114047, \"accuracy\": 0.45497481108312343, \"f1\": 0.2574002574002574, \"f2\": 0.17806267806267806, \"f0_5\": 0.46425255338904364, \"p4\": 0.3545536533347784, \"phi\": 0.24250124250436372}, {\"truth_threshold\": 14.500000216066837, \"match_probability\": 0.9999568434961527, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 298.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1733.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14672575086164452, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8532742491383555, \"precision\": 1.0, \"recall\": 0.14672575086164452, \"specificity\": 1.0, \"npv\": 0.39784572619874914, \"accuracy\": 0.45434508816120905, \"f1\": 0.2559038213825676, \"f2\": 0.17691759677036334, \"f0_5\": 0.4623022029165374, \"p4\": 0.35307700645624607, \"phi\": 0.24160755969879688}, {\"truth_threshold\": 14.600000217556953, \"match_probability\": 0.9999597334417798, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 295.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1736.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14524864598719842, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8547513540128016, \"precision\": 1.0, \"recall\": 0.14524864598719842, \"specificity\": 1.0, \"npv\": 0.3974314474140923, \"accuracy\": 0.4534005037783375, \"f1\": 0.25365434221840066, \"f2\": 0.17519895474521915, \"f0_5\": 0.4593584553098723, \"p4\": 0.3508494030029032, \"phi\": 0.2402631465906275}, {\"truth_threshold\": 14.70000021904707, \"match_probability\": 0.9999624298714548, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 294.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1737.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1447562776957164, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8552437223042836, \"precision\": 1.0, \"recall\": 0.1447562776957164, \"specificity\": 1.0, \"npv\": 0.397293546148508, \"accuracy\": 0.45308564231738035, \"f1\": 0.25290322580645164, \"f2\": 0.17462580185317178, \"f0_5\": 0.4583723105706268, \"p4\": 0.35010346944394827, \"phi\": 0.23981395892022075}, {\"truth_threshold\": 14.800000220537186, \"match_probability\": 0.9999649457424121, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 292.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1739.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14377154111275234, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8562284588872476, \"precision\": 1.0, \"recall\": 0.14377154111275234, \"specificity\": 1.0, \"npv\": 0.39701803051317613, \"accuracy\": 0.452455919395466, \"f1\": 0.2513990529487731, \"f2\": 0.17347908745247148, \"f0_5\": 0.4563926226945921, \"p4\": 0.3486064578319283, \"phi\": 0.2389139889090404}, {\"truth_threshold\": 15.000000223517418, \"match_probability\": 0.9999694833578969, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 290.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1741.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14278680452978829, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8572131954702117, \"precision\": 1.0, \"recall\": 0.14278680452978829, \"specificity\": 1.0, \"npv\": 0.39674289674289676, \"accuracy\": 0.45182619647355166, \"f1\": 0.24989228780697975, \"f2\": 0.17233182790587118, \"f0_5\": 0.4544030084612974, \"p4\": 0.3471025353224262, \"phi\": 0.23801187038845348}, {\"truth_threshold\": 15.100000225007534, \"match_probability\": 0.9999715269079685, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 289.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1742.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14229443623830626, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8577055637616937, \"precision\": 1.0, \"recall\": 0.14229443623830626, \"specificity\": 1.0, \"npv\": 0.39660547280914443, \"accuracy\": 0.45151133501259444, \"f1\": 0.24913793103448276, \"f2\": 0.17175799358136218, \"f0_5\": 0.45340445560087855, \"p4\": 0.346347962973042, \"phi\": 0.2375599969742467}, {\"truth_threshold\": 15.20000022649765, \"match_probability\": 0.9999734336151354, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 288.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1743.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14180206794682423, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8581979320531757, \"precision\": 1.0, \"recall\": 0.14180206794682423, \"specificity\": 1.0, \"npv\": 0.39646814404432135, \"accuracy\": 0.4511964735516373, \"f1\": 0.24838292367399742, \"f2\": 0.17118402282453637, \"f0_5\": 0.4524033930254477, \"p4\": 0.34559163939876736, \"phi\": 0.2371075762191587}, {\"truth_threshold\": 15.300000227987766, \"match_probability\": 0.9999752126423825, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 285.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1746.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14032496307237813, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8596750369276218, \"precision\": 1.0, \"recall\": 0.14032496307237813, \"specificity\": 1.0, \"npv\": 0.3960567277758561, \"accuracy\": 0.45025188916876574, \"f1\": 0.24611398963730569, \"f2\": 0.16946129147342134, \"f0_5\": 0.44938505203405865, \"p4\": 0.34331208180785255, \"phi\": 0.2357469951021941}, {\"truth_threshold\": 15.400000229477882, \"match_probability\": 0.9999768725392036, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 281.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1750.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.13835548990645002, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8616445100935499, \"precision\": 1.0, \"recall\": 0.13835548990645002, \"specificity\": 1.0, \"npv\": 0.3955094991364421, \"accuracy\": 0.44899244332493704, \"f1\": 0.2430795847750865, \"f2\": 0.16716240333135038, \"f0_5\": 0.44532488114104596, \"p4\": 0.34024766819652713, \"phi\": 0.23392501045351508}, {\"truth_threshold\": 15.500000230967999, \"match_probability\": 0.9999784212826682, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 277.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1754.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.13638601674052192, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8636139832594781, \"precision\": 1.0, \"recall\": 0.13638601674052192, \"specificity\": 1.0, \"npv\": 0.39496378061400483, \"accuracy\": 0.4477329974811083, \"f1\": 0.24003466204506066, \"f2\": 0.16486132603261516, \"f0_5\": 0.4412233195285123, \"p4\": 0.3371541980967607, \"phi\": 0.23209381033263574}, {\"truth_threshold\": 15.600000232458115, \"match_probability\": 0.9999798663157408, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 273.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1758.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1344165435745938, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8655834564254062, \"precision\": 1.0, \"recall\": 0.1344165435745938, \"specificity\": 1.0, \"npv\": 0.39441956596624184, \"accuracy\": 0.4464735516372796, \"f1\": 0.23697916666666666, \"f2\": 0.1625580564487317, \"f0_5\": 0.43707973102785785, \"p4\": 0.33403113499095954, \"phi\": 0.23025315367085358}, {\"truth_threshold\": 15.70000023394823, \"match_probability\": 0.9999812145830361, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 271.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1760.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.13343180699162974, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8665681930083703, \"precision\": 1.0, \"recall\": 0.13343180699162974, \"specificity\": 1.0, \"npv\": 0.39414802065404475, \"accuracy\": 0.44584382871536526, \"f1\": 0.23544743701129453, \"f2\": 0.16140559857057774, \"f0_5\": 0.434991974317817, \"p4\": 0.33245833489583054, \"phi\": 0.22932920140715485}, {\"truth_threshold\": 15.800000235438347, \"match_probability\": 0.9999824725641815, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 268.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1763.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.13195470211718366, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8680452978828164, \"precision\": 1.0, \"recall\": 0.13195470211718366, \"specificity\": 1.0, \"npv\": 0.3937414030261348, \"accuracy\": 0.4448992443324937, \"f1\": 0.23314484558503698, \"f2\": 0.15967588179218303, \"f0_5\": 0.43184015468901066, \"p4\": 0.3300848527615133, \"phi\": 0.22793865303523134}, {\"truth_threshold\": 15.900000236928463, \"match_probability\": 0.9999836463049459, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 266.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1765.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1309699655342196, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8690300344657804, \"precision\": 1.0, \"recall\": 0.1309699655342196, \"specificity\": 1.0, \"npv\": 0.39347079037800686, \"accuracy\": 0.44426952141057935, \"f1\": 0.23160644318676535, \"f2\": 0.15852205005959474, \"f0_5\": 0.4297253634894992, \"p4\": 0.32849290653450874, \"phi\": 0.2270084929127756}, {\"truth_threshold\": 16.00000023841858, \"match_probability\": 0.9999847414462861, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 261.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1770.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.12850812407680945, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8714918759231906, \"precision\": 1.0, \"recall\": 0.12850812407680945, \"specificity\": 1.0, \"npv\": 0.3927958833619211, \"accuracy\": 0.44269521410579343, \"f1\": 0.22774869109947643, \"f2\": 0.1556350626118068, \"f0_5\": 0.424390243902439, \"p4\": 0.3244788273615635, \"phi\": 0.22467189881232083}, {\"truth_threshold\": 16.20000024139881, \"match_probability\": 0.9999867166312594, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 255.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1776.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1255539143279173, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8744460856720827, \"precision\": 1.0, \"recall\": 0.1255539143279173, \"specificity\": 1.0, \"npv\": 0.3919890448476549, \"accuracy\": 0.44080604534005036, \"f1\": 0.2230971128608924, \"f2\": 0.15216612960973863, \"f0_5\": 0.41789577187807275, \"p4\": 0.3195960922748543, \"phi\": 0.2218462507104517}, {\"truth_threshold\": 16.300000242888927, \"match_probability\": 0.9999876061677141, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 254.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1777.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.12506154603643527, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8749384539635647, \"precision\": 1.0, \"recall\": 0.12506154603643527, \"specificity\": 1.0, \"npv\": 0.391854893908282, \"accuracy\": 0.4404911838790932, \"f1\": 0.2223194748358862, \"f2\": 0.1515874910479828, \"f0_5\": 0.4168034131933049, \"p4\": 0.31877518775297364, \"phi\": 0.22137294065470842}, {\"truth_threshold\": 16.400000244379044, \"match_probability\": 0.9999884361359999, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 249.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1782.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.12259970457902511, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8774002954209749, \"precision\": 1.0, \"recall\": 0.12259970457902511, \"specificity\": 1.0, \"npv\": 0.3911855141783396, \"accuracy\": 0.4389168765743073, \"f1\": 0.21842105263157896, \"f2\": 0.14869222500895737, \"f0_5\": 0.41129831516352827, \"p4\": 0.3146395889340626, \"phi\": 0.21899595538241903}, {\"truth_threshold\": 16.50000024586916, \"match_probability\": 0.9999892105250341, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 242.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1789.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.11915312653865091, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.880846873461349, \"precision\": 1.0, \"recall\": 0.11915312653865091, \"specificity\": 1.0, \"npv\": 0.39025221540558963, \"accuracy\": 0.43671284634760704, \"f1\": 0.21293444786625604, \"f2\": 0.14463303848912265, \"f0_5\": 0.40346782260753583, \"p4\": 0.30876091977525716, \"phi\": 0.21563805694777322}, {\"truth_threshold\": 16.600000247359276, \"match_probability\": 0.9999899330566321, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 236.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1795.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.11619891678975874, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8838010832102413, \"precision\": 1.0, \"recall\": 0.11619891678975874, \"specificity\": 1.0, \"npv\": 0.38945578231292516, \"accuracy\": 0.43482367758186397, \"f1\": 0.20820467578297308, \"f2\": 0.14114832535885166, \"f0_5\": 0.39663865546218485, \"p4\": 0.30363716807944324, \"phi\": 0.21273067489732173}, {\"truth_threshold\": 16.700000248849392, \"match_probability\": 0.9999906072033913, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 232.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1799.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.11422944362383063, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8857705563761694, \"precision\": 1.0, \"recall\": 0.11422944362383063, \"specificity\": 1.0, \"npv\": 0.3889266304347826, \"accuracy\": 0.43356423173803527, \"f1\": 0.20503756076005303, \"f2\": 0.13882240306366683, \"f0_5\": 0.39202433254477864, \"p4\": 0.3001765927459395, \"phi\": 0.21077683128146796}, {\"truth_threshold\": 16.800000250339508, \"match_probability\": 0.9999912362053778, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 221.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1810.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.10881339241752831, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8911866075824717, \"precision\": 1.0, \"recall\": 0.10881339241752831, \"specificity\": 1.0, \"npv\": 0.38747884940778343, \"accuracy\": 0.4301007556675063, \"f1\": 0.19626998223801065, \"f2\": 0.1324146195326543, \"f0_5\": 0.379073756432247, \"p4\": 0.290469029799348, \"phi\": 0.20533603700788008}, {\"truth_threshold\": 16.900000251829624, \"match_probability\": 0.999991823085696, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 215.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1816.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.10585918266863614, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8941408173313639, \"precision\": 1.0, \"recall\": 0.10585918266863614, \"specificity\": 1.0, \"npv\": 0.386693684566025, \"accuracy\": 0.4282115869017632, \"f1\": 0.19145146927871773, \"f2\": 0.12891233960906584, \"f0_5\": 0.37184365271532344, \"p4\": 0.2850517881231799, \"phi\": 0.20232418884375344}, {\"truth_threshold\": 17.00000025331974, \"match_probability\": 0.9999923706650156, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 209.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1822.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.10290497291974397, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.897095027080256, \"precision\": 1.0, \"recall\": 0.10290497291974397, \"specificity\": 1.0, \"npv\": 0.38591169531513314, \"accuracy\": 0.4263224181360202, \"f1\": 0.18660714285714286, \"f2\": 0.12540501620064803, \"f0_5\": 0.36449250087199164, \"p4\": 0.27954493418624105, \"phi\": 0.19927928280635765}, {\"truth_threshold\": 17.100000254809856, \"match_probability\": 0.9999928815751264, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 203.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1828.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0999507631708518, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9000492368291482, \"precision\": 1.0, \"recall\": 0.0999507631708518, \"specificity\": 1.0, \"npv\": 0.3851328624285234, \"accuracy\": 0.4244332493702771, \"f1\": 0.18173679498657117, \"f2\": 0.12189263840518794, \"f0_5\": 0.3570172353148083, \"p4\": 0.2739457211855208, \"phi\": 0.19619970316467247}, {\"truth_threshold\": 17.200000256299973, \"match_probability\": 0.999993358271586, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 193.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1838.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.09502708025603152, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9049729197439685, \"precision\": 1.0, \"recall\": 0.09502708025603152, \"specificity\": 1.0, \"npv\": 0.38384177003017095, \"accuracy\": 0.42128463476070527, \"f1\": 0.1735611510791367, \"f2\": 0.11602741373091259, \"f0_5\": 0.3442739921512665, \"p4\": 0.26440064321915874, \"phi\": 0.19098524206407744}, {\"truth_threshold\": 17.30000025779009, \"match_probability\": 0.999993803045519, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 178.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1853.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.08764155588380108, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9123584441161989, \"precision\": 1.0, \"recall\": 0.08764155588380108, \"specificity\": 1.0, \"npv\": 0.3819212808539026, \"accuracy\": 0.41656171284634763, \"f1\": 0.16115889542779538, \"f2\": 0.10720308359431463, \"f0_5\": 0.32446226759022967, \"p4\": 0.24955621152340898, \"phi\": 0.18295402504227723}, {\"truth_threshold\": 17.400000259280205, \"match_probability\": 0.9999942180346287, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 160.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1871.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.07877892663712457, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9212210733628754, \"precision\": 1.0, \"recall\": 0.07877892663712457, \"specificity\": 1.0, \"npv\": 0.3796419098143236, \"accuracy\": 0.4108942065491184, \"f1\": 0.1460520310360566, \"f2\": 0.09657170449058426, \"f0_5\": 0.2995132909022838, \"p4\": 0.23084277458650382, \"phi\": 0.17293866589528342}, {\"truth_threshold\": 17.50000026077032, \"match_probability\": 0.9999946052334694, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 156.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1875.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.07680945347119646, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9231905465288035, \"precision\": 1.0, \"recall\": 0.07680945347119646, \"specificity\": 1.0, \"npv\": 0.3791390728476821, \"accuracy\": 0.40963476070528965, \"f1\": 0.14266117969821673, \"f2\": 0.09420289855072464, \"f0_5\": 0.2937853107344633, \"p4\": 0.2265418036022582, \"phi\": 0.1706501244506039}, {\"truth_threshold\": 17.600000262260437, \"match_probability\": 0.999994966503032, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 153.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1878.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.07533234859675036, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9246676514032496, \"precision\": 1.0, \"recall\": 0.07533234859675036, \"specificity\": 1.0, \"npv\": 0.3787628183923255, \"accuracy\": 0.4086901763224181, \"f1\": 0.1401098901098901, \"f2\": 0.09242479159115621, \"f0_5\": 0.28944381384790013, \"p4\": 0.22328051634216844, \"phi\": 0.16891741375778377}, {\"truth_threshold\": 17.700000263750553, \"match_probability\": 0.9999953035796879, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 149.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1882.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.07336287543082226, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9266371245691778, \"precision\": 1.0, \"recall\": 0.07336287543082226, \"specificity\": 1.0, \"npv\": 0.3782623059134457, \"accuracy\": 0.4074307304785894, \"f1\": 0.13669724770642203, \"f2\": 0.09005197630847335, \"f0_5\": 0.2835934526075371, \"p4\": 0.218883751244496, \"phi\": 0.16658454438783837}, {\"truth_threshold\": 17.80000026524067, \"match_probability\": 0.9999956180835331, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 144.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1887.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.07090103397341212, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9290989660265879, \"precision\": 1.0, \"recall\": 0.07090103397341212, \"specificity\": 1.0, \"npv\": 0.3776385224274406, \"accuracy\": 0.40585642317380355, \"f1\": 0.13241379310344828, \"f2\": 0.08708272859216255, \"f0_5\": 0.2761795166858458, \"p4\": 0.21330828694544357, \"phi\": 0.16363056471300563}, {\"truth_threshold\": 17.900000266730785, \"match_probability\": 0.9999959115261747, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 134.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1897.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.06597735105859183, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9340226489414082, \"precision\": 1.0, \"recall\": 0.06597735105859183, \"specificity\": 1.0, \"npv\": 0.37639710716633795, \"accuracy\": 0.4027078085642317, \"f1\": 0.12378752886836028, \"f2\": 0.08113344635504965, \"f0_5\": 0.2610050642773666, \"p4\": 0.2018827078967218, \"phi\": 0.15758706824150226}, {\"truth_threshold\": 18.0000002682209, \"match_probability\": 0.9999961853179954, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 132.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1899.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.06499261447562776, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9350073855243722, \"precision\": 1.0, \"recall\": 0.06499261447562776, \"specificity\": 1.0, \"npv\": 0.37614980289093297, \"accuracy\": 0.4020780856423174, \"f1\": 0.12205270457697642, \"f2\": 0.07994186046511628, \"f0_5\": 0.25791324736225085, \"p4\": 0.1995522156019492, \"phi\": 0.15635523376073404}, {\"truth_threshold\": 18.100000269711018, \"match_probability\": 0.999996440774932, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 131.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1900.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.06450024618414574, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9354997538158543, \"precision\": 1.0, \"recall\": 0.06450024618414574, \"specificity\": 1.0, \"npv\": 0.3760262725779967, \"accuracy\": 0.4017632241813602, \"f1\": 0.1211840888066605, \"f2\": 0.07934585099939431, \"f0_5\": 0.2563600782778865, \"p4\": 0.19838115580714064, \"phi\": 0.15573627436466905}, {\"truth_threshold\": 18.200000271201134, \"match_probability\": 0.9999966791247992, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 128.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1903.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.06302314130969966, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9369768586903003, \"precision\": 1.0, \"recall\": 0.06302314130969966, \"specificity\": 1.0, \"npv\": 0.37565616797900264, \"accuracy\": 0.40081863979848864, \"f1\": 0.11857341361741547, \"f2\": 0.07755695588948133, \"f0_5\": 0.25167125442390875, \"p4\": 0.19484445248024185, \"phi\": 0.15386692873519298}, {\"truth_threshold\": 18.30000027269125, \"match_probability\": 0.999996901513191, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 112.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1919.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.055145248645987195, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9448547513540128, \"precision\": 1.0, \"recall\": 0.055145248645987195, \"specificity\": 1.0, \"npv\": 0.37369451697127937, \"accuracy\": 0.3957808564231738, \"f1\": 0.10452636490900606, \"f2\": 0.06799417192812045, \"f0_5\": 0.22589753933037515, \"p4\": 0.17536236689966953, \"phi\": 0.14355304613982695}, {\"truth_threshold\": 18.400000274181366, \"match_probability\": 0.9999971090089864, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 91.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1940.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0448055145248646, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9551944854751354, \"precision\": 1.0, \"recall\": 0.0448055145248646, \"specificity\": 1.0, \"npv\": 0.3711507293354943, \"accuracy\": 0.3891687657430731, \"f1\": 0.08576814326107446, \"f2\": 0.05538648813146683, \"f0_5\": 0.18997912317327767, \"p4\": 0.1480768274225295, \"phi\": 0.1289558040343884}, {\"truth_threshold\": 18.500000275671482, \"match_probability\": 0.9999973026094866, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 77.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1954.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0379123584441162, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9620876415558838, \"precision\": 1.0, \"recall\": 0.0379123584441162, \"specificity\": 1.0, \"npv\": 0.36947402387867057, \"accuracy\": 0.38476070528967254, \"f1\": 0.0730550284629981, \"f2\": 0.04694549445189611, \"f0_5\": 0.16460025651988028, \"p4\": 0.12868698840481557, \"phi\": 0.11835384078718403}, {\"truth_threshold\": 18.600000277161598, \"match_probability\": 0.999997483245208, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 75.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1956.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.03692762186115214, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9630723781388478, \"precision\": 1.0, \"recall\": 0.03692762186115214, \"specificity\": 1.0, \"npv\": 0.3692357304095453, \"accuracy\": 0.38413098236775817, \"f1\": 0.07122507122507123, \"f2\": 0.045737285034760336, \"f0_5\": 0.16087516087516088, \"p4\": 0.12583247247071236, \"phi\": 0.11676899173235163}, {\"truth_threshold\": 18.700000278651714, \"match_probability\": 0.9999976517843541, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 73.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1958.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.035942885278188084, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9640571147218119, \"precision\": 1.0, \"recall\": 0.035942885278188084, \"specificity\": 1.0, \"npv\": 0.36899774411859493, \"accuracy\": 0.38350125944584385, \"f1\": 0.06939163498098859, \"f2\": 0.04452848603147493, \"f0_5\": 0.1571244080929832, \"p4\": 0.12295600444839334, \"phi\": 0.11516441978651591}, {\"truth_threshold\": 18.900000281631947, \"match_probability\": 0.9999979557589296, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 71.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1960.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.034958148695224026, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9650418513047759, \"precision\": 1.0, \"recall\": 0.034958148695224026, \"specificity\": 1.0, \"npv\": 0.3687600644122383, \"accuracy\": 0.38287153652392947, \"f1\": 0.0675547098001903, \"f2\": 0.043319097010372176, \"f0_5\": 0.15334773218142547, \"p4\": 0.1200573002429353, \"phi\": 0.11353928467532028}, {\"truth_threshold\": 19.000000283122063, \"match_probability\": 0.9999980926553794, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 69.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1962.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.033973412112259974, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.96602658788774, \"precision\": 1.0, \"recall\": 0.033973412112259974, \"specificity\": 1.0, \"npv\": 0.3685226906984229, \"accuracy\": 0.3822418136020151, \"f1\": 0.06571428571428571, \"f2\": 0.04210911753936287, \"f0_5\": 0.14954486345903772, \"p4\": 0.11713607088211223, \"phi\": 0.11189268628385163}, {\"truth_threshold\": 19.10000028461218, \"match_probability\": 0.9999982203843173, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 66.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1965.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.03249630723781388, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9675036927621861, \"precision\": 1.0, \"recall\": 0.03249630723781388, \"specificity\": 1.0, \"npv\": 0.36816720257234725, \"accuracy\": 0.38129722921914355, \"f1\": 0.06294706723891273, \"f2\": 0.040293040293040296, \"f0_5\": 0.1437908496732026, \"p4\": 0.11271134775969976, \"phi\": 0.10938041200177231}, {\"truth_threshold\": 19.200000286102295, \"match_probability\": 0.9999983395596597, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 65.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1966.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.032003938946331856, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9679960610536681, \"precision\": 1.0, \"recall\": 0.032003938946331856, \"specificity\": 1.0, \"npv\": 0.3680488588878174, \"accuracy\": 0.3809823677581864, \"f1\": 0.06202290076335878, \"f2\": 0.03968738551715716, \"f0_5\": 0.14185945002182454, \"p4\": 0.1112248557850375, \"phi\": 0.10853116238718177}, {\"truth_threshold\": 19.400000289082527, \"match_probability\": 0.9999985545024187, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 63.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1968.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0310192023633678, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9689807976366323, \"precision\": 1.0, \"recall\": 0.0310192023633678, \"specificity\": 1.0, \"npv\": 0.3678123996145198, \"accuracy\": 0.380352644836272, \"f1\": 0.06017191977077364, \"f2\": 0.03847563209967021, \"f0_5\": 0.13797634691195795, \"p4\": 0.10823426675729327, \"phi\": 0.1068140779831886}, {\"truth_threshold\": 19.500000290572643, \"match_probability\": 0.9999986513029383, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 62.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1969.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.03052683407188577, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9694731659281143, \"precision\": 1.0, \"recall\": 0.03052683407188577, \"specificity\": 1.0, \"npv\": 0.36769428387925496, \"accuracy\": 0.38003778337531485, \"f1\": 0.05924510272336359, \"f2\": 0.03786953334962131, \"f0_5\": 0.13602457218078104, \"p4\": 0.10673009231574156, \"phi\": 0.1059459408998895}, {\"truth_threshold\": 19.60000029206276, \"match_probability\": 0.9999987416210334, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 61.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1970.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.03003446578040374, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9699655342195963, \"precision\": 1.0, \"recall\": 0.03003446578040374, \"specificity\": 1.0, \"npv\": 0.36757624398073835, \"accuracy\": 0.3797229219143577, \"f1\": 0.058317399617590825, \"f2\": 0.03726328649969456, \"f0_5\": 0.13406593406593406, \"p4\": 0.10521994576679723, \"phi\": 0.10507119548919591}, {\"truth_threshold\": 19.700000293552876, \"match_probability\": 0.9999988258908107, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 60.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1971.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.029542097488921712, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9704579025110783, \"precision\": 1.0, \"recall\": 0.029542097488921712, \"specificity\": 1.0, \"npv\": 0.36745827984595636, \"accuracy\": 0.37940806045340053, \"f1\": 0.05738880918220947, \"f2\": 0.036656891495601175, \"f0_5\": 0.13210039630118892, \"p4\": 0.10370378756573473, \"phi\": 0.10418967475868576}, {\"truth_threshold\": 19.80000029504299, \"match_probability\": 0.9999989045173057, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 53.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1978.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.026095519448547513, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9739044805514525, \"precision\": 1.0, \"recall\": 0.026095519448547513, \"specificity\": 1.0, \"npv\": 0.36663464617355107, \"accuracy\": 0.37720403022670024, \"f1\": 0.0508637236084453, \"f2\": 0.03240797358444417, \"f0_5\": 0.1181453410610789, \"p4\": 0.0929189583154697, \"phi\": 0.09781370834261033}, {\"truth_threshold\": 19.900000296533108, \"match_probability\": 0.9999989778784306, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 46.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1985.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.022648941408173313, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9773510585918267, \"precision\": 1.0, \"recall\": 0.022648941408173313, \"specificity\": 1.0, \"npv\": 0.365814696485623, \"accuracy\": 0.375, \"f1\": 0.04429465575349061, \"f2\": 0.028151774785801713, \"f0_5\": 0.1038374717832957, \"p4\": 0.08182335429924091, \"phi\": 0.0910237091474061}, {\"truth_threshold\": 21.200000315904617, \"match_probability\": 0.9999995848894065, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 45.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1986.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.022156573116691284, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9778434268833087, \"precision\": 1.0, \"recall\": 0.022156573116691284, \"specificity\": 1.0, \"npv\": 0.3656978601085915, \"accuracy\": 0.37468513853904284, \"f1\": 0.04335260115606936, \"f2\": 0.027543150936467132, \"f0_5\": 0.10176390773405698, \"p4\": 0.08021203063702596, \"phi\": 0.09001450647597614}, {\"truth_threshold\": 21.500000320374966, \"match_probability\": 0.9999996628254004, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 44.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1987.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.021664204825209258, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9783357951747907, \"precision\": 1.0, \"recall\": 0.021664204825209258, \"specificity\": 1.0, \"npv\": 0.365581098339719, \"accuracy\": 0.3743702770780856, \"f1\": 0.042409638554216866, \"f2\": 0.02693437806072478, \"f0_5\": 0.09968282736746716, \"p4\": 0.07859401270561604, \"phi\": 0.08899451553133284}, {\"truth_threshold\": 21.600000321865082, \"match_probability\": 0.999999685404968, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 43.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1988.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.02117183653372723, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9788281634662728, \"precision\": 1.0, \"recall\": 0.02117183653372723, \"specificity\": 1.0, \"npv\": 0.3654644111075646, \"accuracy\": 0.37405541561712846, \"f1\": 0.041465766634522665, \"f2\": 0.026325456103832495, \"f0_5\": 0.09759418974126191, \"p4\": 0.07696925450739521, \"phi\": 0.08796336038865413}, {\"truth_threshold\": 21.700000323355198, \"match_probability\": 0.9999997064724503, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 42.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1989.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0206794682422452, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9793205317577548, \"precision\": 1.0, \"recall\": 0.0206794682422452, \"specificity\": 1.0, \"npv\": 0.3653477983407786, \"accuracy\": 0.3737405541561713, \"f1\": 0.04052098408104197, \"f2\": 0.025716385011021307, \"f0_5\": 0.09549795361527967, \"p4\": 0.0753377096255321, \"phi\": 0.08692064307839845}, {\"truth_threshold\": 21.90000032633543, \"match_probability\": 0.9999997444694171, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 41.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1990.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.02018709995076317, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9798129000492368, \"precision\": 1.0, \"recall\": 0.02018709995076317, \"specificity\": 1.0, \"npv\": 0.3652312599681021, \"accuracy\": 0.3734256926952141, \"f1\": 0.03957528957528957, \"f2\": 0.02510716472749541, \"f0_5\": 0.09339407744874716, \"p4\": 0.07369933121919056, \"phi\": 0.08586594173547067}, {\"truth_threshold\": 22.000000327825546, \"match_probability\": 0.9999997615815319, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 35.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1996.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.017232890201871, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.982767109798129, \"precision\": 1.0, \"recall\": 0.017232890201871, \"specificity\": 1.0, \"npv\": 0.36453358802929003, \"accuracy\": 0.371536523929471, \"f1\": 0.03388189738625363, \"f2\": 0.02144870694938105, \"f0_5\": 0.08060801473975127, \"p4\": 0.06372287901796007, \"phi\": 0.07925886257954269}, {\"truth_threshold\": 22.100000329315662, \"match_probability\": 0.9999997775477002, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 34.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1997.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.01674052191038897, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.983259478089611, \"precision\": 1.0, \"recall\": 0.01674052191038897, \"specificity\": 1.0, \"npv\": 0.364417568427753, \"accuracy\": 0.37122166246851385, \"f1\": 0.03292978208232446, \"f2\": 0.02083844079431233, \"f0_5\": 0.07844946931241348, \"p4\": 0.06203531774376609, \"phi\": 0.07810595552706255}, {\"truth_threshold\": 22.20000033080578, \"match_probability\": 0.9999997924446623, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 31.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2000.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.015263417035942885, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9847365829640571, \"precision\": 1.0, \"recall\": 0.015263417035942885, \"specificity\": 1.0, \"npv\": 0.3640699523052464, \"accuracy\": 0.3702770780856423, \"f1\": 0.030067895247332686, \"f2\": 0.01900674432863274, \"f0_5\": 0.07192575406032482, \"p4\": 0.0569290852372513, \"phi\": 0.0745449630242769}, {\"truth_threshold\": 22.300000332295895, \"match_probability\": 0.9999998063440199, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 27.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2004.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.013293943870014771, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9867060561299852, \"precision\": 1.0, \"recall\": 0.013293943870014771, \"specificity\": 1.0, \"npv\": 0.3636074944426802, \"accuracy\": 0.3690176322418136, \"f1\": 0.026239067055393587, \"f2\": 0.016562384983437616, \"f0_5\": 0.06311360448807854, \"p4\": 0.050017230584043997, \"phi\": 0.06952537394245138}, {\"truth_threshold\": 22.40000033378601, \"match_probability\": 0.9999998193125794, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 26.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2005.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.012801575578532743, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9871984244214672, \"precision\": 1.0, \"recall\": 0.012801575578532743, \"specificity\": 1.0, \"npv\": 0.3634920634920635, \"accuracy\": 0.36870277078085645, \"f1\": 0.025279533300923675, \"f2\": 0.015950920245398775, \"f0_5\": 0.06088992974238876, \"p4\": 0.048270424636238894, \"phi\": 0.0682148893057115}, {\"truth_threshold\": 22.600000336766243, \"match_probability\": 0.9999998427024609, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 25.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2006.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.012309207287050714, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9876907927129492, \"precision\": 1.0, \"recall\": 0.012309207287050714, \"specificity\": 1.0, \"npv\": 0.3633767058076801, \"accuracy\": 0.36838790931989923, \"f1\": 0.024319066147859923, \"f2\": 0.015339305436249846, \"f0_5\": 0.05865790708587518, \"p4\": 0.04651597386980402, \"phi\": 0.06687958728246145}, {\"truth_threshold\": 22.800000339746475, \"match_probability\": 
0.9999998630645361, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 23.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2008.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.011324470704086657, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9886755292959133, \"precision\": 1.0, \"recall\": 0.011324470704086657, \"specificity\": 1.0, \"npv\": 0.36314620995876945, \"accuracy\": 0.3677581863979849, \"f1\": 0.022395326192794548, \"f2\": 0.014115625383576776, \"f0_5\": 0.054168629298162976, \"p4\": 0.042983917959230976, \"phi\": 0.06412829809045448}, {\"truth_threshold\": 23.000000342726707, \"match_probability\": 0.999999880790753, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 21.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2010.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0103397341211226, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9896602658788775, \"precision\": 1.0, \"recall\": 0.0103397341211226, \"specificity\": 1.0, \"npv\": 0.3629160063391442, \"accuracy\": 0.36712846347607053, \"f1\": 0.02046783625730994, \"f2\": 0.01289134438305709, \"f0_5\": 0.04964539007092199, \"p4\": 0.03942061774542593, \"phi\": 0.06125728539403615}, {\"truth_threshold\": 23.100000344216824, \"match_probability\": 0.9999998887738388, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 20.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2011.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.009847365829640572, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9901526341703595, \"precision\": 1.0, \"recall\": 0.009847365829640572, \"specificity\": 1.0, \"npv\": 0.36280101394169834, \"accuracy\": 0.36681360201511337, \"f1\": 0.019502681618722574, \"f2\": 0.012278978388998035, \"f0_5\": 0.04737091425864519, \"p4\": 0.03762710959306445, \"phi\": 0.05977151752840505}, {\"truth_threshold\": 23.20000034570694, 
\"match_probability\": 0.9999998962223214, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 17.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2014.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.008370260955194485, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9916297390448056, \"precision\": 1.0, \"recall\": 0.008370260955194485, \"specificity\": 1.0, \"npv\": 0.3624564735675847, \"accuracy\": 0.36586901763224183, \"f1\": 0.0166015625, \"f2\": 0.010440977766859108, \"f0_5\": 0.040495474035254886, \"p4\": 0.03219846095822884, \"phi\": 0.05508044361350257}, {\"truth_threshold\": 23.400000348687172, \"match_probability\": 0.9999999096562825, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 15.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2016.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.007385524372230428, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9926144756277696, \"precision\": 1.0, \"recall\": 0.007385524372230428, \"specificity\": 1.0, \"npv\": 0.3622271433090794, \"accuracy\": 0.36523929471032746, \"f1\": 0.01466275659824047, \"f2\": 0.009214891264283081, \"f0_5\": 0.035868005738880916, \"p4\": 0.028538670521671944, \"phi\": 0.05172269709897784}, {\"truth_threshold\": 23.500000350177288, \"match_probability\": 0.9999999157063305, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 12.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2019.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.005908419497784343, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9940915805022157, \"precision\": 1.0, \"recall\": 0.005908419497784343, \"specificity\": 1.0, \"npv\": 0.36188369152970923, \"accuracy\": 0.3642947103274559, \"f1\": 0.011747430249632892, \"f2\": 0.007374631268436578, \"f0_5\": 0.02886002886002886, \"p4\": 0.022986746233599045, \"phi\": 0.046240249339339734}, {\"truth_threshold\": 
23.600000351667404, \"match_probability\": 0.9999999213512251, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 10.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2021.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.004923682914820286, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9950763170851797, \"precision\": 1.0, \"recall\": 0.004923682914820286, \"specificity\": 1.0, \"npv\": 0.3616550852811118, \"accuracy\": 0.36366498740554154, \"f1\": 0.009799118079372856, \"f2\": 0.006147037128104254, \"f0_5\": 0.024142926122646065, \"p4\": 0.019243252781973526, \"phi\": 0.0421980445572598}, {\"truth_threshold\": 23.70000035315752, \"match_probability\": 0.9999999266180979, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 9.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2022.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.004431314623338257, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9955686853766618, \"precision\": 1.0, \"recall\": 0.004431314623338257, \"specificity\": 1.0, \"npv\": 0.361540890432586, \"accuracy\": 0.3633501259445844, \"f1\": 0.008823529411764706, \"f2\": 0.005533013648100332, \"f0_5\": 0.02177068214804064, \"p4\": 0.017358654565300884, \"phi\": 0.040026259314463214}, {\"truth_threshold\": 23.900000356137753, \"match_probability\": 0.9999999361173434, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 7.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2024.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0034465780403741997, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9965534219596258, \"precision\": 1.0, \"recall\": 0.0034465780403741997, \"specificity\": 1.0, \"npv\": 0.3613127169454087, \"accuracy\": 0.36272040302267, \"f1\": 0.0068694798822374874, \"f2\": 0.004304513589964334, \"f0_5\": 0.016998542982030112, \"p4\": 0.013563435077429192, \"phi\": 0.035288701817040316}, 
{\"truth_threshold\": 24.100000359117985, \"match_probability\": 0.9999999443869169, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 6.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2025.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0029542097488921715, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9970457902511078, \"precision\": 1.0, \"recall\": 0.0029542097488921715, \"specificity\": 1.0, \"npv\": 0.361198738170347, \"accuracy\": 0.36240554156171284, \"f1\": 0.005891016200294551, \"f2\": 0.0036900369003690036, \"f0_5\": 0.014598540145985401, \"p4\": 0.011652683870064942, \"phi\": 0.032665835877723835}, {\"truth_threshold\": 24.2000003606081, \"match_probability\": 0.9999999481111586, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 5.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2026.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.002461841457410143, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9975381585425899, \"precision\": 1.0, \"recall\": 0.002461841457410143, \"specificity\": 1.0, \"npv\": 0.36108483128350677, \"accuracy\": 0.3620906801007557, \"f1\": 0.004911591355599214, \"f2\": 0.0030754090294009104, \"f0_5\": 0.01218917601170161, \"p4\": 0.009733083985039102, \"phi\": 0.02981498964104606}, {\"truth_threshold\": 24.300000362098217, \"match_probability\": 0.999999951585999, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 4.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2027.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0019694731659281144, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9980305268340719, \"precision\": 1.0, \"recall\": 0.0019694731659281144, \"specificity\": 1.0, \"npv\": 0.3609709962168979, \"accuracy\": 0.36177581863979846, \"f1\": 0.003931203931203931, \"f2\": 0.0024606299212598425, \"f0_5\": 0.009770395701025891, \"p4\": 0.007804568825263287, \"phi\": 
0.026663133550419747}, {\"truth_threshold\": 24.400000363588333, \"match_probability\": 0.9999999548281396, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 3.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2028.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0014771048744460858, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9985228951255539, \"precision\": 1.0, \"recall\": 0.0014771048744460858, \"specificity\": 1.0, \"npv\": 0.36085723290261584, \"accuracy\": 0.3614609571788413, \"f1\": 0.0029498525073746312, \"f2\": 0.0018456995201181247, \"f0_5\": 0.007342143906020558, \"p4\": 0.00586707112734875, \"phi\": 0.023087312050119223}, {\"truth_threshold\": 24.600000366568565, \"match_probability\": 0.9999999606756114, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 2.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2029.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0009847365829640572, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.999015263417036, \"precision\": 1.0, \"recall\": 0.0009847365829640572, \"specificity\": 1.0, \"npv\": 0.36074354127284186, \"accuracy\": 0.36114609571788414, \"f1\": 0.001967535661583866, \"f2\": 0.0012306177701206006, \"f0_5\": 0.004904364884747425, \"p4\": 0.003920522953249476, \"phi\": 0.018847741566547744}, {\"truth_threshold\": 25.100000374019146, \"match_probability\": 0.9999999721934579, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2030.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0004923682914820286, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.999507631708518, \"precision\": 1.0, \"recall\": 0.0004923682914820286, \"specificity\": 1.0, \"npv\": 0.3606299212598425, \"accuracy\": 0.3608312342569269, \"f1\": 0.000984251968503937, \"f2\": 0.0006153846153846154, \"f0_5\": 0.002457002457002457, \"p4\": 
0.001964855681779181, \"phi\": 0.013325266908696693}]}}, {\"mode\": \"vega-lite\"});\n",
              "</script>"
            ],
            "text/plain": [
              "alt.LayerChart(...)"
            ]
          },
          "execution_count": 2,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "chart"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "!!! info \"At a glance\"\n",
        "    **Useful for:** Selecting an optimal match weight threshold for generating linked clusters.\n",
        "\n",
        "    **API Documentation:** [accuracy_analysis_from_labels_table()](../api_docs/evaluation.md#splink.internals.linker_components.evaluation.LinkerEvalution.accuracy_analysis_from_labels_table)\n",
        "\n",
        "    **What is needed to generate the chart?** A `linker` with some data and a corresponding labelled dataset"
      ]
    },
    {
      "attachments": {},
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "### What the chart shows\n",
        "\n",
        "For a given match weight threshold, a record pair with a score above this threshold will be labelled a match and below the threshold will be labelled a non-match. For all possible match weight thresholds, this chart shows various accuracy metrics comparing the Splink scores against clerical labels. \n",
        "\n",
        "**Precision** and **recall** are shown by default, but various additional metrics can be added: specificity, negative predictive value (NPV), accuracy, $F_1$, $F_2$, $F_{0.5}$, $P_4$ and $\\phi$ (Matthews correlation coefficient)."
      ]
    },
    {
      "attachments": {},
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "### How to interpret the chart\n",
        "\n",
        "**Precision** can be maximised by **increasing** the match threshold (reducing false positives).\n",
        "\n",
        "**Recall** can be maximised by **decreasing** the match threshold (reducing false negatives). \n",
        "\n",
        "Additional metrics can be used to find the optimal compromise between these two, looking for the threshold at which peak accuracy is achieved. \n",
        "\n",
        "!!! info \"Confusion matrix\"\n",
        "\n",
        "    See [threshold_selection_tool_from_labels_table](threshold_selection_tool_from_labels_table.ipynb) for a more complete visualisation of the impact of match threshold on false positives and false negatives, with reference to the confusion matrix."
      ]
    },
    {
      "attachments": {},
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "### Actions to take as a result of the chart\n",
        "\n",
        "Having identified an optimal match weight threshold, this can be applied when generating linked clusters using [cluster_pairwise_predictions_at_threshold()](../api_docs/clustering.md#splink.clustering.cluster_pairwise_predictions_at_threshold)."
      ]
    },
    {
      "attachments": {},
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "## Worked Example"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 1,
      "metadata": {
        "tags": [
          "hide_output"
        ]
      },
      "outputs": [
        {
          "name": "stderr",
          "output_type": "stream",
          "text": [
            "Probability two random records match is estimated to be  0.000821.\n",
            "This means that amongst all possible pairwise record comparisons, one in 1,218.29 are expected to match.  With 499,500 total possible comparisons, we expect a total of around 410.00 matching pairs\n",
            "You are using the default value for `max_pairs`, which may be too small and thus lead to inaccurate estimates for your model's u-parameters. Consider increasing to 1e8 or 1e9, which will result in more accurate estimates, but with a longer run time.\n",
            "----- Estimating u probabilities using random sampling -----\n",
            "u probability not trained for dob - Abs difference of 'transformed dob <= 1 month' (comparison vector value: 1). This usually means the comparison level was never observed in the training data.\n",
            "\n",
            "Estimated u probabilities using random sampling\n",
            "\n",
            "Your model is not yet fully trained. Missing estimates for:\n",
            "    - first_name (no m values are trained).\n",
            "    - surname (no m values are trained).\n",
            "    - dob (some u values are not trained, no m values are trained).\n",
            "    - city (no m values are trained).\n",
            "    - email (no m values are trained).\n",
            "\n",
            "----- Starting EM training session -----\n",
            "\n",
            "Estimating the m probabilities of the model by blocking on:\n",
            "(l.\"first_name\" = r.\"first_name\") AND (l.\"surname\" = r.\"surname\")\n",
            "\n",
            "Parameter estimates will be made for the following comparison(s):\n",
            "    - dob\n",
            "    - city\n",
            "    - email\n",
            "\n",
            "Parameter estimates cannot be made for the following comparison(s) since they are used in the blocking rules: \n",
            "    - first_name\n",
            "    - surname\n",
            "\n",
            "WARNING:\n",
            "Level Abs difference of 'transformed dob <= 1 month' on comparison dob not observed in dataset, unable to train m value\n",
            "\n",
            "WARNING:\n",
            "Level Jaro-Winkler distance of transformed email >= 0.88 on comparison email not observed in dataset, unable to train m value\n",
            "\n",
            "Iteration 1: Largest change in params was -0.51 in the m_probability of dob, level `Exact match on dob`\n",
            "Iteration 2: Largest change in params was 0.0782 in probability_two_random_records_match\n",
            "Iteration 3: Largest change in params was 0.0205 in probability_two_random_records_match\n",
            "Iteration 4: Largest change in params was 0.00737 in probability_two_random_records_match\n",
            "Iteration 5: Largest change in params was 0.00323 in probability_two_random_records_match\n",
            "Iteration 6: Largest change in params was 0.00161 in probability_two_random_records_match\n",
            "Iteration 7: Largest change in params was 0.000862 in probability_two_random_records_match\n",
            "Iteration 8: Largest change in params was 0.000482 in probability_two_random_records_match\n",
            "Iteration 9: Largest change in params was 0.000276 in probability_two_random_records_match\n",
            "Iteration 10: Largest change in params was 0.00016 in probability_two_random_records_match\n",
            "Iteration 11: Largest change in params was 9.35e-05 in probability_two_random_records_match\n",
            "\n",
            "EM converged after 11 iterations\n",
            "m probability not trained for dob - Abs difference of 'transformed dob <= 1 month' (comparison vector value: 1). This usually means the comparison level was never observed in the training data.\n",
            "m probability not trained for email - Jaro-Winkler distance of transformed email >= 0.88 (comparison vector value: 1). This usually means the comparison level was never observed in the training data.\n",
            "\n",
            "Your model is not yet fully trained. Missing estimates for:\n",
            "    - first_name (no m values are trained).\n",
            "    - surname (no m values are trained).\n",
            "    - dob (some u values are not trained, some m values are not trained).\n",
            "    - email (some m values are not trained).\n",
            "\n",
            "----- Starting EM training session -----\n",
            "\n",
            "Estimating the m probabilities of the model by blocking on:\n",
            "l.\"dob\" = r.\"dob\"\n",
            "\n",
            "Parameter estimates will be made for the following comparison(s):\n",
            "    - first_name\n",
            "    - surname\n",
            "    - city\n",
            "    - email\n",
            "\n",
            "Parameter estimates cannot be made for the following comparison(s) since they are used in the blocking rules: \n",
            "    - dob\n",
            "\n",
            "WARNING:\n",
            "Level Jaro-Winkler distance of transformed email >= 0.88 on comparison email not observed in dataset, unable to train m value\n",
            "\n",
            "Iteration 1: Largest change in params was 0.588 in probability_two_random_records_match\n",
            "Iteration 2: Largest change in params was 0.128 in probability_two_random_records_match\n",
            "Iteration 3: Largest change in params was 0.0558 in the m_probability of first_name, level `All other comparisons`\n",
            "Iteration 4: Largest change in params was 0.0183 in probability_two_random_records_match\n",
            "Iteration 5: Largest change in params was 0.00723 in probability_two_random_records_match\n",
            "Iteration 6: Largest change in params was 0.00319 in probability_two_random_records_match\n",
            "Iteration 7: Largest change in params was 0.00149 in probability_two_random_records_match\n",
            "Iteration 8: Largest change in params was 0.000709 in probability_two_random_records_match\n",
            "Iteration 9: Largest change in params was 0.000343 in probability_two_random_records_match\n",
            "Iteration 10: Largest change in params was 0.000168 in probability_two_random_records_match\n",
            "Iteration 11: Largest change in params was 8.47e-05 in probability_two_random_records_match\n",
            "\n",
            "EM converged after 11 iterations\n",
            "m probability not trained for email - Jaro-Winkler distance of transformed email >= 0.88 (comparison vector value: 1). This usually means the comparison level was never observed in the training data.\n",
            "\n",
            "Your model is not yet fully trained. Missing estimates for:\n",
            "    - dob (some u values are not trained, some m values are not trained).\n",
            "    - email (some m values are not trained).\n"
          ]
        }
      ],
      "source": [
        "import splink.comparison_library as cl\n",
        "from splink import DuckDBAPI, Linker, SettingsCreator, block_on, splink_datasets\n",
        "from splink.datasets import splink_dataset_labels\n",
        "\n",
        "db_api = DuckDBAPI()\n",
        "\n",
        "df = splink_datasets.fake_1000\n",
        "\n",
        "settings = SettingsCreator(\n",
        "    link_type=\"dedupe_only\",\n",
        "    comparisons=[\n",
        "        cl.JaroWinklerAtThresholds(\"first_name\", [0.9, 0.7]),\n",
        "        cl.JaroAtThresholds(\"surname\", [0.9, 0.7]),\n",
        "        cl.DateOfBirthComparison(\n",
        "            \"dob\",\n",
        "            input_is_string=True,\n",
        "            datetime_metrics=[\"year\", \"month\"],\n",
        "            datetime_thresholds=[1, 1],\n",
        "        ),\n",
        "        cl.ExactMatch(\"city\").configure(term_frequency_adjustments=True),\n",
        "        cl.EmailComparison(\"email\"),\n",
        "    ],\n",
        "    blocking_rules_to_generate_predictions=[\n",
        "        block_on(\"substr(first_name,1,1)\"),\n",
        "        block_on(\"substr(surname, 1,1)\"),\n",
        "    ],\n",
        ")\n",
        "\n",
        "linker = Linker(df, settings, db_api)\n",
        "\n",
        "linker.training.estimate_probability_two_random_records_match(\n",
        "    [block_on(\"first_name\", \"surname\")], recall=0.7\n",
        ")\n",
        "linker.training.estimate_u_using_random_sampling(max_pairs=1e6)\n",
        "\n",
        "blocking_rule_for_training = block_on(\"first_name\", \"surname\")\n",
        "\n",
        "linker.training.estimate_parameters_using_expectation_maximisation(\n",
        "    blocking_rule_for_training\n",
        ")\n",
        "\n",
        "blocking_rule_for_training = block_on(\"dob\")\n",
        "linker.training.estimate_parameters_using_expectation_maximisation(\n",
        "    blocking_rule_for_training\n",
        ")\n",
        "\n",
        "\n",
        "df_labels = splink_dataset_labels.fake_1000_labels\n",
        "labels_table = linker.table_management.register_labels_table(df_labels)\n",
        "\n",
        "chart = linker.evaluation.accuracy_analysis_from_labels_table(\n",
        "    labels_table, output_type=\"accuracy\", add_metrics=[\"f1\"]\n",
        ")"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {},
      "source": [
        "Note that you can also produce a ROC chart, a precision recall chart, or get the results as a table:"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 3,
      "metadata": {},
      "outputs": [
        {
          "data": {
            "text/html": [
              "\n",
              "<style>\n",
              "  #altair-viz-de5f031e05104fb6af1495efdd1831fd.vega-embed {\n",
              "    width: 100%;\n",
              "    display: flex;\n",
              "  }\n",
              "\n",
              "  #altair-viz-de5f031e05104fb6af1495efdd1831fd.vega-embed details,\n",
              "  #altair-viz-de5f031e05104fb6af1495efdd1831fd.vega-embed details summary {\n",
              "    position: relative;\n",
              "  }\n",
              "</style>\n",
              "<div id=\"altair-viz-de5f031e05104fb6af1495efdd1831fd\"></div>\n",
              "<script type=\"text/javascript\">\n",
              "  var VEGA_DEBUG = (typeof VEGA_DEBUG == \"undefined\") ? {} : VEGA_DEBUG;\n",
              "  (function(spec, embedOpt){\n",
              "    let outputDiv = document.currentScript.previousElementSibling;\n",
              "    if (outputDiv.id !== \"altair-viz-de5f031e05104fb6af1495efdd1831fd\") {\n",
              "      outputDiv = document.getElementById(\"altair-viz-de5f031e05104fb6af1495efdd1831fd\");\n",
              "    }\n",
              "    const paths = {\n",
              "      \"vega\": \"https://cdn.jsdelivr.net/npm/vega@5?noext\",\n",
              "      \"vega-lib\": \"https://cdn.jsdelivr.net/npm/vega-lib?noext\",\n",
              "      \"vega-lite\": \"https://cdn.jsdelivr.net/npm/vega-lite@5.17.0?noext\",\n",
              "      \"vega-embed\": \"https://cdn.jsdelivr.net/npm/vega-embed@6?noext\",\n",
              "    };\n",
              "\n",
              "    function maybeLoadScript(lib, version) {\n",
              "      var key = `${lib.replace(\"-\", \"\")}_version`;\n",
              "      return (VEGA_DEBUG[key] == version) ?\n",
              "        Promise.resolve(paths[lib]) :\n",
              "        new Promise(function(resolve, reject) {\n",
              "          var s = document.createElement('script');\n",
              "          document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
              "          s.async = true;\n",
              "          s.onload = () => {\n",
              "            VEGA_DEBUG[key] = version;\n",
              "            return resolve(paths[lib]);\n",
              "          };\n",
              "          s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
              "          s.src = paths[lib];\n",
              "        });\n",
              "    }\n",
              "\n",
              "    function showError(err) {\n",
              "      outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
              "      throw err;\n",
              "    }\n",
              "\n",
              "    function displayChart(vegaEmbed) {\n",
              "      vegaEmbed(outputDiv, spec, embedOpt)\n",
              "        .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
              "    }\n",
              "\n",
              "    if(typeof define === \"function\" && define.amd) {\n",
              "      requirejs.config({paths});\n",
              "      require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
              "    } else {\n",
              "      maybeLoadScript(\"vega\", \"5\")\n",
              "        .then(() => maybeLoadScript(\"vega-lite\", \"5.17.0\"))\n",
              "        .then(() => maybeLoadScript(\"vega-embed\", \"6\"))\n",
              "        .catch(showError)\n",
              "        .then(() => displayChart(vegaEmbed));\n",
              "    }\n",
              "  })({\"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300}}, \"data\": {\"name\": \"data-88481be63707845e366cd370703ce8b4\"}, \"mark\": {\"type\": \"line\", \"clip\": true, \"point\": true}, \"encoding\": {\"tooltip\": [{\"field\": \"truth_threshold\", \"format\": \".4f\", \"type\": \"quantitative\"}, {\"field\": \"match_probability\", \"format\": \".4%\", \"type\": \"quantitative\"}, {\"field\": \"fp_rate\", \"format\": \".4f\", \"title\": \"FP_rate\", \"type\": \"quantitative\"}, {\"field\": \"tp_rate\", \"format\": \".4f\", \"title\": \"TP_rate\", \"type\": \"quantitative\"}, {\"field\": \"tp\", \"format\": \",.0f\", \"title\": \"TP\", \"type\": \"quantitative\"}, {\"field\": \"tn\", \"format\": \",.0f\", \"title\": \"TN\", \"type\": \"quantitative\"}, {\"field\": \"fp\", \"format\": \",.0f\", \"title\": \"FP\", \"type\": \"quantitative\"}, {\"field\": \"fn\", \"format\": \",.0f\", \"title\": \"FN\", \"type\": \"quantitative\"}, {\"field\": \"precision\", \"format\": \".4f\", \"type\": \"quantitative\"}, {\"field\": \"recall\", \"format\": \".4f\", \"type\": \"quantitative\"}, {\"field\": \"f1\", \"format\": \".4f\", \"title\": \"F1\", \"type\": \"quantitative\"}], \"x\": {\"field\": \"fp_rate\", \"sort\": [\"truth_threshold\"], \"title\": \"False Positive Rate amongst clerically reviewed records\", \"type\": \"quantitative\"}, \"y\": {\"field\": \"tp_rate\", \"sort\": [\"truth_threshold\"], \"title\": \"True Positive Rate amongst clerically reviewed records\", \"type\": \"quantitative\"}}, \"height\": 400, \"params\": [{\"name\": \"mouse_zoom\", \"select\": {\"type\": \"interval\", \"encodings\": [\"x\"]}, \"bind\": \"scales\"}], \"title\": \"Receiver operating characteristic curve\", \"width\": 400, \"$schema\": \"https://vega.github.io/schema/vega-lite/v5.9.3.json\", \"datasets\": {\"data-88481be63707845e366cd370703ce8b4\": [{\"truth_threshold\": -23.800000354647636, \"match_probability\": 6.846773588489456e-08, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1055.0, \"fp\": 90.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9213973799126638, \"fp_rate\": 0.07860262008733625, \"fn_rate\": 0.2880354505169867, \"precision\": 0.94140625, \"recall\": 0.7119645494830132, \"specificity\": 0.9213973799126638, \"npv\": 0.6432926829268293, \"accuracy\": 0.7874685138539043, \"f1\": 0.8107653490328006, \"f2\": 0.7484472049689441, \"f0_5\": 0.8844036697247707, \"p4\": 0.7832976799979975, \"phi\": 0.6085442007563051}, {\"truth_threshold\": -22.70000033825636, \"match_probability\": 1.467637948991862e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1077.0, \"fp\": 68.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9406113537117904, \"fp_rate\": 0.059388646288209605, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9550858652575958, \"recall\": 0.7119645494830132, \"specificity\": 0.9406113537117904, \"npv\": 0.648014440433213, \"accuracy\": 0.7943954659949622, \"f1\": 0.8157968970380818, \"f2\": 0.750155633948952, \"f0_5\": 0.8940274514653147, \"p4\": 0.7908413564901972, \"phi\": 0.6273505612520337}, {\"truth_threshold\": -21.700000323355198, \"match_probability\": 2.9352754975091214e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1083.0, \"fp\": 62.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9458515283842794, \"fp_rate\": 0.05414847161572053, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9588859416445623, \"recall\": 0.7119645494830132, \"specificity\": 0.9458515283842794, \"npv\": 0.6492805755395683, \"accuracy\": 0.7962846347607053, \"f1\": 0.817179994348686, \"f2\": 0.7506229235880398, \"f0_5\": 
0.896688577452561, \"p4\": 0.792886883910619, \"phi\": 0.6325043185815227}, {\"truth_threshold\": -21.600000321865082, \"match_probability\": 3.1459503204353755e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1088.0, \"fp\": 57.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9502183406113537, \"fp_rate\": 0.04978165938864629, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9620758483033932, \"recall\": 0.7119645494830132, \"specificity\": 0.9502183406113537, \"npv\": 0.6503287507471608, \"accuracy\": 0.7978589420654912, \"f1\": 0.8183361629881154, \"f2\": 0.7510127765659084, \"f0_5\": 0.8989183140619172, \"p4\": 0.7945877557823284, \"phi\": 0.6368075433805553}, {\"truth_threshold\": -20.60000030696392, \"match_probability\": 6.29189872645777e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1094.0, \"fp\": 51.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9554585152838428, \"fp_rate\": 0.0445414847161572, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9659318637274549, \"recall\": 0.7119645494830132, \"specificity\": 0.9554585152838428, \"npv\": 0.6515783204288267, \"accuracy\": 0.7997481108312342, \"f1\": 0.8197278911564626, \"f2\": 0.75148113501715, \"f0_5\": 0.9016086793864572, \"p4\": 0.7966244062798371, \"phi\": 0.6419817284271657}, {\"truth_threshold\": -19.000000283122063, \"match_probability\": 1.907344620533969e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1096.0, \"fp\": 49.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9572052401746725, \"fp_rate\": 0.04279475982532751, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9672240802675586, \"recall\": 0.7119645494830132, 
\"specificity\": 0.9572052401746725, \"npv\": 0.6519928613920285, \"accuracy\": 0.8003778337531486, \"f1\": 0.8201928530913216, \"f2\": 0.7516373843434868, \"f0_5\": 0.9025090500561728, \"p4\": 0.7973022397990062, \"phi\": 0.6437089952787838}, {\"truth_threshold\": -17.900000266730785, \"match_probability\": 4.088473825324779e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1097.0, \"fp\": 48.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9580786026200874, \"fp_rate\": 0.04192139737991266, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9678714859437751, \"recall\": 0.7119645494830132, \"specificity\": 0.9580786026200874, \"npv\": 0.6521997621878716, \"accuracy\": 0.8006926952141058, \"f1\": 0.8204255319148936, \"f2\": 0.751715533374922, \"f0_5\": 0.9029599100786811, \"p4\": 0.7976409617025867, \"phi\": 0.6445731096055997}, {\"truth_threshold\": -17.600000262260437, \"match_probability\": 5.03349696795731e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1100.0, \"fp\": 45.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9606986899563319, \"fp_rate\": 0.039301310043668124, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9698189134808853, \"recall\": 0.7119645494830132, \"specificity\": 0.9606986899563319, \"npv\": 0.6528189910979229, \"accuracy\": 0.8016372795969773, \"f1\": 0.8211243611584327, \"f2\": 0.7519500780031201, \"f0_5\": 0.9043151969981238, \"p4\": 0.7986563533248476, \"phi\": 0.6471673893914208}, {\"truth_threshold\": -17.50000026077032, \"match_probability\": 5.394766530610173e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1108.0, \"fp\": 37.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 
0.7119645494830132, \"tn_rate\": 0.9676855895196507, \"fp_rate\": 0.032314410480349345, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9750505731625084, \"recall\": 0.7119645494830132, \"specificity\": 0.9676855895196507, \"npv\": 0.6544595392793857, \"accuracy\": 0.8041561712846348, \"f1\": 0.8229937393284007, \"f2\": 0.7525762464869367, \"f0_5\": 0.9079492653522542, \"p4\": 0.8013584652743565, \"phi\": 0.6540998665530485}, {\"truth_threshold\": -16.900000251829624, \"match_probability\": 8.176914304005986e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1110.0, \"fp\": 35.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9694323144104804, \"fp_rate\": 0.03056768558951965, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9763673193787981, \"recall\": 0.7119645494830132, \"specificity\": 0.9694323144104804, \"npv\": 0.6548672566371682, \"accuracy\": 0.8047858942065491, \"f1\": 0.8234624145785877, \"f2\": 0.7527329515877147, \"f0_5\": 0.9088623507228158, \"p4\": 0.8020327397013851, \"phi\": 0.6558363061606292}, {\"truth_threshold\": -16.50000024586916, \"match_probability\": 1.0789474965962542e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1111.0, \"fp\": 34.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9703056768558952, \"fp_rate\": 0.029694323144104803, \"fn_rate\": 0.2880354505169867, \"precision\": 0.977027027027027, \"recall\": 0.7119645494830132, \"specificity\": 0.9703056768558952, \"npv\": 0.6550707547169812, \"accuracy\": 0.8051007556675063, \"f1\": 0.8236969524352037, \"f2\": 0.7528113286130779, \"f0_5\": 0.90931958244246, \"p4\": 0.8023696912661274, \"phi\": 0.6567050301458078}, {\"truth_threshold\": -15.800000235438347, \"match_probability\": 1.7527435818536736e-05, \"total_clerical_labels\": 
3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1120.0, \"fp\": 25.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9781659388646288, \"fp_rate\": 0.021834061135371178, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9830047586675731, \"recall\": 0.7119645494830132, \"specificity\": 0.9781659388646288, \"npv\": 0.656891495601173, \"accuracy\": 0.8079345088161209, \"f1\": 0.8258138206739006, \"f2\": 0.7535174570088587, \"f0_5\": 0.9134554643082754, \"p4\": 0.8053967630362511, \"phi\": 0.6645388735433893}, {\"truth_threshold\": -15.400000229477882, \"match_probability\": 2.312746079632102e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1121.0, \"fp\": 24.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9790393013100437, \"fp_rate\": 0.02096069868995633, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9836734693877551, \"recall\": 0.7119645494830132, \"specificity\": 0.9790393013100437, \"npv\": 0.6570926143024619, \"accuracy\": 0.8082493702770781, \"f1\": 0.8260497000856898, \"f2\": 0.7535959974984365, \"f0_5\": 0.9139173302995829, \"p4\": 0.8057325018854461, \"phi\": 0.6654110243207023}, {\"truth_threshold\": -15.20000022649765, \"match_probability\": 2.6566384864664307e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1126.0, \"fp\": 19.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9834061135371179, \"fp_rate\": 0.016593886462882096, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9870307167235495, \"recall\": 0.7119645494830132, \"specificity\": 0.9834061135371179, \"npv\": 0.6580946814728229, \"accuracy\": 0.809823677581864, \"f1\": 0.8272311212814645, \"f2\": 0.75398894566691, \"f0_5\": 0.9162336839437334, 
\"p4\": 0.8074094203617235, \"phi\": 0.6697770344487317}, {\"truth_threshold\": -14.300000213086605, \"match_probability\": 4.957348695121048e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1126.0, \"fp\": 19.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9834061135371179, \"fp_rate\": 0.016593886462882096, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9870218579234973, \"recall\": 0.7114721811915312, \"specificity\": 0.9834061135371179, \"npv\": 0.6577102803738317, \"accuracy\": 0.8095088161209067, \"f1\": 0.82689556509299, \"f2\": 0.7535460992907801, \"f0_5\": 0.9160644097882592, \"p4\": 0.8071048961802441, \"phi\": 0.6693357668739984}, {\"truth_threshold\": -13.200000196695328, \"match_probability\": 0.00010625707305470121, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1135.0, \"fp\": 10.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9912663755458515, \"fp_rate\": 0.008733624454148471, \"fn_rate\": 0.28852781880846873, \"precision\": 0.993127147766323, \"recall\": 0.7114721811915312, \"specificity\": 0.9912663755458515, \"npv\": 0.6595002905287624, \"accuracy\": 0.8123425692695214, \"f1\": 0.8290304073436604, \"f2\": 0.7542540975049588, \"f0_5\": 0.9202649344032607, \"p4\": 0.8101156090778194, \"phi\": 0.6772196571827369}, {\"truth_threshold\": -12.800000190734863, \"match_probability\": 0.00014020228918616167, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1137.0, \"fp\": 8.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9930131004366812, \"fp_rate\": 0.0069868995633187774, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9944941500344116, \"recall\": 0.7114721811915312, 
\"specificity\": 0.9930131004366812, \"npv\": 0.6598955310504934, \"accuracy\": 0.8129722921914357, \"f1\": 0.8295063145809415, \"f2\": 0.7544116111517176, \"f0_5\": 0.9212036210633686, \"p4\": 0.8107834022242488, \"phi\": 0.6789756245799222}, {\"truth_threshold\": -12.400000184774399, \"match_probability\": 0.00018498974370122882, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1138.0, \"fp\": 7.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.993886462882096, \"fp_rate\": 0.00611353711790393, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9951790633608816, \"recall\": 0.7114721811915312, \"specificity\": 0.993886462882096, \"npv\": 0.660092807424594, \"accuracy\": 0.8132871536523929, \"f1\": 0.8297444731553258, \"f2\": 0.7544903926482874, \"f0_5\": 0.9216736828677127, \"p4\": 0.8111171302195401, \"phi\": 0.6798541595642643}, {\"truth_threshold\": -11.700000174343586, \"match_probability\": 0.0003004820136373637, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1139.0, \"fp\": 6.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.994759825327511, \"fp_rate\": 0.005240174672489083, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9958649207443143, \"recall\": 0.7114721811915312, \"specificity\": 0.994759825327511, \"npv\": 0.6602898550724637, \"accuracy\": 0.8136020151133502, \"f1\": 0.8299827685238369, \"f2\": 0.7545691906005222, \"f0_5\": 0.9221442246330568, \"p4\": 0.8114507463687338, \"phi\": 0.680733063624895}, {\"truth_threshold\": -11.300000168383121, \"match_probability\": 0.00039645033391533577, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1142.0, \"fp\": 3.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 
0.7114721811915312, \"tn_rate\": 0.9973799126637555, \"fp_rate\": 0.0026200873362445414, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9979281767955801, \"recall\": 0.7114721811915312, \"specificity\": 0.9973799126637555, \"npv\": 0.6608796296296297, \"accuracy\": 0.8145465994962217, \"f1\": 0.8306984765737281, \"f2\": 0.7548056832427915, \"f0_5\": 0.9235587370573949, \"p4\": 0.81245092776752, \"phi\": 0.683372001937977}, {\"truth_threshold\": -10.900000162422657, \"match_probability\": 0.0005230530993675534, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1143.0, \"fp\": 2.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9986178299930891, \"recall\": 0.7114721811915312, \"specificity\": 0.9982532751091703, \"npv\": 0.6610757663389243, \"accuracy\": 0.8148614609571788, \"f1\": 0.8309373202990225, \"f2\": 0.7548845470692718, \"f0_5\": 0.9240312060365775, \"p4\": 0.8127841005555416, \"phi\": 0.6842523939858662}, {\"truth_threshold\": -10.700000159442425, \"match_probability\": 0.0006007835088396779, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1444.0, \"tn\": 1143.0, \"fp\": 2.0, \"fn\": 587.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7109798129000492, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.28902018709995075, \"precision\": 0.9986168741355463, \"recall\": 0.7109798129000492, \"specificity\": 0.9982532751091703, \"npv\": 0.6606936416184971, \"accuracy\": 0.8145465994962217, \"f1\": 0.8306010928961749, \"f2\": 0.754440961337513, \"f0_5\": 0.9238643634037108, \"p4\": 0.8124788095466353, \"phi\": 0.6838163737767555}, {\"truth_threshold\": -10.200000151991844, \"match_probability\": 0.0008494248089972806, \"total_clerical_labels\": 
3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1443.0, \"tn\": 1143.0, \"fp\": 2.0, \"fn\": 588.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7104874446085672, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.2895125553914328, \"precision\": 0.9986159169550173, \"recall\": 0.7104874446085672, \"specificity\": 0.9982532751091703, \"npv\": 0.6603119584055459, \"accuracy\": 0.8142317380352645, \"f1\": 0.830264672036824, \"f2\": 0.7539972828926743, \"f0_5\": 0.9236973498911791, \"p4\": 0.8121735251016357, \"phi\": 0.6833805796370901}, {\"truth_threshold\": -9.100000135600567, \"match_probability\": 0.0018190150448253225, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1443.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 588.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7104874446085672, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 0.0008733624454148472, \"fn_rate\": 0.2895125553914328, \"precision\": 0.9993074792243767, \"recall\": 0.7104874446085672, \"specificity\": 0.9991266375545852, \"npv\": 0.6605080831408776, \"accuracy\": 0.8145465994962217, \"f1\": 0.8305035971223022, \"f2\": 0.7540760869565217, \"f0_5\": 0.9241706161137441, \"p4\": 0.8125064984715595, \"phi\": 0.6842619488798015}, {\"truth_threshold\": -8.600000128149986, \"match_probability\": 0.0025705389597152823, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1440.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 591.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7090103397341211, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 0.0008733624454148472, \"fn_rate\": 0.29098966026587886, \"precision\": 0.9993060374739764, \"recall\": 0.7090103397341211, \"specificity\": 0.9991266375545852, \"npv\": 0.6593659942363113, \"accuracy\": 0.8136020151133502, \"f1\": 0.8294930875576036, \"f2\": 0.7527443805541035, \"f0_5\": 0.9236690186016677, 
\"p4\": 0.811590547208778, \"phi\": 0.6829568226176045}, {\"truth_threshold\": -8.400000125169754, \"match_probability\": 0.0029516456585356845, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1439.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 592.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7085179714426391, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 0.0008733624454148472, \"fn_rate\": 0.2914820285573609, \"precision\": 0.9993055555555556, \"recall\": 0.7085179714426391, \"specificity\": 0.9991266375545852, \"npv\": 0.6589861751152074, \"accuracy\": 0.8132871536523929, \"f1\": 0.829155862863728, \"f2\": 0.7523002927645337, \"f0_5\": 0.923501476062123, \"p4\": 0.8112852415850365, \"phi\": 0.6825222299358593}, {\"truth_threshold\": -7.600000113248825, \"match_probability\": 0.0051278965144870335, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1439.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 592.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7085179714426391, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2914820285573609, \"precision\": 1.0, \"recall\": 0.7085179714426391, \"specificity\": 1.0, \"npv\": 0.6591824985607369, \"accuracy\": 0.8136020151133502, \"f1\": 0.8293948126801153, \"f2\": 0.7523789605772248, \"f0_5\": 0.9239758571978939, \"p4\": 0.8116179257342173, \"phi\": 0.6834051848579609}, {\"truth_threshold\": -7.400000110268593, \"match_probability\": 0.005885918232687788, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1437.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 594.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.707533234859675, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29246676514032494, \"precision\": 1.0, \"recall\": 0.707533234859675, \"specificity\": 1.0, \"npv\": 0.6584243818286372, \"accuracy\": 0.8129722921914357, \"f1\": 0.828719723183391, \"f2\": 
0.7514904298713524, \"f0_5\": 0.9236405707674509, \"p4\": 0.811007239776182, \"phi\": 0.6825372757481436}, {\"truth_threshold\": -6.800000101327896, \"match_probability\": 0.00889438522932807, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1436.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 595.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.707040866568193, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.292959133431807, \"precision\": 1.0, \"recall\": 0.707040866568193, \"specificity\": 1.0, \"npv\": 0.6580459770114943, \"accuracy\": 0.8126574307304786, \"f1\": 0.8283818863570811, \"f2\": 0.7510460251046025, \"f0_5\": 0.9234726688102894, \"p4\": 0.8107019041426428, \"phi\": 0.6821036562194343}, {\"truth_threshold\": -6.70000009983778, \"match_probability\": 0.009526684411466419, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1428.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 603.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7031019202363368, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2968980797636632, \"precision\": 1.0, \"recall\": 0.7031019202363368, \"specificity\": 1.0, \"npv\": 0.6550343249427918, \"accuracy\": 0.8101385390428212, \"f1\": 0.8256721595836947, \"f2\": 0.7474874371859297, \"f0_5\": 0.9221232080588919, \"p4\": 0.8082593661032169, \"phi\": 0.6786426833673147}, {\"truth_threshold\": -6.400000095367432, \"match_probability\": 0.011702953955477532, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1424.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 607.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7011324470704087, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29886755292959133, \"precision\": 1.0, \"recall\": 0.7011324470704087, \"specificity\": 1.0, \"npv\": 0.6535388127853882, \"accuracy\": 0.8088790931989924, \"f1\": 0.8243125904486251, \"f2\": 0.7457059069962296, 
\"f0_5\": 0.9214442862689272, \"p4\": 0.8070381719383619, \"phi\": 0.6769174743376838}, {\"truth_threshold\": -6.3000000938773155, \"match_probability\": 0.012532388771145032, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1423.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 608.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7006400787789266, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29935992122107336, \"precision\": 1.0, \"recall\": 0.7006400787789266, \"specificity\": 1.0, \"npv\": 0.6531660011409013, \"accuracy\": 0.8085642317380353, \"f1\": 0.8239722061378112, \"f2\": 0.74526029119095, \"f0_5\": 0.9212741162760585, \"p4\": 0.8067328787708493, \"phi\": 0.6764867171608602}, {\"truth_threshold\": -5.700000084936619, \"match_probability\": 0.01887356650421064, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1422.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 609.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7001477104874446, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2998522895125554, \"precision\": 1.0, \"recall\": 0.7001477104874446, \"specificity\": 1.0, \"npv\": 0.6527936145952109, \"accuracy\": 0.8082493702770781, \"f1\": 0.8236316246741964, \"f2\": 0.7448145820238844, \"f0_5\": 0.9211037699183832, \"p4\": 0.8064275873033679, \"phi\": 0.676056177162564}, {\"truth_threshold\": -5.600000083446503, \"match_probability\": 0.02020082327925431, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1405.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 626.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6917774495322502, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3082225504677499, \"precision\": 1.0, \"recall\": 0.6917774495322502, \"specificity\": 1.0, \"npv\": 0.6465273856578204, \"accuracy\": 0.802896725440806, \"f1\": 0.8178114086146682, \"f2\": 0.737223213348725, \"f0_5\": 
0.9181806299830088, \"p4\": 0.8012376730750075, \"phi\": 0.6687698153349331}, {\"truth_threshold\": -5.300000078976154, \"match_probability\": 0.024754544222716376, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1393.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 638.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6858690300344658, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31413096996553425, \"precision\": 1.0, \"recall\": 0.6858690300344658, \"specificity\": 1.0, \"npv\": 0.6421761076836792, \"accuracy\": 0.7991183879093199, \"f1\": 0.8136682242990654, \"f2\": 0.7318482715141326, \"f0_5\": 0.9160857556227805, \"p4\": 0.7975738525329583, \"phi\": 0.6636630953189379}, {\"truth_threshold\": -4.90000007301569, \"match_probability\": 0.032407497325934585, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1391.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 640.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6848842934515017, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3151157065484983, \"precision\": 1.0, \"recall\": 0.6848842934515017, \"specificity\": 1.0, \"npv\": 0.6414565826330533, \"accuracy\": 0.7984886649874056, \"f1\": 0.8129748684979544, \"f2\": 0.7309511297950604, \"f0_5\": 0.9157340355497038, \"p4\": 0.7969631540364932, \"phi\": 0.6628148598035907}, {\"truth_threshold\": -4.800000071525574, \"match_probability\": 0.03465289308554322, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1390.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 641.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6843919251600197, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3156080748399803, \"precision\": 1.0, \"recall\": 0.6843919251600197, \"specificity\": 1.0, \"npv\": 0.641097424412094, \"accuracy\": 0.7981738035264484, \"f1\": 0.8126278865828706, \"f2\": 0.7305024174900148, \"f0_5\": 0.9155578975102094, 
\"p4\": 0.7966577964206586, \"phi\": 0.6623910480286727}, {\"truth_threshold\": -4.500000067055225, \"match_probability\": 0.04232371044088178, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1389.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 642.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6838995568685377, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31610044313146235, \"precision\": 1.0, \"recall\": 0.6838995568685377, \"specificity\": 1.0, \"npv\": 0.6407386681589256, \"accuracy\": 0.7978589420654912, \"f1\": 0.8122807017543859, \"f2\": 0.7300536108483129, \"f0_5\": 0.9153815737445631, \"p4\": 0.7963524329131265, \"phi\": 0.6619674396995868}, {\"truth_threshold\": -4.300000064074993, \"match_probability\": 0.048313119674570026, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1388.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 643.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6834071885770556, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3165928114229444, \"precision\": 1.0, \"recall\": 0.6834071885770556, \"specificity\": 1.0, \"npv\": 0.6403803131991052, \"accuracy\": 0.797544080604534, \"f1\": 0.8119333138344546, \"f2\": 0.7296047098402019, \"f0_5\": 0.9152050639588554, \"p4\": 0.7960470632785187, \"phi\": 0.6615440344100268}, {\"truth_threshold\": -4.200000062584877, \"match_probability\": 0.05160178526561565, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1382.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 649.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6804529788281635, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31954702117183653, \"precision\": 1.0, \"recall\": 0.6804529788281635, \"specificity\": 1.0, \"npv\": 0.6382385730211817, \"accuracy\": 0.7956549118387909, \"f1\": 0.8098447113975974, \"f2\": 0.7269093204292026, \"f0_5\": 0.9141420822860167, \"p4\": 
0.7942147035798486, \"phi\": 0.6590078438192518}, {\"truth_threshold\": -3.8000000566244125, \"match_probability\": 0.06698457743861425, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1379.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 652.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6789758739537174, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3210241260462826, \"precision\": 1.0, \"recall\": 0.6789758739537174, \"specificity\": 1.0, \"npv\": 0.6371730662214803, \"accuracy\": 0.7947103274559194, \"f1\": 0.8087976539589443, \"f2\": 0.7255603493633589, \"f0_5\": 0.9136080561812641, \"p4\": 0.7932984218488349, \"phi\": 0.6577424568153551}, {\"truth_threshold\": -3.6000000536441803, \"match_probability\": 0.0761862214703254, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1377.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 654.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6779911373707533, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.32200886262924666, \"precision\": 1.0, \"recall\": 0.6779911373707533, \"specificity\": 1.0, \"npv\": 0.6364647026125625, \"accuracy\": 0.7940806045340051, \"f1\": 0.8080985915492958, \"f2\": 0.7246605620461004, \"f0_5\": 0.9132510943095902, \"p4\": 0.7926875252637993, \"phi\": 0.6568998611817706}, {\"truth_threshold\": -3.300000049173832, \"match_probability\": 0.09217307161544283, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1375.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 656.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6770064007877893, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3229935992122107, \"precision\": 1.0, \"recall\": 0.6770064007877893, \"specificity\": 1.0, \"npv\": 0.6357579122709606, \"accuracy\": 0.7934508816120907, \"f1\": 0.8073987081620669, \"f2\": 0.7237603958311402, \"f0_5\": 0.9128933740539105, \"p4\": 0.7920765927688658, 
\"phi\": 0.6560580583751121}, {\"truth_threshold\": -3.200000047683716, \"match_probability\": 0.09813940308831819, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1374.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 657.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6765140324963073, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.32348596750369274, \"precision\": 1.0, \"recall\": 0.6765140324963073, \"specificity\": 1.0, \"npv\": 0.6354051054384018, \"accuracy\": 0.7931360201511335, \"f1\": 0.8070484581497798, \"f2\": 0.7233101705622236, \"f0_5\": 0.912714228776405, \"p4\": 0.7917711124557005, \"phi\": 0.6556374532841107}, {\"truth_threshold\": -3.1000000461935997, \"match_probability\": 0.10444750015659417, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1370.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 661.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6745445593303792, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3254554406696209, \"precision\": 1.0, \"recall\": 0.6745445593303792, \"specificity\": 1.0, \"npv\": 0.6339977851605758, \"accuracy\": 0.7918765743073047, \"f1\": 0.8056453984122317, \"f2\": 0.7215083210448704, \"f0_5\": 0.9119957395819465, \"p4\": 0.7905490918185237, \"phi\": 0.6539569990508374}, {\"truth_threshold\": -2.7000000402331352, \"match_probability\": 0.13336855415354743, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1363.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 668.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.671097981290005, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3289020187099951, \"precision\": 1.0, \"recall\": 0.671097981290005, \"specificity\": 1.0, \"npv\": 0.631549917264203, \"accuracy\": 0.7896725440806045, \"f1\": 0.803182086034178, \"f2\": 0.7183514282702645, \"f0_5\": 0.9107309902445543, \"p4\": 0.7884101358393227, \"phi\": 
0.6510237127477586}, {\"truth_threshold\": -2.600000038743019, \"match_probability\": 0.1415855743659812, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1356.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 675.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6676514032496307, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.33234859675036926, \"precision\": 1.0, \"recall\": 0.6676514032496307, \"specificity\": 1.0, \"npv\": 0.6291208791208791, \"accuracy\": 0.7874685138539043, \"f1\": 0.8007085916740478, \"f2\": 0.7151898734177216, \"f0_5\": 0.9094567404426559, \"p4\": 0.7862705739141664, \"phi\": 0.6480998671182523}, {\"truth_threshold\": -2.500000037252903, \"match_probability\": 0.15022110152606716, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1355.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 676.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6671590349581487, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3328409650418513, \"precision\": 1.0, \"recall\": 0.6671590349581487, \"specificity\": 1.0, \"npv\": 0.628775398132894, \"accuracy\": 0.7871536523929471, \"f1\": 0.8003544004725339, \"f2\": 0.7147378415444667, \"f0_5\": 0.9092739229633606, \"p4\": 0.7859648678428045, \"phi\": 0.6476829377278417}, {\"truth_threshold\": -2.400000035762787, \"match_probability\": 0.1592855907727143, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1351.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 680.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6651895617922206, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3348104382077794, \"precision\": 1.0, \"recall\": 0.6651895617922206, \"specificity\": 1.0, \"npv\": 0.6273972602739726, \"accuracy\": 0.7858942065491183, \"f1\": 0.7989355410999409, \"f2\": 0.7129287598944591, \"f0_5\": 0.9085406859448554, \"p4\": 0.7847418977635621, \"phi\": 0.6460171117170842}, 
{\"truth_threshold\": -2.3000000342726707, \"match_probability\": 0.16878839957195682, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1346.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 685.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6627277203348104, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3372722796651896, \"precision\": 1.0, \"recall\": 0.6627277203348104, \"specificity\": 1.0, \"npv\": 0.6256830601092896, \"accuracy\": 0.7843198992443325, \"f1\": 0.7971572401539828, \"f2\": 0.7106652587117213, \"f0_5\": 0.9076196898179366, \"p4\": 0.7832128384966869, \"phi\": 0.6439390561833764}, {\"truth_threshold\": -2.2000000327825546, \"match_probability\": 0.1787376058900962, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1344.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 687.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6617429837518464, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.33825701624815363, \"precision\": 1.0, \"recall\": 0.6617429837518464, \"specificity\": 1.0, \"npv\": 0.625, \"accuracy\": 0.7836901763224181, \"f1\": 0.7964444444444444, \"f2\": 0.7097591888466414, \"f0_5\": 0.907249898744431, \"p4\": 0.7826011005327976, \"phi\": 0.6431091391396206}, {\"truth_threshold\": -2.1000000312924385, \"match_probability\": 0.18913982061899084, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1340.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 691.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6597735105859183, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34022648941408173, \"precision\": 1.0, \"recall\": 0.6597735105859183, \"specificity\": 1.0, \"npv\": 0.6236383442265795, \"accuracy\": 0.7824307304785895, \"f1\": 0.7950163156333432, \"f2\": 0.7079459002535926, \"f0_5\": 0.9065079150317954, \"p4\": 0.7813774176935412, \"phi\": 0.6414515256091918}, {\"truth_threshold\": 
-2.0000000298023224, \"match_probability\": 0.19999999669481672, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1338.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 693.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6587887740029542, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3412112259970458, \"precision\": 1.0, \"recall\": 0.6587887740029542, \"specificity\": 1.0, \"npv\": 0.6229597388465724, \"accuracy\": 0.781801007556675, \"f1\": 0.7943009795191451, \"f2\": 0.7070386810399493, \"f0_5\": 0.9061357171881349, \"p4\": 0.7807654687830268, \"phi\": 0.6406238230099892}, {\"truth_threshold\": -1.700000025331974, \"match_probability\": 0.2353489599091234, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1335.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 696.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6573116691285081, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34268833087149186, \"precision\": 1.0, \"recall\": 0.6573116691285081, \"specificity\": 1.0, \"npv\": 0.6219445953286258, \"accuracy\": 0.7808564231738035, \"f1\": 0.7932263814616756, \"f2\": 0.7056771328893118, \"f0_5\": 0.9055759055759056, \"p4\": 0.7798474053553527, \"phi\": 0.6393836407517114}, {\"truth_threshold\": -1.600000023841858, \"match_probability\": 0.24805074388621665, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1333.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 698.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6563269325455441, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3436730674544559, \"precision\": 1.0, \"recall\": 0.6563269325455441, \"specificity\": 1.0, \"npv\": 0.6212696690179056, \"accuracy\": 0.7802267002518891, \"f1\": 0.7925089179548157, \"f2\": 0.7047689542138099, \"f0_5\": 0.9052016840961564, \"p4\": 0.7792352667284765, \"phi\": 0.6385577625791636}, {\"truth_threshold\": -1.4000000208616257, 
\"match_probability\": 0.2747995717943022, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1332.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 699.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6558345642540621, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34416543574593794, \"precision\": 1.0, \"recall\": 0.6558345642540621, \"specificity\": 1.0, \"npv\": 0.6209327548806941, \"accuracy\": 0.779911838790932, \"f1\": 0.792149866190901, \"f2\": 0.7043147208121827, \"f0_5\": 0.9050142682429678, \"p4\": 0.778929167747172, \"phi\": 0.6381450953570468}, {\"truth_threshold\": -1.3000000193715096, \"match_probability\": 0.2888262766358852, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1327.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 704.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6533727227966519, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3466272772033481, \"precision\": 1.0, \"recall\": 0.6533727227966519, \"specificity\": 1.0, \"npv\": 0.6192536506219578, \"accuracy\": 0.7783375314861462, \"f1\": 0.7903513996426444, \"f2\": 0.7020421119458259, \"f0_5\": 0.904074124540128, \"p4\": 0.7773983659139201, \"phi\": 0.6360844627945531}, {\"truth_threshold\": -1.2000000178813934, \"match_probability\": 0.3032695424040186, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1322.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 709.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6509108813392418, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34908911866075826, \"precision\": 1.0, \"recall\": 0.6509108813392418, \"specificity\": 1.0, \"npv\": 0.6175836030204962, \"accuracy\": 0.7767632241813602, \"f1\": 0.7885475693408888, \"f2\": 0.6997670971839932, \"f0_5\": 0.9031288427380789, \"p4\": 0.775867028931697, \"phi\": 0.6340283016890773}, {\"truth_threshold\": -1.1000000163912773, \"match_probability\": 
0.318111997717226, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1320.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 711.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6499261447562777, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3500738552437223, \"precision\": 1.0, \"recall\": 0.6499261447562777, \"specificity\": 1.0, \"npv\": 0.6169181034482759, \"accuracy\": 0.7761335012594458, \"f1\": 0.7878245299910475, \"f2\": 0.6988564167725541, \"f0_5\": 0.9027492819039803, \"p4\": 0.7752543370502095, \"phi\": 0.6332070787700438}, {\"truth_threshold\": -1.0000000149011612, \"match_probability\": 0.33333333103806717, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1319.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 712.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6494337764647957, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35056622353520434, \"precision\": 1.0, \"recall\": 0.6494337764647957, \"specificity\": 1.0, \"npv\": 0.6165858912224017, \"accuracy\": 0.7758186397984886, \"f1\": 0.7874626865671642, \"f2\": 0.6984009319072328, \"f0_5\": 0.9025591898179828, \"p4\": 0.7749479564070448, \"phi\": 0.6327967318590355}, {\"truth_threshold\": -0.9000000134110451, \"match_probability\": 0.34891031813411577, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1316.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 715.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6479566715903495, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3520433284096504, \"precision\": 1.0, \"recall\": 0.6479566715903495, \"specificity\": 1.0, \"npv\": 0.6155913978494624, \"accuracy\": 0.7748740554156172, \"f1\": 0.7863758589781894, \"f2\": 0.6970338983050848, \"f0_5\": 0.9019876627827279, \"p4\": 0.774028672532505, \"phi\": 0.6315667448577293}, {\"truth_threshold\": -0.7000000104308128, \"match_probability\": 0.38102425962470177, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1314.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 717.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6469719350073855, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35302806499261447, \"precision\": 1.0, \"recall\": 0.6469719350073855, \"specificity\": 1.0, \"npv\": 0.6149301825993555, \"accuracy\": 0.7742443324937027, \"f1\": 0.7856502242152467, \"f2\": 0.6961220597584233, \"f0_5\": 0.901605599011939, \"p4\": 0.7734156957074743, \"phi\": 0.6307476279232052}, {\"truth_threshold\": -0.6000000089406967, \"match_probability\": 0.3975010577814427, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1309.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 722.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6445100935499753, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3554899064500246, \"precision\": 1.0, \"recall\": 0.6445100935499753, \"specificity\": 1.0, \"npv\": 0.6132833422603107, \"accuracy\": 0.7726700251889169, \"f1\": 0.7838323353293413, \"f2\": 0.6938407717587194, \"f0_5\": 0.9006467593229668, \"p4\": 0.7718828151071816, \"phi\": 0.6287028744111437}, {\"truth_threshold\": -0.5000000074505806, \"match_probability\": 0.41421356112001384, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1306.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 725.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6430329886755293, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35696701132447073, \"precision\": 1.0, \"recall\": 0.6430329886755293, \"specificity\": 1.0, \"npv\": 0.6122994652406417, \"accuracy\": 0.7717254408060453, \"f1\": 0.7827389871141744, \"f2\": 0.6924708377518558, \"f0_5\": 0.9000689179875948, \"p4\": 0.7709627754494935, \"phi\": 0.627478091329186}, {\"truth_threshold\": -0.4000000059604645, \"match_probability\": 0.4311259267559445, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1300.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 731.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6400787789266371, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3599212210733629, \"precision\": 1.0, \"recall\": 0.6400787789266371, \"specificity\": 1.0, \"npv\": 0.6103411513859275, \"accuracy\": 0.7698362720403022, \"f1\": 0.7805463824677275, \"f2\": 0.6897283531409168, \"f0_5\": 0.8989074816761167, \"p4\": 0.7691219621523272, \"phi\": 0.6250331342479231}, {\"truth_threshold\": -0.30000000447034836, \"match_probability\": 0.4482004805735527, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1297.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 734.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.638601674052191, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36139832594780896, \"precision\": 1.0, \"recall\": 0.638601674052191, \"specificity\": 1.0, \"npv\": 0.6093666844065992, \"accuracy\": 0.7688916876574308, \"f1\": 0.7794471153846154, \"f2\": 0.6883558008703959, \"f0_5\": 0.8983238675716858, \"p4\": 0.7682011740290052, \"phi\": 0.6238129405308033}, {\"truth_threshold\": -0.20000000298023224, \"match_probability\": 0.4653980381052749, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1284.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 747.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6322008862629247, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36779911373707536, \"precision\": 1.0, \"recall\": 0.6322008862629247, \"specificity\": 1.0, \"npv\": 0.6051797040169133, \"accuracy\": 0.7647984886649875, \"f1\": 0.7746606334841629, \"f2\": 0.6823979591836735, \"f0_5\": 0.895772289660946, \"p4\": 0.7642079467115986, \"phi\": 0.6185427594175095}, {\"truth_threshold\": 0.0, \"match_probability\": 0.5, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, 
\"n\": 1145.0, \"tp\": 1283.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 748.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6317085179714427, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3682914820285574, \"precision\": 1.0, \"recall\": 0.6317085179714427, \"specificity\": 1.0, \"npv\": 0.6048600105652404, \"accuracy\": 0.7644836272040302, \"f1\": 0.7742908871454436, \"f2\": 0.6819389816094398, \"f0_5\": 0.8955744799664945, \"p4\": 0.7639005528137026, \"phi\": 0.6181385126768588}, {\"truth_threshold\": 0.10000000149011612, \"match_probability\": 0.5173217450900928, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1279.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 752.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6297390448055146, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3702609551944855, \"precision\": 1.0, \"recall\": 0.6297390448055146, \"specificity\": 1.0, \"npv\": 0.6035846072746441, \"accuracy\": 0.7632241813602015, \"f1\": 0.7728096676737161, \"f2\": 0.6801020950760396, \"f0_5\": 0.8947810270043375, \"p4\": 0.7626706427097226, \"phi\": 0.6165231496419628}, {\"truth_threshold\": 0.20000000298023224, \"match_probability\": 0.5346019618947252, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1272.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 759.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6262924667651403, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37370753323485967, \"precision\": 1.0, \"recall\": 0.6262924667651403, \"specificity\": 1.0, \"npv\": 0.6013655462184874, \"accuracy\": 0.7610201511335013, \"f1\": 0.7702089009990918, \"f2\": 0.6768837803320562, \"f0_5\": 0.8933839022334598, \"p4\": 0.7605169691954441, \"phi\": 0.6137024615957984}, {\"truth_threshold\": 0.30000000447034836, \"match_probability\": 0.5517995194264473, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, 
\"tp\": 1271.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 760.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6258000984736583, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3741999015263417, \"precision\": 1.0, \"recall\": 0.6258000984736583, \"specificity\": 1.0, \"npv\": 0.6010498687664042, \"accuracy\": 0.760705289672544, \"f1\": 0.7698364627498486, \"f2\": 0.6764236295902075, \"f0_5\": 0.893183415319747, \"p4\": 0.7602091587940459, \"phi\": 0.6133001443515198}, {\"truth_threshold\": 0.5000000074505806, \"match_probability\": 0.5857864388799862, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1268.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 763.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6243229935992122, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37567700640078777, \"precision\": 1.0, \"recall\": 0.6243229935992122, \"specificity\": 1.0, \"npv\": 0.600104821802935, \"accuracy\": 0.7597607052896725, \"f1\": 0.7687177932706881, \"f2\": 0.6750425894378195, \"f0_5\": 0.892580599746586, \"p4\": 0.7592855072439135, \"phi\": 0.6120941421230318}, {\"truth_threshold\": 0.6000000089406967, \"match_probability\": 0.6024989422185573, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1266.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 765.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6233382570162481, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3766617429837518, \"precision\": 1.0, \"recall\": 0.6233382570162481, \"specificity\": 1.0, \"npv\": 0.599476439790576, \"accuracy\": 0.7591309823677582, \"f1\": 0.7679708826205641, \"f2\": 0.6741214057507987, \"f0_5\": 0.8921775898520085, \"p4\": 0.7586695530830421, \"phi\": 0.6112909283650163}, {\"truth_threshold\": 0.7000000104308128, \"match_probability\": 0.6189757403752982, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1255.0, \"tn\": 
1145.0, \"fp\": 0.0, \"fn\": 776.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6179222058099458, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.38207779419005417, \"precision\": 1.0, \"recall\": 0.6179222058099458, \"specificity\": 1.0, \"npv\": 0.5960437272254034, \"accuracy\": 0.7556675062972292, \"f1\": 0.7638466220328667, \"f2\": 0.6690478729075594, \"f0_5\": 0.8899446886966388, \"p4\": 0.7552790297360157, \"phi\": 0.6068843832941353}, {\"truth_threshold\": 0.800000011920929, \"match_probability\": 0.6351831076021942, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1249.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 782.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6149679960610537, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3850320039389463, \"precision\": 1.0, \"recall\": 0.6149679960610537, \"specificity\": 1.0, \"npv\": 0.5941878567721848, \"accuracy\": 0.7537783375314862, \"f1\": 0.7615853658536585, \"f2\": 0.6662754721007148, \"f0_5\": 0.8887149565959869, \"p4\": 0.7534275803790252, \"phi\": 0.6044886397303119}, {\"truth_threshold\": 0.9000000134110451, \"match_probability\": 0.6510896818658842, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1246.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 785.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6134908911866076, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3865091088133924, \"precision\": 1.0, \"recall\": 0.6134908911866076, \"specificity\": 1.0, \"npv\": 0.5932642487046632, \"accuracy\": 0.7528337531486146, \"f1\": 0.7604516325907843, \"f2\": 0.6648879402347919, \"f0_5\": 0.8880969351389879, \"p4\": 0.7525012807216611, \"phi\": 0.6032928083832734}, {\"truth_threshold\": 1.0000000149011612, \"match_probability\": 0.6666666689619328, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1241.0, \"tn\": 1145.0, \"fp\": 0.0, 
\"fn\": 790.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6110290497291975, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3889709502708026, \"precision\": 1.0, \"recall\": 0.6110290497291975, \"specificity\": 1.0, \"npv\": 0.5917312661498708, \"accuracy\": 0.7512594458438288, \"f1\": 0.758557457212714, \"f2\": 0.6625734116390817, \"f0_5\": 0.8870621872766261, \"p4\": 0.7509565686139796, \"phi\": 0.6013027467512604}, {\"truth_threshold\": 1.1000000163912773, \"match_probability\": 0.681888002282774, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1238.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 793.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6095519448547514, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.39044805514524866, \"precision\": 1.0, \"recall\": 0.6095519448547514, \"specificity\": 1.0, \"npv\": 0.5908152734778122, \"accuracy\": 0.7503148614609572, \"f1\": 0.757418170694402, \"f2\": 0.6611835077974791, \"f0_5\": 0.8864384934841758, \"p4\": 0.7500292006663175, \"phi\": 0.6001104889920623}, {\"truth_threshold\": 1.2000000178813934, \"match_probability\": 0.6967304575959814, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1232.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 799.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6065977351058592, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3934022648941408, \"precision\": 1.0, \"recall\": 0.6065977351058592, \"specificity\": 1.0, \"npv\": 0.5889917695473251, \"accuracy\": 0.7484256926952141, \"f1\": 0.7551333129022372, \"f2\": 0.6584010260795211, \"f0_5\": 0.8851846529673804, \"p4\": 0.748173210499427, \"phi\": 0.5977299335012423}, {\"truth_threshold\": 1.3000000193715096, \"match_probability\": 0.7111737233641148, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1217.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 814.0, \"P_rate\": 
0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5992122107336287, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.40078778926637126, \"precision\": 1.0, \"recall\": 0.5992122107336287, \"specificity\": 1.0, \"npv\": 0.5844818785094436, \"accuracy\": 0.7437027707808564, \"f1\": 0.749384236453202, \"f2\": 0.6514291831709667, \"f0_5\": 0.8820118857805479, \"p4\": 0.7435255360881731, \"phi\": 0.5918012154054669}, {\"truth_threshold\": 1.5000000223517418, \"match_probability\": 0.7387961280260511, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1216.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 815.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5987198424421467, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4012801575578533, \"precision\": 1.0, \"recall\": 0.5987198424421467, \"specificity\": 1.0, \"npv\": 0.5841836734693877, \"accuracy\": 0.7433879093198993, \"f1\": 0.7489990760702187, \"f2\": 0.6509635974304069, \"f0_5\": 0.8817984046410442, \"p4\": 0.7432152820546354, \"phi\": 0.5914070991600171}, {\"truth_threshold\": 1.600000023841858, \"match_probability\": 0.7519492561137834, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1213.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 818.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5972427375677006, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.40275726243229937, \"precision\": 1.0, \"recall\": 0.5972427375677006, \"specificity\": 1.0, \"npv\": 0.5832908813041263, \"accuracy\": 0.7424433249370277, \"f1\": 0.747842170160296, \"f2\": 0.6495662418335654, \"f0_5\": 0.8811564724684005, \"p4\": 0.742284201345094, \"phi\": 0.590225586321326}, {\"truth_threshold\": 1.700000025331974, \"match_probability\": 0.7646510400908766, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1200.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 831.0, \"P_rate\": 0.6394836272040302, 
\"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5908419497784343, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4091580502215657, \"precision\": 1.0, \"recall\": 0.5908419497784343, \"specificity\": 1.0, \"npv\": 0.5794534412955465, \"accuracy\": 0.7383501259445844, \"f1\": 0.7428040854224698, \"f2\": 0.6435006435006435, \"f0_5\": 0.8783487044356609, \"p4\": 0.7382438098538624, \"phi\": 0.5851199886013844}, {\"truth_threshold\": 1.8000000268220901, \"match_probability\": 0.7768953900182098, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1195.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 836.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5883801083210242, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4116198916789759, \"precision\": 1.0, \"recall\": 0.5883801083210242, \"specificity\": 1.0, \"npv\": 0.5779909136799596, \"accuracy\": 0.7367758186397985, \"f1\": 0.7408555486670799, \"f2\": 0.6411632149372251, \"f0_5\": 0.8772573777712523, \"p4\": 0.7366872427429624, \"phi\": 0.5831623756721471}, {\"truth_threshold\": 1.9000000283122063, \"match_probability\": 0.7886787621992872, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1190.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 841.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.585918266863614, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.414081733136386, \"precision\": 1.0, \"recall\": 0.585918266863614, \"specificity\": 1.0, \"npv\": 0.5765357502517623, \"accuracy\": 0.7352015113350125, \"f1\": 0.7389009624340267, \"f2\": 0.6388232767876315, \"f0_5\": 0.8761596230304816, \"p4\": 0.7351291857832581, \"phi\": 0.5812080759697219}, {\"truth_threshold\": 2.0000000298023224, \"match_probability\": 0.8000000033051833, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1189.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 842.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.585425898572132, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.41457410142786805, \"precision\": 1.0, \"recall\": 0.585425898572132, \"specificity\": 1.0, \"npv\": 0.5762455963764469, \"accuracy\": 0.7348866498740554, \"f1\": 0.7385093167701864, \"f2\": 0.6383549876516698, \"f0_5\": 0.8759392957123914, \"p4\": 0.7348173920177556, \"phi\": 0.5808176099748659}, {\"truth_threshold\": 2.1000000312924385, \"match_probability\": 0.8108601793810092, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1183.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 848.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5824716888232397, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4175283111767602, \"precision\": 1.0, \"recall\": 0.5824716888232397, \"specificity\": 1.0, \"npv\": 0.57451078775715, \"accuracy\": 0.7329974811083123, \"f1\": 0.7361543248288737, \"f2\": 0.6355431395723649, \"f0_5\": 0.8746118586426143, \"p4\": 0.732945325152551, \"phi\": 0.578477543896111}, {\"truth_threshold\": 2.2000000327825546, \"match_probability\": 0.8212623941099038, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1179.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 852.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5805022156573116, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4194977843426883, \"precision\": 1.0, \"recall\": 0.5805022156573116, \"specificity\": 1.0, \"npv\": 0.5733600400600901, \"accuracy\": 0.7317380352644837, \"f1\": 0.7345794392523365, \"f2\": 0.6336665591744598, \"f0_5\": 0.8737216540684749, \"p4\": 0.7316960140750485, \"phi\": 0.5769200755947459}, {\"truth_threshold\": 2.3000000342726707, \"match_probability\": 0.8312116004280432, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1164.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 867.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, 
\"tp_rate\": 0.5731166912850812, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.42688330871491875, \"precision\": 1.0, \"recall\": 0.5731166912850812, \"specificity\": 1.0, \"npv\": 0.5690854870775348, \"accuracy\": 0.7270151133501259, \"f1\": 0.7286384976525822, \"f2\": 0.6266149870801033, \"f0_5\": 0.8703454463885151, \"p4\": 0.7270016744799519, \"phi\": 0.571097532311457}, {\"truth_threshold\": 2.400000035762787, \"match_probability\": 0.8407144092272857, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1151.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 880.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5667159034958149, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43328409650418515, \"precision\": 1.0, \"recall\": 0.5667159034958149, \"specificity\": 1.0, \"npv\": 0.5654320987654321, \"accuracy\": 0.7229219143576826, \"f1\": 0.7234443746071653, \"f2\": 0.6204851752021563, \"f0_5\": 0.8673700075357951, \"p4\": 0.7229205464573797, \"phi\": 0.5660736371863528}, {\"truth_threshold\": 2.500000037252903, \"match_probability\": 0.8497788984739328, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1148.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 883.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5652387986213688, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4347612013786312, \"precision\": 1.0, \"recall\": 0.5652387986213688, \"specificity\": 1.0, \"npv\": 0.564595660749507, \"accuracy\": 0.7219773299748111, \"f1\": 0.7222396980182447, \"f2\": 0.6190681622088007, \"f0_5\": 0.8666767325985203, \"p4\": 0.7219769863862414, \"phi\": 0.5649171381617742}, {\"truth_threshold\": 2.600000038743019, \"match_probability\": 0.8584144256340188, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1147.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 884.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 
0.5647464303298868, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43525356967011325, \"precision\": 1.0, \"recall\": 0.5647464303298868, \"specificity\": 1.0, \"npv\": 0.5643173977328734, \"accuracy\": 0.7216624685138538, \"f1\": 0.7218376337319069, \"f2\": 0.6185956207528853, \"f0_5\": 0.8664450823387219, \"p4\": 0.7216623155682867, \"phi\": 0.5645318732743893}, {\"truth_threshold\": 2.7000000402331352, \"match_probability\": 0.8666314458464526, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1144.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 887.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5632693254554406, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4367306745445593, \"precision\": 1.0, \"recall\": 0.5632693254554406, \"specificity\": 1.0, \"npv\": 0.5634842519685039, \"accuracy\": 0.7207178841309824, \"f1\": 0.7206299212598425, \"f2\": 0.6171773845489857, \"f0_5\": 0.8657484486151051, \"p4\": 0.7207178457145617, \"phi\": 0.5633767784627467}, {\"truth_threshold\": 2.8000000417232513, \"match_probability\": 0.8744413378412453, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1136.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 895.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5593303791235844, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.44066962087641554, \"precision\": 1.0, \"recall\": 0.5593303791235844, \"specificity\": 1.0, \"npv\": 0.5612745098039216, \"accuracy\": 0.718198992443325, \"f1\": 0.7173981686138301, \"f2\": 0.6133909287257019, \"f0_5\": 0.8638783269961977, \"p4\": 0.7181958416012424, \"phi\": 0.560301601247963}, {\"truth_threshold\": 2.9000000432133675, \"match_probability\": 0.8818562391739494, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1135.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 896.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5588380108321024, 
\"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.44116198916789756, \"precision\": 1.0, \"recall\": 0.5588380108321024, \"specificity\": 1.0, \"npv\": 0.560999510044096, \"accuracy\": 0.7178841309823678, \"f1\": 0.7169930511686671, \"f2\": 0.612917161680527, \"f0_5\": 0.8636432810835489, \"p4\": 0.717880234989325, \"phi\": 0.5599177174110734}, {\"truth_threshold\": 3.1000000461935997, \"match_probability\": 0.8955524998434058, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1109.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 922.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5460364352535697, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45396356474643035, \"precision\": 1.0, \"recall\": 0.5460364352535697, \"specificity\": 1.0, \"npv\": 0.5539429124334785, \"accuracy\": 0.7096977329974811, \"f1\": 0.7063694267515923, \"f2\": 0.6005631972273368, \"f0_5\": 0.8574300293799288, \"p4\": 0.7096451676361052, \"phi\": 0.5499754660338557}, {\"truth_threshold\": 3.200000047683716, \"match_probability\": 0.9018605969116819, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1105.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 926.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5440669620876416, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45593303791235845, \"precision\": 1.0, \"recall\": 0.5440669620876416, \"specificity\": 1.0, \"npv\": 0.5528730082085949, \"accuracy\": 0.7084382871536524, \"f1\": 0.704719387755102, \"f2\": 0.5986564091450861, \"f0_5\": 0.8564563633545187, \"p4\": 0.7083729914338502, \"phi\": 0.5484523115060287}, {\"truth_threshold\": 3.300000049173832, \"match_probability\": 0.9078269283845571, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1103.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 928.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5430822255046776, \"tn_rate\": 1.0, 
\"fp_rate\": 0.0, \"fn_rate\": 0.4569177744953225, \"precision\": 1.0, \"recall\": 0.5430822255046776, \"specificity\": 1.0, \"npv\": 0.5523396044380126, \"accuracy\": 0.707808564231738, \"f1\": 0.7038927887683472, \"f2\": 0.597702395144684, \"f0_5\": 0.8559677169020643, \"p4\": 0.7077363543593591, \"phi\": 0.5476913561601727}, {\"truth_threshold\": 3.400000050663948, \"match_probability\": 0.9134653434169965, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1101.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 930.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5420974889217134, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45790251107828656, \"precision\": 1.0, \"recall\": 0.5420974889217134, \"specificity\": 1.0, \"npv\": 0.5518072289156627, \"accuracy\": 0.7071788413098237, \"f1\": 0.7030651340996169, \"f2\": 0.5967479674796748, \"f0_5\": 0.8554778554778555, \"p4\": 0.7070993471091068, \"phi\": 0.5469308120448416}, {\"truth_threshold\": 3.500000052154064, \"match_probability\": 0.9187896995557598, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1094.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 937.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5386509108813392, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46134908911866074, \"precision\": 1.0, \"recall\": 0.5386509108813392, \"specificity\": 1.0, \"npv\": 0.5499519692603266, \"accuracy\": 0.7049748110831234, \"f1\": 0.70016, \"f2\": 0.5934042091559991, \"f0_5\": 0.8537537068830966, \"p4\": 0.7048668608108392, \"phi\": 0.544272109503198}, {\"truth_threshold\": 3.6000000536441803, \"match_probability\": 0.9238137785296746, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1093.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 938.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5381585425898572, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.46184145741014276, \"precision\": 1.0, \"recall\": 0.5381585425898572, \"specificity\": 1.0, \"npv\": 0.5496879500720115, \"accuracy\": 0.7046599496221663, \"f1\": 0.6997439180537772, \"f2\": 0.5929261147878919, \"f0_5\": 0.8535061689832891, \"p4\": 0.7045475528853299, \"phi\": 0.5438926972206558}, {\"truth_threshold\": 3.7000000551342964, \"match_probability\": 0.9285512128432143, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1089.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 942.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5361890694239291, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4638109305760709, \"precision\": 1.0, \"recall\": 0.5361890694239291, \"specificity\": 1.0, \"npv\": 0.5486344034499281, \"accuracy\": 0.7034005037783375, \"f1\": 0.698076923076923, \"f2\": 0.5910126994464344, \"f0_5\": 0.8525129168623767, \"p4\": 0.7032693518148777, \"phi\": 0.5423760413585481}, {\"truth_threshold\": 3.8000000566244125, \"match_probability\": 0.9330154225613858, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1084.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 947.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.533727227966519, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46627277203348105, \"precision\": 1.0, \"recall\": 0.533727227966519, \"specificity\": 1.0, \"npv\": 0.5473231357552581, \"accuracy\": 0.7018261964735516, \"f1\": 0.6959871589085073, \"f2\": 0.5886185925282363, \"f0_5\": 0.8512643317103816, \"p4\": 0.701669388939399, \"phi\": 0.5404824326919393}, {\"truth_threshold\": 3.9000000581145287, \"match_probability\": 0.9372195616099515, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1074.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 957.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5288035450516987, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4711964549483013, 
\"precision\": 1.0, \"recall\": 0.5288035450516987, \"specificity\": 1.0, \"npv\": 0.5447193149381542, \"accuracy\": 0.6986775818639799, \"f1\": 0.6917874396135266, \"f2\": 0.58382257012394, \"f0_5\": 0.8487434803224277, \"p4\": 0.698461896288862, \"phi\": 0.5367024359898404}, {\"truth_threshold\": 4.000000059604645, \"match_probability\": 0.9411764728755594, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1067.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 964.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5253569670113245, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.47464303298867555, \"precision\": 1.0, \"recall\": 0.5253569670113245, \"specificity\": 1.0, \"npv\": 0.5429113323850165, \"accuracy\": 0.6964735516372796, \"f1\": 0.6888315041962556, \"f2\": 0.5804591448155805, \"f0_5\": 0.8469598348944277, \"p4\": 0.6962104664501566, \"phi\": 0.5340620291107292}, {\"truth_threshold\": 4.200000062584877, \"match_probability\": 0.9483982147343843, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1030.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1001.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5071393402264894, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.49286065977351057, \"precision\": 1.0, \"recall\": 0.5071393402264894, \"specificity\": 1.0, \"npv\": 0.5335507921714818, \"accuracy\": 0.684823677581864, \"f1\": 0.6729826853969291, \"f2\": 0.5625955866287962, \"f0_5\": 0.8372622337831247, \"p4\": 0.6842191143036372, \"phi\": 0.5201774665622936}, {\"truth_threshold\": 4.300000064074993, \"match_probability\": 0.9516868803254299, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1025.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1006.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5046774987690793, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.49532250123092075, \"precision\": 1.0, 
\"recall\": 0.5046774987690793, \"specificity\": 1.0, \"npv\": 0.5323105532310554, \"accuracy\": 0.6832493702770781, \"f1\": 0.6708115183246073, \"f2\": 0.5601705104382992, \"f0_5\": 0.8359158375468928, \"p4\": 0.6825861647803277, \"phi\": 0.518309905918297}, {\"truth_threshold\": 4.400000065565109, \"match_probability\": 0.9547759482410569, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1020.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1011.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5022156573116692, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4977843426883309, \"precision\": 1.0, \"recall\": 0.5022156573116692, \"specificity\": 1.0, \"npv\": 0.5310760667903525, \"accuracy\": 0.6816750629722922, \"f1\": 0.6686332350049164, \"f2\": 0.5577427821522309, \"f0_5\": 0.8345606283750614, \"p4\": 0.6809500591436524, \"phi\": 0.5164443009324556}, {\"truth_threshold\": 4.500000067055225, \"match_probability\": 0.9576762895591182, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1014.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1017.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.49926144756277696, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5007385524372231, \"precision\": 1.0, \"recall\": 0.49926144756277696, \"specificity\": 1.0, \"npv\": 0.5296022201665125, \"accuracy\": 0.6797858942065491, \"f1\": 0.6660098522167488, \"f2\": 0.5548260013131976, \"f0_5\": 0.8329226219812715, \"p4\": 0.6789824824414316, \"phi\": 0.5142081009404593}, {\"truth_threshold\": 4.6000000685453415, \"match_probability\": 0.9603983391922627, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1010.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1021.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.49729197439684886, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5027080256031512, \"precision\": 1.0, \"recall\": 
0.49729197439684886, \"specificity\": 1.0, \"npv\": 0.528624192059095, \"accuracy\": 0.6785264483627204, \"f1\": 0.6642551792173627, \"f2\": 0.5528793518721261, \"f0_5\": 0.8318234228298468, \"p4\": 0.6776681406756905, \"phi\": 0.5127188003018871}, {\"truth_threshold\": 4.700000070035458, \"match_probability\": 0.9629520927573305, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1005.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1026.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4948301329394387, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5051698670605613, \"precision\": 1.0, \"recall\": 0.4948301329394387, \"specificity\": 1.0, \"npv\": 0.5274067250115154, \"accuracy\": 0.6769521410579346, \"f1\": 0.6620553359683794, \"f2\": 0.5504436411436083, \"f0_5\": 0.8304412493802678, \"p4\": 0.6760222065562214, \"phi\": 0.5108588257538497}, {\"truth_threshold\": 4.800000071525574, \"match_probability\": 0.9653471069144568, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 998.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1033.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4913835548990645, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5086164451009355, \"precision\": 1.0, \"recall\": 0.4913835548990645, \"specificity\": 1.0, \"npv\": 0.5257116620752984, \"accuracy\": 0.6747481108312342, \"f1\": 0.6589633542423242, \"f2\": 0.5470291602718702, \"f0_5\": 0.8284907853229287, \"p4\": 0.6737121749549234, \"phi\": 0.508257872897662}, {\"truth_threshold\": 4.90000007301569, \"match_probability\": 0.9675925026740654, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 995.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1036.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4899064500246184, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5100935499753816, \"precision\": 1.0, \"recall\": 0.4899064500246184, 
\"specificity\": 1.0, \"npv\": 0.5249885373681797, \"accuracy\": 0.6738035264483627, \"f1\": 0.6576338400528751, \"f2\": 0.5455642066016011, \"f0_5\": 0.8276493095990684, \"p4\": 0.6727200795968197, \"phi\": 0.5071442306145872}, {\"truth_threshold\": 5.000000074505806, \"match_probability\": 0.969696971214501, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 986.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1045.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4854751354012802, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5145248645987198, \"precision\": 1.0, \"recall\": 0.4854751354012802, \"specificity\": 1.0, \"npv\": 0.5228310502283106, \"accuracy\": 0.6709697732997482, \"f1\": 0.6536294332117998, \"f2\": 0.5411635565312843, \"f0_5\": 0.8251046025104602, \"p4\": 0.6697361249633867, \"phi\": 0.5038069817912239}, {\"truth_threshold\": 5.100000075995922, \"match_probability\": 0.9716687817966767, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 979.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1052.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.48202855736090594, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.517971442639094, \"precision\": 1.0, \"recall\": 0.48202855736090594, \"specificity\": 1.0, \"npv\": 0.5211652253072372, \"accuracy\": 0.6687657430730478, \"f1\": 0.6504983388704318, \"f2\": 0.5377348126991102, \"f0_5\": 0.8231040860938288, \"p4\": 0.6674071352914174, \"phi\": 0.5012150453662769}, {\"truth_threshold\": 5.200000077486038, \"match_probability\": 0.9735157914041783, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 974.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1057.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4795667159034958, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5204332840965041, \"precision\": 1.0, \"recall\": 0.4795667159034958, \"specificity\": 1.0, 
\"npv\": 0.5199818346957311, \"accuracy\": 0.667191435768262, \"f1\": 0.648252911813644, \"f2\": 0.5352824796658606, \"f0_5\": 0.8216635734773072, \"p4\": 0.6657391023001616, \"phi\": 0.4993655783036174}, {\"truth_threshold\": 5.300000078976154, \"match_probability\": 0.9752454557772836, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 965.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1066.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4751354012801576, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5248645987198425, \"precision\": 1.0, \"recall\": 0.4751354012801576, \"specificity\": 1.0, \"npv\": 0.5178652193577566, \"accuracy\": 0.6643576826196473, \"f1\": 0.644192256341789, \"f2\": 0.5308614809109913, \"f0_5\": 0.8190460023765065, \"p4\": 0.6627270219044649, \"phi\": 0.4960404205390772}, {\"truth_threshold\": 5.4000000804662704, \"match_probability\": 0.9768648415470134, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 961.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1070.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4731659281142294, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5268340718857706, \"precision\": 1.0, \"recall\": 0.4731659281142294, \"specificity\": 1.0, \"npv\": 0.5169300225733634, \"accuracy\": 0.6630982367758187, \"f1\": 0.642379679144385, \"f2\": 0.5288937809576224, \"f0_5\": 0.8178723404255319, \"p4\": 0.661384263989902, \"phi\": 0.49456412516582227}, {\"truth_threshold\": 5.500000081956387, \"match_probability\": 0.9783806392104205, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 946.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1085.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.465780403741999, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.534219596258001, \"precision\": 1.0, \"recall\": 0.465780403741999, \"specificity\": 1.0, \"npv\": 0.5134529147982063, 
\"accuracy\": 0.658375314861461, \"f1\": 0.6355391333557272, \"f2\": 0.5214994487320838, \"f0_5\": 0.8134135855546002, \"p4\": 0.656325954359785, \"phi\": 0.48903609882831217}, {\"truth_threshold\": 5.600000083446503, \"match_probability\": 0.9797991767207457, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 927.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1104.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4564254062038405, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5435745937961596, \"precision\": 1.0, \"recall\": 0.4564254062038405, \"specificity\": 1.0, \"npv\": 0.509115162294353, \"accuracy\": 0.6523929471032746, \"f1\": 0.6267748478701826, \"f2\": 0.5120981107059993, \"f0_5\": 0.8076319916361735, \"p4\": 0.64986435329492, \"phi\": 0.482050925478558}, {\"truth_threshold\": 5.700000084936619, \"match_probability\": 0.9811264334957893, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 926.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1105.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.45593303791235845, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5440669620876416, \"precision\": 1.0, \"recall\": 0.45593303791235845, \"specificity\": 1.0, \"npv\": 0.5088888888888888, \"accuracy\": 0.6520780856423174, \"f1\": 0.6263104497801826, \"f2\": 0.5116022099447514, \"f0_5\": 0.8073234524847428, \"p4\": 0.6495225157687339, \"phi\": 0.48168377289561637}, {\"truth_threshold\": 5.800000086426735, \"match_probability\": 0.9823680546749124, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 924.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1107.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4549483013293944, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5450516986706057, \"precision\": 1.0, \"recall\": 0.4549483013293944, \"specificity\": 1.0, \"npv\": 0.5084369449378331, \"accuracy\": 
0.6514483627204031, \"f1\": 0.6253807106598985, \"f2\": 0.5106100795755968, \"f0_5\": 0.8067050811943426, \"p4\": 0.6488383014404575, \"phi\": 0.4809496069575002}, {\"truth_threshold\": 5.900000087916851, \"match_probability\": 0.9835293654795508, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 917.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1114.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.45150172328902016, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5484982767109798, \"precision\": 1.0, \"recall\": 0.45150172328902016, \"specificity\": 1.0, \"npv\": 0.5068614431164232, \"accuracy\": 0.6492443324937027, \"f1\": 0.6221166892808684, \"f2\": 0.507134166574494, \"f0_5\": 0.8045271100193017, \"p4\": 0.6464378241531998, \"phi\": 0.4783814534822862}, {\"truth_threshold\": 6.000000089406967, \"match_probability\": 0.9846153855541349, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 909.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1122.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.44756277695716395, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.552437223042836, \"precision\": 1.0, \"recall\": 0.44756277695716395, \"specificity\": 1.0, \"npv\": 0.5050727834142038, \"accuracy\": 0.6467254408060453, \"f1\": 0.6183673469387755, \"f2\": 0.503155097974095, \"f0_5\": 0.8020116463737427, \"p4\": 0.6436833004319238, \"phi\": 0.4754490272472384}, {\"truth_threshold\": 6.100000090897083, \"match_probability\": 0.985630843183972, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 900.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1131.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4431314623338257, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5568685376661743, \"precision\": 1.0, \"recall\": 0.4431314623338257, \"specificity\": 1.0, \"npv\": 0.5030755711775043, \"accuracy\": 0.6438916876574308, \"f1\": 
0.6141248720573184, \"f2\": 0.49867021276595747, \"f0_5\": 0.7991475759190197, \"p4\": 0.6405698927488327, \"phi\": 0.4721531674364921}, {\"truth_threshold\": 6.200000092387199, \"match_probability\": 0.9865801893041345, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 894.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1137.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4401772525849335, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5598227474150664, \"precision\": 1.0, \"recall\": 0.4401772525849335, \"specificity\": 1.0, \"npv\": 0.5017528483786152, \"accuracy\": 0.6420025188916877, \"f1\": 0.6112820512820513, \"f2\": 0.49567531603459747, \"f0_5\": 0.7972177635098984, \"p4\": 0.6384854939205781, \"phi\": 0.4699576473215044}, {\"truth_threshold\": 6.3000000938773155, \"match_probability\": 0.987467611228855, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 892.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1139.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.43919251600196946, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5608074839980305, \"precision\": 1.0, \"recall\": 0.43919251600196946, \"specificity\": 1.0, \"npv\": 0.5013134851138353, \"accuracy\": 0.6413727959697733, \"f1\": 0.61033185083818, \"f2\": 0.49467613132209404, \"f0_5\": 0.7965708162171816, \"p4\": 0.6377891010276633, \"phi\": 0.46922609777468816}, {\"truth_threshold\": 6.400000095367432, \"match_probability\": 0.9882970460445225, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 887.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1144.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4367306745445593, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5632693254554406, \"precision\": 1.0, \"recall\": 0.4367306745445593, \"specificity\": 1.0, \"npv\": 0.5002184359982526, \"accuracy\": 0.6397984886649875, \"f1\": 0.6079506511309116, 
\"f2\": 0.4921762290533792, \"f0_5\": 0.7949453307044273, \"p4\": 0.6360445825846852, \"phi\": 0.46739783372748034}, {\"truth_threshold\": 6.500000096857548, \"match_probability\": 0.9890721936212699, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 884.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1147.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.43525356967011325, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5647464303298868, \"precision\": 1.0, \"recall\": 0.43525356967011325, \"specificity\": 1.0, \"npv\": 0.49956369982547993, \"accuracy\": 0.6388539042821159, \"f1\": 0.6065180102915952, \"f2\": 0.49067495559502666, \"f0_5\": 0.793964433267469, \"p4\": 0.63499541952982, \"phi\": 0.4663012798895679}, {\"truth_threshold\": 6.600000098347664, \"match_probability\": 0.9897965292084853, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 876.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1155.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.43131462333825704, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.568685376661743, \"precision\": 1.0, \"recall\": 0.43131462333825704, \"specificity\": 1.0, \"npv\": 0.49782608695652175, \"accuracy\": 0.6363350125944585, \"f1\": 0.6026831785345718, \"f2\": 0.4866666666666667, \"f0_5\": 0.7913279132791328, \"p4\": 0.6321884993464733, \"phi\": 0.46337853983930954}, {\"truth_threshold\": 6.70000009983778, \"match_probability\": 0.9904733155885336, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 866.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1165.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4263909404234367, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5736090595765633, \"precision\": 1.0, \"recall\": 0.4263909404234367, \"specificity\": 1.0, \"npv\": 0.49567099567099565, \"accuracy\": 0.6331863979848866, \"f1\": 0.597859855022437, \"f2\": 
0.48164627363737483, \"f0_5\": 0.7879890809827116, \"p4\": 0.628660687467854, \"phi\": 0.45972776942966703}, {\"truth_threshold\": 6.800000101327896, \"match_probability\": 0.9911056147706719, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 856.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1175.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.42146725750861647, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5785327424913835, \"precision\": 1.0, \"recall\": 0.42146725750861647, \"specificity\": 1.0, \"npv\": 0.49353448275862066, \"accuracy\": 0.6300377833753149, \"f1\": 0.5930031174229303, \"f2\": 0.4766146993318486, \"f0_5\": 0.7846012832263978, \"p4\": 0.6251109156992851, \"phi\": 0.45607962565127746}, {\"truth_threshold\": 6.900000102818012, \"match_probability\": 0.9916962992137202, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 851.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1180.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4190054160512063, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5809945839487937, \"precision\": 1.0, \"recall\": 0.4190054160512063, \"specificity\": 1.0, \"npv\": 0.4924731182795699, \"accuracy\": 0.628463476070529, \"f1\": 0.5905621096460791, \"f2\": 0.4740947075208914, \"f0_5\": 0.7828886844526219, \"p4\": 0.6233275653061159, \"phi\": 0.45425642958439905}, {\"truth_threshold\": 7.000000104308128, \"match_probability\": 0.9922480625716311, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 847.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1184.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4170359428852782, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5829640571147218, \"precision\": 1.0, \"recall\": 0.4170359428852782, \"specificity\": 1.0, \"npv\": 0.49162730785744957, \"accuracy\": 0.6272040302267002, \"f1\": 0.5886031966643502, \"f2\": 0.4720766915616988, 
\"f0_5\": 0.78150950359845, \"p4\": 0.621896736471326, \"phi\": 0.4527982529565263}, {\"truth_threshold\": 7.1000001057982445, \"match_probability\": 0.9927634299608046, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 834.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1197.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.41063515509601184, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5893648449039882, \"precision\": 1.0, \"recall\": 0.41063515509601184, \"specificity\": 1.0, \"npv\": 0.4888983774551665, \"accuracy\": 0.6231108312342569, \"f1\": 0.5821989528795811, \"f2\": 0.4655056932350971, \"f0_5\": 0.7769703745108999, \"p4\": 0.6172204525656357, \"phi\": 0.44806122466967707}, {\"truth_threshold\": 7.200000107288361, \"match_probability\": 0.9932447677519157, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 831.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1200.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4091580502215657, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5908419497784343, \"precision\": 1.0, \"recall\": 0.4091580502215657, \"specificity\": 1.0, \"npv\": 0.488272921108742, \"accuracy\": 0.6221662468513854, \"f1\": 0.5807127882599581, \"f2\": 0.4639865996649916, \"f0_5\": 0.7759103641456583, \"p4\": 0.6161355181490583, \"phi\": 0.44696845120974843}, {\"truth_threshold\": 7.300000108778477, \"match_probability\": 0.9936942928922654, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 829.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1202.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.40817331363860165, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5918266863613983, \"precision\": 1.0, \"recall\": 0.40817331363860165, \"specificity\": 1.0, \"npv\": 0.4878568385172561, \"accuracy\": 0.621536523929471, \"f1\": 0.5797202797202797, \"f2\": 0.4629733050374176, \"f0_5\": 
0.7752010473162521, \"p4\": 0.6154109979823547, \"phi\": 0.44624000533215374}, {\"truth_threshold\": 7.400000110268593, \"match_probability\": 0.9941140817673122, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 827.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1204.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4071885770556376, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5928114229443624, \"precision\": 1.0, \"recall\": 0.4071885770556376, \"specificity\": 1.0, \"npv\": 0.4874414644529587, \"accuracy\": 0.6209068010075567, \"f1\": 0.5787263820853744, \"f2\": 0.46195955759133056, \"f0_5\": 0.7744896047949054, \"p4\": 0.6146854858236389, \"phi\": 0.4455116118672065}, {\"truth_threshold\": 7.500000111758709, \"match_probability\": 0.9945060786121668, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 817.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1214.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.40226489414081734, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5977351058591827, \"precision\": 1.0, \"recall\": 0.40226489414081734, \"specificity\": 1.0, \"npv\": 0.4853751589656634, \"accuracy\": 0.6177581863979849, \"f1\": 0.5737359550561798, \"f2\": 0.4568840174477128, \"f0_5\": 0.7709001698433666, \"p4\": 0.611042815748838, \"phi\": 0.4418703281958464}, {\"truth_threshold\": 7.600000113248825, \"match_probability\": 0.9948721034855129, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 805.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1226.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.396356474643033, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.603643525356967, \"precision\": 1.0, \"recall\": 0.396356474643033, \"specificity\": 1.0, \"npv\": 0.48291859974694223, \"accuracy\": 0.6139798488664987, \"f1\": 0.5677009873060649, \"f2\": 0.45077836263859333, \"f0_5\": 0.7665206627309084, 
\"p4\": 0.6066374884822956, \"phi\": 0.43750190140758005}, {\"truth_threshold\": 7.700000114738941, \"match_probability\": 0.9952138598197071, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 803.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1228.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3953717380600689, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.604628261939931, \"precision\": 1.0, \"recall\": 0.3953717380600689, \"specificity\": 1.0, \"npv\": 0.48251158870627897, \"accuracy\": 0.6133501259445844, \"f1\": 0.5666901905434015, \"f2\": 0.4497591576117397, \"f0_5\": 0.7657829486934961, \"p4\": 0.6058995526108901, \"phi\": 0.436773906570581}, {\"truth_threshold\": 7.800000116229057, \"match_probability\": 0.9955329415617687, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 796.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1235.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.39192516001969474, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6080748399803053, \"precision\": 1.0, \"recall\": 0.39192516001969474, \"specificity\": 1.0, \"npv\": 0.4810924369747899, \"accuracy\": 0.6111460957178841, \"f1\": 0.5631411390166254, \"f2\": 0.4461883408071749, \"f0_5\": 0.763183125599233, \"p4\": 0.6033082263812478, \"phi\": 0.4342260129766634}, {\"truth_threshold\": 7.900000117719173, \"match_probability\": 0.99583083992065, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 794.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1237.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3909404234367307, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6090595765632694, \"precision\": 1.0, \"recall\": 0.3909404234367307, \"specificity\": 1.0, \"npv\": 0.480688497061293, \"accuracy\": 0.6105163727959698, \"f1\": 0.5621238938053097, \"f2\": 0.44516707782013903, \"f0_5\": 0.7624351834069522, \"p4\": 0.6025653750424809, 
\"phi\": 0.43349805603059816}, {\"truth_threshold\": 8.00000011920929, \"match_probability\": 0.9961089497366072, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 785.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1246.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3865091088133924, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6134908911866076, \"precision\": 1.0, \"recall\": 0.3865091088133924, \"specificity\": 1.0, \"npv\": 0.47887913007109995, \"accuracy\": 0.6076826196473551, \"f1\": 0.5575284090909091, \"f2\": 0.4405657200583679, \"f0_5\": 0.7590408044865596, \"p4\": 0.5992086772176372, \"phi\": 0.4302222051371983}, {\"truth_threshold\": 8.100000120699406, \"match_probability\": 0.9963685754887298, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 772.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1259.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.38010832102412606, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6198916789758739, \"precision\": 1.0, \"recall\": 0.38010832102412606, \"specificity\": 1.0, \"npv\": 0.4762895174708819, \"accuracy\": 0.6035894206549118, \"f1\": 0.5508383874420264, \"f2\": 0.43390287769784175, \"f0_5\": 0.7540535260793124, \"p4\": 0.5943189937980553, \"phi\": 0.4254898457157915}, {\"truth_threshold\": 8.200000122189522, \"match_probability\": 0.9966109369567457, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 765.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1266.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3766617429837518, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6233382570162481, \"precision\": 1.0, \"recall\": 0.3766617429837518, \"specificity\": 1.0, \"npv\": 0.47490667772708417, \"accuracy\": 0.6013853904282116, \"f1\": 0.5472103004291845, \"f2\": 0.43030712116098546, \"f0_5\": 0.7513258691809075, \"p4\": 0.591665315716949, \"phi\": 
0.42294110344976693}, {\"truth_threshold\": 8.300000123679638, \"match_probability\": 0.9968371745531442, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 753.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1278.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3707533234859675, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6292466765140325, \"precision\": 1.0, \"recall\": 0.3707533234859675, \"specificity\": 1.0, \"npv\": 0.4725546842756913, \"accuracy\": 0.5976070528967254, \"f1\": 0.540948275862069, \"f2\": 0.42412977357215276, \"f0_5\": 0.7465794170136824, \"p4\": 0.5870811305491775, \"phi\": 0.41857044774335733}, {\"truth_threshold\": 8.400000125169754, \"match_probability\": 0.9970483543414643, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 749.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1282.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3687838503200394, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6312161496799606, \"precision\": 1.0, \"recall\": 0.3687838503200394, \"specificity\": 1.0, \"npv\": 0.4717758549649773, \"accuracy\": 0.5963476070528967, \"f1\": 0.5388489208633094, \"f2\": 0.4220669446635862, \"f0_5\": 0.7449771235329222, \"p4\": 0.5855429896383528, \"phi\": 0.41711307373662215}, {\"truth_threshold\": 8.50000012665987, \"match_probability\": 0.997245472756309, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 745.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1286.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.36681437715411125, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6331856228458888, \"precision\": 1.0, \"recall\": 0.36681437715411125, \"specificity\": 1.0, \"npv\": 0.4709995886466475, \"accuracy\": 0.595088161209068, \"f1\": 0.5367435158501441, \"f2\": 0.4200022550456647, \"f0_5\": 0.7433645978846538, \"p4\": 0.5839996987663111, \"phi\": 0.415655411066983}, 
{\"truth_threshold\": 8.600000128149986, \"match_probability\": 0.9974294610402847, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 736.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1295.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.362383062530773, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.637616937469227, \"precision\": 1.0, \"recall\": 0.362383062530773, \"specificity\": 1.0, \"npv\": 0.4692622950819672, \"accuracy\": 0.5922544080604534, \"f1\": 0.5319840983014095, \"f2\": 0.4153498871331828, \"f0_5\": 0.7396984924623116, \"p4\": 0.5805081133675749, \"phi\": 0.4123744749884776}, {\"truth_threshold\": 8.700000129640102, \"match_probability\": 0.997601189412643, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 732.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1299.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3604135893648449, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6395864106351551, \"precision\": 1.0, \"recall\": 0.3604135893648449, \"specificity\": 1.0, \"npv\": 0.468494271685761, \"accuracy\": 0.5909949622166247, \"f1\": 0.5298588490770901, \"f2\": 0.4132791327913279, \"f0_5\": 0.7380520266182699, \"p4\": 0.5789476138201, \"phi\": 0.41091568728284633}, {\"truth_threshold\": 8.800000131130219, \"match_probability\": 0.997761470983937, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 718.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1313.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3535204332840965, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6464795667159035, \"precision\": 1.0, \"recall\": 0.3535204332840965, \"specificity\": 1.0, \"npv\": 0.46582587469487385, \"accuracy\": 0.5865869017632241, \"f1\": 0.5223717715532921, \"f2\": 0.4060167382945035, \"f0_5\": 0.7322047725882113, \"p4\": 0.5734425262692389, \"phi\": 0.40580656113113184}, {\"truth_threshold\": 
8.900000132620335, \"match_probability\": 0.9979110654305032, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 708.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1323.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.34859675036927623, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6514032496307238, \"precision\": 1.0, \"recall\": 0.34859675036927623, \"specificity\": 1.0, \"npv\": 0.4639384116693679, \"accuracy\": 0.5834382871536524, \"f1\": 0.5169769989047097, \"f2\": 0.4008152173913043, \"f0_5\": 0.7279457125231339, \"p4\": 0.5694677651256314, \"phi\": 0.4021534814960889}, {\"truth_threshold\": 9.00000013411045, \"match_probability\": 0.9980506824420605, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 704.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1327.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3466272772033481, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6533727227966519, \"precision\": 1.0, \"recall\": 0.3466272772033481, \"specificity\": 1.0, \"npv\": 0.46318770226537215, \"accuracy\": 0.5821788413098237, \"f1\": 0.5148080438756856, \"f2\": 0.3987313094698686, \"f0_5\": 0.7262224056117186, \"p4\": 0.5678676238912578, \"phi\": 0.4006912677739821}, {\"truth_threshold\": 9.100000135600567, \"match_probability\": 0.9981809849551747, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 700.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1331.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.34465780403741997, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.65534219596258, \"precision\": 1.0, \"recall\": 0.34465780403741997, \"specificity\": 1.0, \"npv\": 0.4624394184168013, \"accuracy\": 0.5809193954659949, \"f1\": 0.5126327352618089, \"f2\": 0.39664551223934724, \"f0_5\": 0.724487683709377, \"p4\": 0.5662615170898467, \"phi\": 0.39922844895106907}, {\"truth_threshold\": 9.200000137090683, 
\"match_probability\": 0.9983025921847976, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 690.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1341.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3397341211225997, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6602658788774003, \"precision\": 1.0, \"recall\": 0.3397341211225997, \"specificity\": 1.0, \"npv\": 0.46057924376508447, \"accuracy\": 0.5777707808564232, \"f1\": 0.5071664829106945, \"f2\": 0.39142273655547993, \"f0_5\": 0.720100187852223, \"p4\": 0.5622196307198002, \"phi\": 0.3955685586441907}, {\"truth_threshold\": 9.300000138580799, \"match_probability\": 0.9984160824655384, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 684.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1347.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.33677991137370755, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6632200886262924, \"precision\": 1.0, \"recall\": 0.33677991137370755, \"specificity\": 1.0, \"npv\": 0.45947030497592295, \"accuracy\": 0.5758816120906801, \"f1\": 0.5038674033149171, \"f2\": 0.388283378746594, \"f0_5\": 0.7174323473882945, \"p4\": 0.5597758409315445, \"phi\": 0.3933705232838903}, {\"truth_threshold\": 9.400000140070915, \"match_probability\": 0.9985219959137808, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 661.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1370.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3254554406696209, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6745445593303792, \"precision\": 1.0, \"recall\": 0.3254554406696209, \"specificity\": 1.0, \"npv\": 0.4552683896620278, \"accuracy\": 0.568639798488665, \"f1\": 0.49108469539375926, \"f2\": 0.3762094479225953, \"f0_5\": 0.706951871657754, \"p4\": 0.550272647956958, \"phi\": 0.38492801194561554}, {\"truth_threshold\": 9.500000141561031, \"match_probability\": 
0.9986208369212233, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 655.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1376.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3225012309207287, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6774987690792713, \"precision\": 1.0, \"recall\": 0.3225012309207287, \"specificity\": 1.0, \"npv\": 0.45418484728282427, \"accuracy\": 0.5667506297229219, \"f1\": 0.48771407297096053, \"f2\": 0.3730493222462695, \"f0_5\": 0.7041496452375833, \"p4\": 0.5477568608833787, \"phi\": 0.38272074978272863}, {\"truth_threshold\": 9.600000143051147, \"match_probability\": 0.9987130764898899, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 641.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1390.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3156080748399803, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6843919251600197, \"precision\": 1.0, \"recall\": 0.3156080748399803, \"specificity\": 1.0, \"npv\": 0.4516765285996055, \"accuracy\": 0.5623425692695214, \"f1\": 0.47979041916167664, \"f2\": 0.3656588705077011, \"f0_5\": 0.6974972796517954, \"p4\": 0.5418247722541304, \"phi\": 0.3775615971490305}, {\"truth_threshold\": 9.700000144541264, \"match_probability\": 0.9987991544181472, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 627.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1404.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3087149187592319, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.691285081240768, \"precision\": 1.0, \"recall\": 0.3087149187592319, \"specificity\": 1.0, \"npv\": 0.4491957630443311, \"accuracy\": 0.5579345088161209, \"f1\": 0.4717832957110609, \"f2\": 0.35824477202605415, \"f0_5\": 0.6906807666886979, \"p4\": 0.535802646044032, \"phi\": 0.3723888203144958}, {\"truth_threshold\": 9.80000014603138, \"match_probability\": 0.9988794813467569, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 625.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1406.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.30773018217626785, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6922698178237322, \"precision\": 1.0, \"recall\": 0.30773018217626785, \"specificity\": 1.0, \"npv\": 0.448843590748726, \"accuracy\": 0.5573047858942065, \"f1\": 0.47063253012048195, \"f2\": 0.35718367813464397, \"f0_5\": 0.689693224453763, \"p4\": 0.5349347426333179, \"phi\": 0.37164865121476715}, {\"truth_threshold\": 9.900000147521496, \"match_probability\": 0.9989544406735176, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 620.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1411.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3052683407188577, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6947316592811423, \"precision\": 1.0, \"recall\": 0.3052683407188577, \"specificity\": 1.0, \"npv\": 0.44796557120500785, \"accuracy\": 0.5557304785894207, \"f1\": 0.46774801961523954, \"f2\": 0.3545288197621226, \"f0_5\": 0.6872090445577477, \"p4\": 0.5327564744189275, \"phi\": 0.36979684506621746}, {\"truth_threshold\": 10.000000149011612, \"match_probability\": 0.9990243903445719, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 616.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1415.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3032988675529296, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6967011324470704, \"precision\": 1.0, \"recall\": 0.3032988675529296, \"specificity\": 1.0, \"npv\": 0.447265625, \"accuracy\": 0.554471032745592, \"f1\": 0.4654325651681148, \"f2\": 0.3524027459954233, \"f0_5\": 0.6852057842046718, \"p4\": 0.5310049980284561, \"phi\": 0.36831393885903}, {\"truth_threshold\": 10.100000150501728, \"match_probability\": 0.9990896645300149, \"total_clerical_labels\": 
3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 614.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1417.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3023141309699655, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6976858690300345, \"precision\": 1.0, \"recall\": 0.3023141309699655, \"specificity\": 1.0, \"npv\": 0.44691647150663544, \"accuracy\": 0.5538413098236776, \"f1\": 0.46427221172022687, \"f2\": 0.3513389791714351, \"f0_5\": 0.6841987965232895, \"p4\": 0.5301262691052692, \"phi\": 0.367571985738429}, {\"truth_threshold\": 10.200000151991844, \"match_probability\": 0.9991505751910027, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 600.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1431.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.29542097488921715, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7045790251107829, \"precision\": 1.0, \"recall\": 0.29542097488921715, \"specificity\": 1.0, \"npv\": 0.44448757763975155, \"accuracy\": 0.549433249370277, \"f1\": 0.45610034207525657, \"f2\": 0.343878954607978, \"f0_5\": 0.6770480704129993, \"p4\": 0.5239180605880351, \"phi\": 0.3623685327294328}, {\"truth_threshold\": 10.30000015348196, \"match_probability\": 0.9992074135451509, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 592.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1439.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2914820285573609, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7085179714426391, \"precision\": 1.0, \"recall\": 0.2914820285573609, \"specificity\": 1.0, \"npv\": 0.44311145510835914, \"accuracy\": 0.5469143576826196, \"f1\": 0.45139153640869234, \"f2\": 0.33960532354290957, \"f0_5\": 0.6728802000454649, \"p4\": 0.5203244044266416, \"phi\": 0.3593870139723867}, {\"truth_threshold\": 10.400000154972076, \"match_probability\": 0.9992604514366183, \"total_clerical_labels\": 3176.0, \"p\": 
2031.0, \"n\": 1145.0, \"tp\": 590.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1441.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.29049729197439683, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7095027080256031, \"precision\": 1.0, \"recall\": 0.29049729197439683, \"specificity\": 1.0, \"npv\": 0.44276875483372, \"accuracy\": 0.5462846347607053, \"f1\": 0.45020984357115607, \"f2\": 0.33853568969474407, \"f0_5\": 0.6718287406057846, \"p4\": 0.5194206063238911, \"phi\": 0.3586406617354916}, {\"truth_threshold\": 10.500000156462193, \"match_probability\": 0.9993099426168967, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 570.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1461.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.28064992614475626, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7193500738552437, \"precision\": 1.0, \"recall\": 0.28064992614475626, \"specificity\": 1.0, \"npv\": 0.43937068303914045, \"accuracy\": 0.5399874055415617, \"f1\": 0.43829296424452135, \"f2\": 0.3278122843340235, \"f0_5\": 0.6610995128740431, \"p4\": 0.5102600262107828, \"phi\": 0.35115431044642736}, {\"truth_threshold\": 10.600000157952309, \"match_probability\": 0.9993561239419685, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 565.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1466.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2781880846873461, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7218119153126539, \"precision\": 1.0, \"recall\": 0.2781880846873461, \"specificity\": 1.0, \"npv\": 0.4385292991191114, \"accuracy\": 0.5384130982367759, \"f1\": 0.4352850539291217, \"f2\": 0.32512371964552883, \"f0_5\": 0.6583546958750874, \"p4\": 0.5079339530793631, \"phi\": 0.3492758591732757}, {\"truth_threshold\": 10.700000159442425, \"match_probability\": 0.9993992164911604, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 
1145.0, \"tp\": 543.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1488.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2673559822747415, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7326440177252584, \"precision\": 1.0, \"recall\": 0.2673559822747415, \"specificity\": 1.0, \"npv\": 0.4348651728066844, \"accuracy\": 0.5314861460957179, \"f1\": 0.4219114219114219, \"f2\": 0.31325718241606093, \"f0_5\": 0.6459671663097787, \"p4\": 0.49751894698684695, \"phi\": 0.3409747870925449}, {\"truth_threshold\": 10.800000160932541, \"match_probability\": 0.9994394266126935, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 530.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1501.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.26095519448547516, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7390448055145249, \"precision\": 1.0, \"recall\": 0.26095519448547516, \"specificity\": 1.0, \"npv\": 0.4327286470143613, \"accuracy\": 0.5273929471032746, \"f1\": 0.4139008199921905, \"f2\": 0.30621677836838457, \"f0_5\": 0.6384003854492893, \"p4\": 0.49121990458507664, \"phi\": 0.3360398610895279}, {\"truth_threshold\": 10.900000162422657, \"match_probability\": 0.9994769469006325, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 513.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1518.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.25258493353028066, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7474150664697193, \"precision\": 1.0, \"recall\": 0.25258493353028066, \"specificity\": 1.0, \"npv\": 0.42996620352985354, \"accuracy\": 0.5220403022670025, \"f1\": 0.4033018867924528, \"f2\": 0.29697811740187563, \"f0_5\": 0.6282145481263777, \"p4\": 0.4828102344574534, \"phi\": 0.32954966991161616}, {\"truth_threshold\": 11.000000163912773, \"match_probability\": 0.9995119571076428, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, 
\"tp\": 508.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1523.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.25012309207287053, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7498769079271295, \"precision\": 1.0, \"recall\": 0.25012309207287053, \"specificity\": 1.0, \"npv\": 0.42916041979010494, \"accuracy\": 0.5204659949622166, \"f1\": 0.40015754233950374, \"f2\": 0.2942539388322521, \"f0_5\": 0.6251538272212651, \"p4\": 0.48029802489967816, \"phi\": 0.32763231097251716}, {\"truth_threshold\": 11.10000016540289, \"match_probability\": 0.9995446249976983, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 502.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1529.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.24716888232397832, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7528311176760216, \"precision\": 1.0, \"recall\": 0.24716888232397832, \"specificity\": 1.0, \"npv\": 0.4281974569932685, \"accuracy\": 0.5185768261964736, \"f1\": 0.3963679431504145, \"f2\": 0.2909807558543937, \"f0_5\": 0.6214409507303789, \"p4\": 0.4772593147830282, \"phi\": 0.3253261238495857}, {\"truth_threshold\": 11.200000166893005, \"match_probability\": 0.9995751071426191, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 499.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1532.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.24569177744953224, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7543082225504677, \"precision\": 1.0, \"recall\": 0.24569177744953224, \"specificity\": 1.0, \"npv\": 0.42771759432200224, \"accuracy\": 0.517632241813602, \"f1\": 0.39446640316205533, \"f2\": 0.2893424562217326, \"f0_5\": 0.6195679165631984, \"p4\": 0.47572994995033335, \"phi\": 0.32417078214331824}, {\"truth_threshold\": 11.300000168383121, \"match_probability\": 0.9996035496660847, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 
493.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1538.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2427375677006401, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.75726243229936, \"precision\": 1.0, \"recall\": 0.2427375677006401, \"specificity\": 1.0, \"npv\": 0.42676108833395454, \"accuracy\": 0.5157430730478589, \"f1\": 0.3906497622820919, \"f2\": 0.28606243472206105, \"f0_5\": 0.6157881588808394, \"p4\": 0.47265083362918403, \"phi\": 0.32185547777140927}, {\"truth_threshold\": 11.400000169873238, \"match_probability\": 0.99963008893853, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 487.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1544.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2397833579517479, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7602166420482521, \"precision\": 1.0, \"recall\": 0.2397833579517479, \"specificity\": 1.0, \"npv\": 0.42580885087393083, \"accuracy\": 0.5138539042821159, \"f1\": 0.3868149324861001, \"f2\": 0.2827778422947393, \"f0_5\": 0.6119628047248052, \"p4\": 0.46954403414712603, \"phi\": 0.31953384188239936}, {\"truth_threshold\": 11.500000171363354, \"match_probability\": 0.999654852226126, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 482.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1549.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.23732151649433778, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7626784835056623, \"precision\": 1.0, \"recall\": 0.23732151649433778, \"specificity\": 1.0, \"npv\": 0.425018559762435, \"accuracy\": 0.5122795969773299, \"f1\": 0.3836052526860326, \"f2\": 0.2800371833604462, \"f0_5\": 0.6087395807021975, \"p4\": 0.46693344428033184, \"phi\": 0.3175941579139333}, {\"truth_threshold\": 11.60000017285347, \"match_probability\": 0.9996779582968373, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 478.0, \"tn\": 1145.0, 
\"fp\": 0.0, \"fn\": 1553.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.23535204332840964, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7646479566715904, \"precision\": 1.0, \"recall\": 0.23535204332840964, \"specificity\": 1.0, \"npv\": 0.42438843587842845, \"accuracy\": 0.5110201511335013, \"f1\": 0.3810282981267437, \"f2\": 0.27784236224133924, \"f0_5\": 0.6061374587877251, \"p4\": 0.4648305783799129, \"phi\": 0.31603905699918783}, {\"truth_threshold\": 11.700000174343586, \"match_probability\": 0.9996995179863626, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 471.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1560.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.23190546528803546, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7680945347119645, \"precision\": 1.0, \"recall\": 0.23190546528803546, \"specificity\": 1.0, \"npv\": 0.4232902033271719, \"accuracy\": 0.5088161209068011, \"f1\": 0.3764988009592326, \"f2\": 0.27399650959860383, \"f0_5\": 0.6015325670498084, \"p4\": 0.46111916274416753, \"phi\": 0.3133102480839957}, {\"truth_threshold\": 11.800000175833702, \"match_probability\": 0.9997196347265854, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 463.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1568.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.22796651895617923, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7720334810438207, \"precision\": 1.0, \"recall\": 0.22796651895617923, \"specificity\": 1.0, \"npv\": 0.4220420199041651, \"accuracy\": 0.5062972292191436, \"f1\": 0.37129109863672816, \"f2\": 0.2695935716781181, \"f0_5\": 0.5961885140355395, \"p4\": 0.45682744333981634, \"phi\": 0.3101797061878598}, {\"truth_threshold\": 11.900000177323818, \"match_probability\": 0.9997384050389891, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 445.0, \"tn\": 1145.0, \"fp\": 0.0, 
\"fn\": 1586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2191038897095027, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7808961102904973, \"precision\": 1.0, \"recall\": 0.2191038897095027, \"specificity\": 1.0, \"npv\": 0.4192603441962651, \"accuracy\": 0.5006297229219143, \"f1\": 0.3594507269789984, \"f2\": 0.25965690278912357, \"f0_5\": 0.583836263447914, \"p4\": 0.44696743745394574, \"phi\": 0.3030867404132794}, {\"truth_threshold\": 12.000000178813934, \"match_probability\": 0.9997559189953416, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 416.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1615.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2048252092565239, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7951747907434761, \"precision\": 1.0, \"recall\": 0.2048252092565239, \"specificity\": 1.0, \"npv\": 0.4148550724637681, \"accuracy\": 0.49149874055415615, \"f1\": 0.340008173273396, \"f2\": 0.24355971896955503, \"f0_5\": 0.5629228687415426, \"p4\": 0.43044577914486043, \"phi\": 0.29150090399263207}, {\"truth_threshold\": 12.200000181794167, \"match_probability\": 0.9997875084304283, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 409.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1622.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.20137863121614968, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7986213687838504, \"precision\": 1.0, \"recall\": 0.20137863121614968, \"specificity\": 1.0, \"npv\": 0.4138055655945067, \"accuracy\": 0.4892947103274559, \"f1\": 0.33524590163934426, \"f2\": 0.23965779913277863, \"f0_5\": 0.5576765748568312, \"p4\": 0.4263317832731881, \"phi\": 0.28867212956751886}, {\"truth_threshold\": 12.300000183284283, \"match_probability\": 0.9998017355340825, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 400.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1631.0, 
\"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.19694731659281142, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8030526834071886, \"precision\": 1.0, \"recall\": 0.19694731659281142, \"specificity\": 1.0, \"npv\": 0.41246397694524495, \"accuracy\": 0.4864609571788413, \"f1\": 0.3290826820238585, \"f2\": 0.2346316283435007, \"f0_5\": 0.5508124483613329, \"p4\": 0.42096603893246504, \"phi\": 0.2850152161737426}, {\"truth_threshold\": 12.400000184774399, \"match_probability\": 0.9998150102562988, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 399.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1632.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1964549483013294, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8035450516986706, \"precision\": 1.0, \"recall\": 0.1964549483013294, \"specificity\": 1.0, \"npv\": 0.41231544832553113, \"accuracy\": 0.48614609571788414, \"f1\": 0.32839506172839505, \"f2\": 0.23407250967969026, \"f0_5\": 0.5500413564929694, \"p4\": 0.42036442533034846, \"phi\": 0.28460746667055603}, {\"truth_threshold\": 12.500000186264515, \"match_probability\": 0.9998273963279586, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 392.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1639.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1930083702609552, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8069916297390448, \"precision\": 1.0, \"recall\": 0.1930083702609552, \"specificity\": 1.0, \"npv\": 0.4112787356321839, \"accuracy\": 0.4839420654911839, \"f1\": 0.32356582748658685, \"f2\": 0.23015500234852043, \"f0_5\": 0.5445957210336204, \"p4\": 0.4161220886855334, \"phi\": 0.2817449883979377}, {\"truth_threshold\": 12.600000187754631, \"match_probability\": 0.9998389532181915, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 384.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1647.0, \"P_rate\": 
0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18906942392909898, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.810930576070901, \"precision\": 1.0, \"recall\": 0.18906942392909898, \"specificity\": 1.0, \"npv\": 0.4101002865329513, \"accuracy\": 0.48142317380352645, \"f1\": 0.31801242236024846, \"f2\": 0.22566995768688294, \"f0_5\": 0.5382674516400336, \"p4\": 0.41120574947340216, \"phi\": 0.27845542718349653}, {\"truth_threshold\": 12.700000189244747, \"match_probability\": 0.9998497364189812, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 383.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1648.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18857705563761692, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.811422944362383, \"precision\": 1.0, \"recall\": 0.18857705563761692, \"specificity\": 1.0, \"npv\": 0.40995345506623704, \"accuracy\": 0.4811083123425693, \"f1\": 0.31731565865782935, \"f2\": 0.22510873398377806, \"f0_5\": 0.5374684254841425, \"p4\": 0.4105860003688893, \"phi\": 0.2780428303424835}, {\"truth_threshold\": 12.800000190734863, \"match_probability\": 0.9998597977108138, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 379.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1652.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18660758247168882, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8133924175283112, \"precision\": 1.0, \"recall\": 0.18660758247168882, \"specificity\": 1.0, \"npv\": 0.40936717912048626, \"accuracy\": 0.47984886649874053, \"f1\": 0.3145228215767635, \"f2\": 0.22286251911090205, \"f0_5\": 0.5342542994079503, \"p4\": 0.40809522958677485, \"phi\": 0.27638925384126056}, {\"truth_threshold\": 12.90000019222498, \"match_probability\": 0.9998691854106266, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 373.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1658.0, \"P_rate\": 
0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18365337272279667, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8163466272772033, \"precision\": 1.0, \"recall\": 0.18365337272279667, \"specificity\": 1.0, \"npv\": 0.4084909026043525, \"accuracy\": 0.47795969773299746, \"f1\": 0.3103161397670549, \"f2\": 0.21948923149346827, \"f0_5\": 0.529378370706784, \"p4\": 0.40432322000651333, \"phi\": 0.27389912739888156}, {\"truth_threshold\": 13.000000193715096, \"match_probability\": 0.9998779446032292, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 370.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1661.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18217626784835056, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8178237321516494, \"precision\": 1.0, \"recall\": 0.18217626784835056, \"specificity\": 1.0, \"npv\": 0.4080541696364932, \"accuracy\": 0.4770151133501259, \"f1\": 0.3082049146189088, \"f2\": 0.21780080056510479, \"f0_5\": 0.5269154087154657, \"p4\": 0.40242079403659214, \"phi\": 0.2726495657512296}, {\"truth_threshold\": 13.100000195205212, \"match_probability\": 0.9998861173572945, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 369.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1662.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18168389955686853, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8183161004431314, \"precision\": 1.0, \"recall\": 0.18168389955686853, \"specificity\": 1.0, \"npv\": 0.4079087994299964, \"accuracy\": 0.47670025188916876, \"f1\": 0.3075, \"f2\": 0.21723772518544684, \"f0_5\": 0.5260906757912746, \"p4\": 0.40178418596158894, \"phi\": 0.27223236645190135}, {\"truth_threshold\": 13.200000196695328, \"match_probability\": 0.9998937429269453, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 354.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1677.0, \"P_rate\": 0.6394836272040302, 
\"N_rate\": 0.36051636934280396, \"tp_rate\": 0.17429837518463812, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8257016248153619, \"precision\": 1.0, \"recall\": 0.17429837518463812, \"specificity\": 1.0, \"npv\": 0.4057406094968108, \"accuracy\": 0.47197732997481107, \"f1\": 0.2968553459119497, \"f2\": 0.20877565463552725, \"f0_5\": 0.5134899912967799, \"p4\": 0.3920831758418028, \"phi\": 0.2659321886904984}, {\"truth_threshold\": 13.300000198185444, \"match_probability\": 0.9999008579398913, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 346.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1685.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.17035942885278188, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8296405711472181, \"precision\": 1.0, \"recall\": 0.17035942885278188, \"specificity\": 1.0, \"npv\": 0.4045936395759717, \"accuracy\": 0.46945843828715367, \"f1\": 0.29112326461926796, \"f2\": 0.20425029515938606, \"f0_5\": 0.5065885797950219, \"p4\": 0.3867889182734259, \"phi\": 0.262538266459636}, {\"truth_threshold\": 13.40000019967556, \"match_probability\": 0.999907496573012, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 335.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1696.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.16494337764647957, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8350566223535204, \"precision\": 1.0, \"recall\": 0.16494337764647957, \"specificity\": 1.0, \"npv\": 0.40302710313269974, \"accuracy\": 0.4659949622166247, \"f1\": 0.28317836010143704, \"f2\": 0.19801394963943728, \"f0_5\": 0.4968851972708395, \"p4\": 0.3793659349517108, \"phi\": 0.2578306647274206}, {\"truth_threshold\": 13.500000201165676, \"match_probability\": 0.9999136907162209, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 325.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1706.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.16001969473165928, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8399803052683407, \"precision\": 1.0, \"recall\": 0.16001969473165928, \"specificity\": 1.0, \"npv\": 0.4016134689582603, \"accuracy\": 0.4628463476070529, \"f1\": 0.2758913412563667, \"f2\": 0.1923304533080838, \"f0_5\": 0.48784148904232966, \"p4\": 0.37246767025663613, \"phi\": 0.2535075239570288}, {\"truth_threshold\": 13.600000202655792, \"match_probability\": 0.9999194701253888, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 324.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1707.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.15952732644017725, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8404726735598228, \"precision\": 1.0, \"recall\": 0.15952732644017725, \"specificity\": 1.0, \"npv\": 0.4014726507713885, \"accuracy\": 0.46253148614609574, \"f1\": 0.2751592356687898, \"f2\": 0.19176136363636365, \"f0_5\": 0.48692515779981965, \"p4\": 0.3717697318528885, \"phi\": 0.25307283263205194}, {\"truth_threshold\": 13.700000204145908, \"match_probability\": 0.9999248625650565, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 322.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1709.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1585425898572132, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8414574101427869, \"precision\": 1.0, \"recall\": 0.1585425898572132, \"specificity\": 1.0, \"npv\": 0.40119131044148565, \"accuracy\": 0.46190176322418136, \"f1\": 0.27369315767105823, \"f2\": 0.1906227800142079, \"f0_5\": 0.4850858692377222, \"p4\": 0.37036934721259873, \"phi\": 0.2522021201052885}, {\"truth_threshold\": 13.800000205636024, \"match_probability\": 0.999929893941616, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 315.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1716.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.155096011816839, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.844903988183161, \"precision\": 1.0, \"recall\": 0.155096011816839, \"specificity\": 1.0, \"npv\": 0.400209716882209, \"accuracy\": 0.4596977329974811, \"f1\": 0.26854219948849106, \"f2\": 0.18663348738002133, \"f0_5\": 0.4785779398359161, \"p4\": 0.36541997841978086, \"phi\": 0.2491403840784887}, {\"truth_threshold\": 13.90000020712614, \"match_probability\": 0.9999345884275949, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 313.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1718.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.15411127523387494, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8458887247661251, \"precision\": 1.0, \"recall\": 0.15411127523387494, \"specificity\": 1.0, \"npv\": 0.39993014320642684, \"accuracy\": 0.45906801007556675, \"f1\": 0.26706484641638223, \"f2\": 0.18549247362806684, \"f0_5\": 0.4766981419433445, \"p4\": 0.3639919317161557, \"phi\": 0.24826144359124447}, {\"truth_threshold\": 14.100000210106373, \"match_probability\": 0.9999430554367367, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 307.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1724.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.15115706548498276, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8488429345150172, \"precision\": 1.0, \"recall\": 0.15115706548498276, \"specificity\": 1.0, \"npv\": 0.399093760892297, \"accuracy\": 0.45717884130982367, \"f1\": 0.262617621899059, \"f2\": 0.18206618431977226, \"f0_5\": 0.4710033752684873, \"p4\": 0.35966979322171594, \"phi\": 0.24561319539032303}, {\"truth_threshold\": 14.200000211596489, \"match_probability\": 0.9999468686412301, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 306.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1725.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.15066469719350073, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8493353028064993, \"precision\": 1.0, \"recall\": 0.15066469719350073, \"specificity\": 1.0, \"npv\": 0.3989547038327526, \"accuracy\": 0.4568639798488665, \"f1\": 0.26187419768934533, \"f2\": 0.18149466192170818, \"f0_5\": 0.4700460829493088, \"p4\": 0.35894382186502344, \"phi\": 0.24517012388723966}, {\"truth_threshold\": 14.300000213086605, \"match_probability\": 0.9999504265130488, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 303.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1728.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14918759231905465, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8508124076809453, \"precision\": 1.0, \"recall\": 0.14918759231905465, \"specificity\": 1.0, \"npv\": 0.39853811347024015, \"accuracy\": 0.45591939546599497, \"f1\": 0.2596401028277635, \"f2\": 0.17977928088287648, \"f0_5\": 0.4671600370027752, \"p4\": 0.3567561397717773, \"phi\": 0.24383794125607963}, {\"truth_threshold\": 14.400000214576721, \"match_probability\": 0.9999537461476637, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 300.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1731.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14771048744460857, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8522895125553914, \"precision\": 1.0, \"recall\": 0.14771048744460857, \"specificity\": 1.0, \"npv\": 0.3981223922114047, \"accuracy\": 0.45497481108312343, \"f1\": 0.2574002574002574, \"f2\": 0.17806267806267806, \"f0_5\": 0.46425255338904364, \"p4\": 0.3545536533347784, \"phi\": 0.24250124250436372}, {\"truth_threshold\": 14.500000216066837, \"match_probability\": 0.9999568434961527, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 298.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1733.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.14672575086164452, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8532742491383555, \"precision\": 1.0, \"recall\": 0.14672575086164452, \"specificity\": 1.0, \"npv\": 0.39784572619874914, \"accuracy\": 0.45434508816120905, \"f1\": 0.2559038213825676, \"f2\": 0.17691759677036334, \"f0_5\": 0.4623022029165374, \"p4\": 0.35307700645624607, \"phi\": 0.24160755969879688}, {\"truth_threshold\": 14.600000217556953, \"match_probability\": 0.9999597334417798, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 295.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1736.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14524864598719842, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8547513540128016, \"precision\": 1.0, \"recall\": 0.14524864598719842, \"specificity\": 1.0, \"npv\": 0.3974314474140923, \"accuracy\": 0.4534005037783375, \"f1\": 0.25365434221840066, \"f2\": 0.17519895474521915, \"f0_5\": 0.4593584553098723, \"p4\": 0.3508494030029032, \"phi\": 0.2402631465906275}, {\"truth_threshold\": 14.70000021904707, \"match_probability\": 0.9999624298714548, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 294.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1737.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1447562776957164, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8552437223042836, \"precision\": 1.0, \"recall\": 0.1447562776957164, \"specificity\": 1.0, \"npv\": 0.397293546148508, \"accuracy\": 0.45308564231738035, \"f1\": 0.25290322580645164, \"f2\": 0.17462580185317178, \"f0_5\": 0.4583723105706268, \"p4\": 0.35010346944394827, \"phi\": 0.23981395892022075}, {\"truth_threshold\": 14.800000220537186, \"match_probability\": 0.9999649457424121, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 292.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1739.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.14377154111275234, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8562284588872476, \"precision\": 1.0, \"recall\": 0.14377154111275234, \"specificity\": 1.0, \"npv\": 0.39701803051317613, \"accuracy\": 0.452455919395466, \"f1\": 0.2513990529487731, \"f2\": 0.17347908745247148, \"f0_5\": 0.4563926226945921, \"p4\": 0.3486064578319283, \"phi\": 0.2389139889090404}, {\"truth_threshold\": 15.000000223517418, \"match_probability\": 0.9999694833578969, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 290.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1741.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14278680452978829, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8572131954702117, \"precision\": 1.0, \"recall\": 0.14278680452978829, \"specificity\": 1.0, \"npv\": 0.39674289674289676, \"accuracy\": 0.45182619647355166, \"f1\": 0.24989228780697975, \"f2\": 0.17233182790587118, \"f0_5\": 0.4544030084612974, \"p4\": 0.3471025353224262, \"phi\": 0.23801187038845348}, {\"truth_threshold\": 15.100000225007534, \"match_probability\": 0.9999715269079685, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 289.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1742.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14229443623830626, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8577055637616937, \"precision\": 1.0, \"recall\": 0.14229443623830626, \"specificity\": 1.0, \"npv\": 0.39660547280914443, \"accuracy\": 0.45151133501259444, \"f1\": 0.24913793103448276, \"f2\": 0.17175799358136218, \"f0_5\": 0.45340445560087855, \"p4\": 0.346347962973042, \"phi\": 0.2375599969742467}, {\"truth_threshold\": 15.20000022649765, \"match_probability\": 0.9999734336151354, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 288.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1743.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.14180206794682423, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8581979320531757, \"precision\": 1.0, \"recall\": 0.14180206794682423, \"specificity\": 1.0, \"npv\": 0.39646814404432135, \"accuracy\": 0.4511964735516373, \"f1\": 0.24838292367399742, \"f2\": 0.17118402282453637, \"f0_5\": 0.4524033930254477, \"p4\": 0.34559163939876736, \"phi\": 0.2371075762191587}, {\"truth_threshold\": 15.300000227987766, \"match_probability\": 0.9999752126423825, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 285.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1746.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14032496307237813, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8596750369276218, \"precision\": 1.0, \"recall\": 0.14032496307237813, \"specificity\": 1.0, \"npv\": 0.3960567277758561, \"accuracy\": 0.45025188916876574, \"f1\": 0.24611398963730569, \"f2\": 0.16946129147342134, \"f0_5\": 0.44938505203405865, \"p4\": 0.34331208180785255, \"phi\": 0.2357469951021941}, {\"truth_threshold\": 15.400000229477882, \"match_probability\": 0.9999768725392036, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 281.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1750.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.13835548990645002, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8616445100935499, \"precision\": 1.0, \"recall\": 0.13835548990645002, \"specificity\": 1.0, \"npv\": 0.3955094991364421, \"accuracy\": 0.44899244332493704, \"f1\": 0.2430795847750865, \"f2\": 0.16716240333135038, \"f0_5\": 0.44532488114104596, \"p4\": 0.34024766819652713, \"phi\": 0.23392501045351508}, {\"truth_threshold\": 15.500000230967999, \"match_probability\": 0.9999784212826682, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 277.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1754.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.13638601674052192, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8636139832594781, \"precision\": 1.0, \"recall\": 0.13638601674052192, \"specificity\": 1.0, \"npv\": 0.39496378061400483, \"accuracy\": 0.4477329974811083, \"f1\": 0.24003466204506066, \"f2\": 0.16486132603261516, \"f0_5\": 0.4412233195285123, \"p4\": 0.3371541980967607, \"phi\": 0.23209381033263574}, {\"truth_threshold\": 15.600000232458115, \"match_probability\": 0.9999798663157408, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 273.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1758.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1344165435745938, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8655834564254062, \"precision\": 1.0, \"recall\": 0.1344165435745938, \"specificity\": 1.0, \"npv\": 0.39441956596624184, \"accuracy\": 0.4464735516372796, \"f1\": 0.23697916666666666, \"f2\": 0.1625580564487317, \"f0_5\": 0.43707973102785785, \"p4\": 0.33403113499095954, \"phi\": 0.23025315367085358}, {\"truth_threshold\": 15.70000023394823, \"match_probability\": 0.9999812145830361, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 271.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1760.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.13343180699162974, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8665681930083703, \"precision\": 1.0, \"recall\": 0.13343180699162974, \"specificity\": 1.0, \"npv\": 0.39414802065404475, \"accuracy\": 0.44584382871536526, \"f1\": 0.23544743701129453, \"f2\": 0.16140559857057774, \"f0_5\": 0.434991974317817, \"p4\": 0.33245833489583054, \"phi\": 0.22932920140715485}, {\"truth_threshold\": 15.800000235438347, \"match_probability\": 0.9999824725641815, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 268.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1763.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.13195470211718366, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8680452978828164, \"precision\": 1.0, \"recall\": 0.13195470211718366, \"specificity\": 1.0, \"npv\": 0.3937414030261348, \"accuracy\": 0.4448992443324937, \"f1\": 0.23314484558503698, \"f2\": 0.15967588179218303, \"f0_5\": 0.43184015468901066, \"p4\": 0.3300848527615133, \"phi\": 0.22793865303523134}, {\"truth_threshold\": 15.900000236928463, \"match_probability\": 0.9999836463049459, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 266.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1765.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1309699655342196, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8690300344657804, \"precision\": 1.0, \"recall\": 0.1309699655342196, \"specificity\": 1.0, \"npv\": 0.39347079037800686, \"accuracy\": 0.44426952141057935, \"f1\": 0.23160644318676535, \"f2\": 0.15852205005959474, \"f0_5\": 0.4297253634894992, \"p4\": 0.32849290653450874, \"phi\": 0.2270084929127756}, {\"truth_threshold\": 16.00000023841858, \"match_probability\": 0.9999847414462861, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 261.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1770.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.12850812407680945, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8714918759231906, \"precision\": 1.0, \"recall\": 0.12850812407680945, \"specificity\": 1.0, \"npv\": 0.3927958833619211, \"accuracy\": 0.44269521410579343, \"f1\": 0.22774869109947643, \"f2\": 0.1556350626118068, \"f0_5\": 0.424390243902439, \"p4\": 0.3244788273615635, \"phi\": 0.22467189881232083}, {\"truth_threshold\": 16.20000024139881, \"match_probability\": 0.9999867166312594, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 255.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1776.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.1255539143279173, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8744460856720827, \"precision\": 1.0, \"recall\": 0.1255539143279173, \"specificity\": 1.0, \"npv\": 0.3919890448476549, \"accuracy\": 0.44080604534005036, \"f1\": 0.2230971128608924, \"f2\": 0.15216612960973863, \"f0_5\": 0.41789577187807275, \"p4\": 0.3195960922748543, \"phi\": 0.2218462507104517}, {\"truth_threshold\": 16.300000242888927, \"match_probability\": 0.9999876061677141, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 254.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1777.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.12506154603643527, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8749384539635647, \"precision\": 1.0, \"recall\": 0.12506154603643527, \"specificity\": 1.0, \"npv\": 0.391854893908282, \"accuracy\": 0.4404911838790932, \"f1\": 0.2223194748358862, \"f2\": 0.1515874910479828, \"f0_5\": 0.4168034131933049, \"p4\": 0.31877518775297364, \"phi\": 0.22137294065470842}, {\"truth_threshold\": 16.400000244379044, \"match_probability\": 0.9999884361359999, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 249.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1782.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.12259970457902511, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8774002954209749, \"precision\": 1.0, \"recall\": 0.12259970457902511, \"specificity\": 1.0, \"npv\": 0.3911855141783396, \"accuracy\": 0.4389168765743073, \"f1\": 0.21842105263157896, \"f2\": 0.14869222500895737, \"f0_5\": 0.41129831516352827, \"p4\": 0.3146395889340626, \"phi\": 0.21899595538241903}, {\"truth_threshold\": 16.50000024586916, \"match_probability\": 0.9999892105250341, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 242.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1789.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.11915312653865091, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.880846873461349, \"precision\": 1.0, \"recall\": 0.11915312653865091, \"specificity\": 1.0, \"npv\": 0.39025221540558963, \"accuracy\": 0.43671284634760704, \"f1\": 0.21293444786625604, \"f2\": 0.14463303848912265, \"f0_5\": 0.40346782260753583, \"p4\": 0.30876091977525716, \"phi\": 0.21563805694777322}, {\"truth_threshold\": 16.600000247359276, \"match_probability\": 0.9999899330566321, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 236.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1795.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.11619891678975874, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8838010832102413, \"precision\": 1.0, \"recall\": 0.11619891678975874, \"specificity\": 1.0, \"npv\": 0.38945578231292516, \"accuracy\": 0.43482367758186397, \"f1\": 0.20820467578297308, \"f2\": 0.14114832535885166, \"f0_5\": 0.39663865546218485, \"p4\": 0.30363716807944324, \"phi\": 0.21273067489732173}, {\"truth_threshold\": 16.700000248849392, \"match_probability\": 0.9999906072033913, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 232.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1799.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.11422944362383063, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8857705563761694, \"precision\": 1.0, \"recall\": 0.11422944362383063, \"specificity\": 1.0, \"npv\": 0.3889266304347826, \"accuracy\": 0.43356423173803527, \"f1\": 0.20503756076005303, \"f2\": 0.13882240306366683, \"f0_5\": 0.39202433254477864, \"p4\": 0.3001765927459395, \"phi\": 0.21077683128146796}, {\"truth_threshold\": 16.800000250339508, \"match_probability\": 0.9999912362053778, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 221.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1810.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.10881339241752831, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8911866075824717, \"precision\": 1.0, \"recall\": 0.10881339241752831, \"specificity\": 1.0, \"npv\": 0.38747884940778343, \"accuracy\": 0.4301007556675063, \"f1\": 0.19626998223801065, \"f2\": 0.1324146195326543, \"f0_5\": 0.379073756432247, \"p4\": 0.290469029799348, \"phi\": 0.20533603700788008}, {\"truth_threshold\": 16.900000251829624, \"match_probability\": 0.999991823085696, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 215.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1816.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.10585918266863614, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8941408173313639, \"precision\": 1.0, \"recall\": 0.10585918266863614, \"specificity\": 1.0, \"npv\": 0.386693684566025, \"accuracy\": 0.4282115869017632, \"f1\": 0.19145146927871773, \"f2\": 0.12891233960906584, \"f0_5\": 0.37184365271532344, \"p4\": 0.2850517881231799, \"phi\": 0.20232418884375344}, {\"truth_threshold\": 17.00000025331974, \"match_probability\": 0.9999923706650156, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 209.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1822.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.10290497291974397, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.897095027080256, \"precision\": 1.0, \"recall\": 0.10290497291974397, \"specificity\": 1.0, \"npv\": 0.38591169531513314, \"accuracy\": 0.4263224181360202, \"f1\": 0.18660714285714286, \"f2\": 0.12540501620064803, \"f0_5\": 0.36449250087199164, \"p4\": 0.27954493418624105, \"phi\": 0.19927928280635765}, {\"truth_threshold\": 17.100000254809856, \"match_probability\": 0.9999928815751264, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 203.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1828.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.0999507631708518, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9000492368291482, \"precision\": 1.0, \"recall\": 0.0999507631708518, \"specificity\": 1.0, \"npv\": 0.3851328624285234, \"accuracy\": 0.4244332493702771, \"f1\": 0.18173679498657117, \"f2\": 0.12189263840518794, \"f0_5\": 0.3570172353148083, \"p4\": 0.2739457211855208, \"phi\": 0.19619970316467247}, {\"truth_threshold\": 17.200000256299973, \"match_probability\": 0.999993358271586, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 193.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1838.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.09502708025603152, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9049729197439685, \"precision\": 1.0, \"recall\": 0.09502708025603152, \"specificity\": 1.0, \"npv\": 0.38384177003017095, \"accuracy\": 0.42128463476070527, \"f1\": 0.1735611510791367, \"f2\": 0.11602741373091259, \"f0_5\": 0.3442739921512665, \"p4\": 0.26440064321915874, \"phi\": 0.19098524206407744}, {\"truth_threshold\": 17.30000025779009, \"match_probability\": 0.999993803045519, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 178.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1853.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.08764155588380108, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9123584441161989, \"precision\": 1.0, \"recall\": 0.08764155588380108, \"specificity\": 1.0, \"npv\": 0.3819212808539026, \"accuracy\": 0.41656171284634763, \"f1\": 0.16115889542779538, \"f2\": 0.10720308359431463, \"f0_5\": 0.32446226759022967, \"p4\": 0.24955621152340898, \"phi\": 0.18295402504227723}, {\"truth_threshold\": 17.400000259280205, \"match_probability\": 0.9999942180346287, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 160.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1871.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.07877892663712457, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9212210733628754, \"precision\": 1.0, \"recall\": 0.07877892663712457, \"specificity\": 1.0, \"npv\": 0.3796419098143236, \"accuracy\": 0.4108942065491184, \"f1\": 0.1460520310360566, \"f2\": 0.09657170449058426, \"f0_5\": 0.2995132909022838, \"p4\": 0.23084277458650382, \"phi\": 0.17293866589528342}, {\"truth_threshold\": 17.50000026077032, \"match_probability\": 0.9999946052334694, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 156.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1875.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.07680945347119646, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9231905465288035, \"precision\": 1.0, \"recall\": 0.07680945347119646, \"specificity\": 1.0, \"npv\": 0.3791390728476821, \"accuracy\": 0.40963476070528965, \"f1\": 0.14266117969821673, \"f2\": 0.09420289855072464, \"f0_5\": 0.2937853107344633, \"p4\": 0.2265418036022582, \"phi\": 0.1706501244506039}, {\"truth_threshold\": 17.600000262260437, \"match_probability\": 0.999994966503032, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 153.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1878.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.07533234859675036, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9246676514032496, \"precision\": 1.0, \"recall\": 0.07533234859675036, \"specificity\": 1.0, \"npv\": 0.3787628183923255, \"accuracy\": 0.4086901763224181, \"f1\": 0.1401098901098901, \"f2\": 0.09242479159115621, \"f0_5\": 0.28944381384790013, \"p4\": 0.22328051634216844, \"phi\": 0.16891741375778377}, {\"truth_threshold\": 17.700000263750553, \"match_probability\": 0.9999953035796879, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 149.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1882.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.07336287543082226, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9266371245691778, \"precision\": 1.0, \"recall\": 0.07336287543082226, \"specificity\": 1.0, \"npv\": 0.3782623059134457, \"accuracy\": 0.4074307304785894, \"f1\": 0.13669724770642203, \"f2\": 0.09005197630847335, \"f0_5\": 0.2835934526075371, \"p4\": 0.218883751244496, \"phi\": 0.16658454438783837}, {\"truth_threshold\": 17.80000026524067, \"match_probability\": 0.9999956180835331, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 144.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1887.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.07090103397341212, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9290989660265879, \"precision\": 1.0, \"recall\": 0.07090103397341212, \"specificity\": 1.0, \"npv\": 0.3776385224274406, \"accuracy\": 0.40585642317380355, \"f1\": 0.13241379310344828, \"f2\": 0.08708272859216255, \"f0_5\": 0.2761795166858458, \"p4\": 0.21330828694544357, \"phi\": 0.16363056471300563}, {\"truth_threshold\": 17.900000266730785, \"match_probability\": 0.9999959115261747, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 134.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1897.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.06597735105859183, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9340226489414082, \"precision\": 1.0, \"recall\": 0.06597735105859183, \"specificity\": 1.0, \"npv\": 0.37639710716633795, \"accuracy\": 0.4027078085642317, \"f1\": 0.12378752886836028, \"f2\": 0.08113344635504965, \"f0_5\": 0.2610050642773666, \"p4\": 0.2018827078967218, \"phi\": 0.15758706824150226}, {\"truth_threshold\": 18.0000002682209, \"match_probability\": 0.9999961853179954, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 132.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1899.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.06499261447562776, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9350073855243722, \"precision\": 1.0, \"recall\": 0.06499261447562776, \"specificity\": 1.0, \"npv\": 0.37614980289093297, \"accuracy\": 0.4020780856423174, \"f1\": 0.12205270457697642, \"f2\": 0.07994186046511628, \"f0_5\": 0.25791324736225085, \"p4\": 0.1995522156019492, \"phi\": 0.15635523376073404}, {\"truth_threshold\": 18.100000269711018, \"match_probability\": 0.999996440774932, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 131.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1900.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.06450024618414574, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9354997538158543, \"precision\": 1.0, \"recall\": 0.06450024618414574, \"specificity\": 1.0, \"npv\": 0.3760262725779967, \"accuracy\": 0.4017632241813602, \"f1\": 0.1211840888066605, \"f2\": 0.07934585099939431, \"f0_5\": 0.2563600782778865, \"p4\": 0.19838115580714064, \"phi\": 0.15573627436466905}, {\"truth_threshold\": 18.200000271201134, \"match_probability\": 0.9999966791247992, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 128.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1903.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.06302314130969966, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9369768586903003, \"precision\": 1.0, \"recall\": 0.06302314130969966, \"specificity\": 1.0, \"npv\": 0.37565616797900264, \"accuracy\": 0.40081863979848864, \"f1\": 0.11857341361741547, \"f2\": 0.07755695588948133, \"f0_5\": 0.25167125442390875, \"p4\": 0.19484445248024185, \"phi\": 0.15386692873519298}, {\"truth_threshold\": 18.30000027269125, \"match_probability\": 0.999996901513191, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 112.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1919.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.055145248645987195, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9448547513540128, \"precision\": 1.0, \"recall\": 0.055145248645987195, \"specificity\": 1.0, \"npv\": 0.37369451697127937, \"accuracy\": 0.3957808564231738, \"f1\": 0.10452636490900606, \"f2\": 0.06799417192812045, \"f0_5\": 0.22589753933037515, \"p4\": 0.17536236689966953, \"phi\": 0.14355304613982695}, {\"truth_threshold\": 18.400000274181366, \"match_probability\": 0.9999971090089864, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 91.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1940.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0448055145248646, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9551944854751354, \"precision\": 1.0, \"recall\": 0.0448055145248646, \"specificity\": 1.0, \"npv\": 0.3711507293354943, \"accuracy\": 0.3891687657430731, \"f1\": 0.08576814326107446, \"f2\": 0.05538648813146683, \"f0_5\": 0.18997912317327767, \"p4\": 0.1480768274225295, \"phi\": 0.1289558040343884}, {\"truth_threshold\": 18.500000275671482, \"match_probability\": 0.9999973026094866, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 77.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1954.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0379123584441162, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9620876415558838, \"precision\": 1.0, \"recall\": 0.0379123584441162, \"specificity\": 1.0, \"npv\": 0.36947402387867057, \"accuracy\": 0.38476070528967254, \"f1\": 0.0730550284629981, \"f2\": 0.04694549445189611, \"f0_5\": 0.16460025651988028, \"p4\": 0.12868698840481557, \"phi\": 0.11835384078718403}, {\"truth_threshold\": 18.600000277161598, \"match_probability\": 0.999997483245208, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 75.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1956.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.03692762186115214, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9630723781388478, \"precision\": 1.0, \"recall\": 0.03692762186115214, \"specificity\": 1.0, \"npv\": 0.3692357304095453, \"accuracy\": 0.38413098236775817, \"f1\": 0.07122507122507123, \"f2\": 0.045737285034760336, \"f0_5\": 0.16087516087516088, \"p4\": 0.12583247247071236, \"phi\": 0.11676899173235163}, {\"truth_threshold\": 18.700000278651714, \"match_probability\": 0.9999976517843541, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 73.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1958.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.035942885278188084, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9640571147218119, \"precision\": 1.0, \"recall\": 0.035942885278188084, \"specificity\": 1.0, \"npv\": 0.36899774411859493, \"accuracy\": 0.38350125944584385, \"f1\": 0.06939163498098859, \"f2\": 0.04452848603147493, \"f0_5\": 0.1571244080929832, \"p4\": 0.12295600444839334, \"phi\": 0.11516441978651591}, {\"truth_threshold\": 18.900000281631947, \"match_probability\": 0.9999979557589296, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 71.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1960.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.034958148695224026, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9650418513047759, \"precision\": 1.0, \"recall\": 0.034958148695224026, \"specificity\": 1.0, \"npv\": 0.3687600644122383, \"accuracy\": 0.38287153652392947, \"f1\": 0.0675547098001903, \"f2\": 0.043319097010372176, \"f0_5\": 0.15334773218142547, \"p4\": 0.1200573002429353, \"phi\": 0.11353928467532028}, {\"truth_threshold\": 19.000000283122063, \"match_probability\": 0.9999980926553794, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 69.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1962.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.033973412112259974, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.96602658788774, \"precision\": 1.0, \"recall\": 0.033973412112259974, \"specificity\": 1.0, \"npv\": 0.3685226906984229, \"accuracy\": 0.3822418136020151, \"f1\": 0.06571428571428571, \"f2\": 0.04210911753936287, \"f0_5\": 0.14954486345903772, \"p4\": 0.11713607088211223, \"phi\": 0.11189268628385163}, {\"truth_threshold\": 19.10000028461218, \"match_probability\": 0.9999982203843173, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 66.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1965.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.03249630723781388, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9675036927621861, \"precision\": 1.0, \"recall\": 0.03249630723781388, \"specificity\": 1.0, \"npv\": 0.36816720257234725, \"accuracy\": 0.38129722921914355, \"f1\": 0.06294706723891273, \"f2\": 0.040293040293040296, \"f0_5\": 0.1437908496732026, \"p4\": 0.11271134775969976, \"phi\": 0.10938041200177231}, {\"truth_threshold\": 19.200000286102295, \"match_probability\": 0.9999983395596597, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 65.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1966.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.032003938946331856, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9679960610536681, \"precision\": 1.0, \"recall\": 0.032003938946331856, \"specificity\": 1.0, \"npv\": 0.3680488588878174, \"accuracy\": 0.3809823677581864, \"f1\": 0.06202290076335878, \"f2\": 0.03968738551715716, \"f0_5\": 0.14185945002182454, \"p4\": 0.1112248557850375, \"phi\": 0.10853116238718177}, {\"truth_threshold\": 19.400000289082527, \"match_probability\": 0.9999985545024187, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 63.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1968.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.0310192023633678, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9689807976366323, \"precision\": 1.0, \"recall\": 0.0310192023633678, \"specificity\": 1.0, \"npv\": 0.3678123996145198, \"accuracy\": 0.380352644836272, \"f1\": 0.06017191977077364, \"f2\": 0.03847563209967021, \"f0_5\": 0.13797634691195795, \"p4\": 0.10823426675729327, \"phi\": 0.1068140779831886}, {\"truth_threshold\": 19.500000290572643, \"match_probability\": 0.9999986513029383, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 62.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1969.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.03052683407188577, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9694731659281143, \"precision\": 1.0, \"recall\": 0.03052683407188577, \"specificity\": 1.0, \"npv\": 0.36769428387925496, \"accuracy\": 0.38003778337531485, \"f1\": 0.05924510272336359, \"f2\": 0.03786953334962131, \"f0_5\": 0.13602457218078104, \"p4\": 0.10673009231574156, \"phi\": 0.1059459408998895}, {\"truth_threshold\": 19.60000029206276, \"match_probability\": 0.9999987416210334, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 61.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1970.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.03003446578040374, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9699655342195963, \"precision\": 1.0, \"recall\": 0.03003446578040374, \"specificity\": 1.0, \"npv\": 0.36757624398073835, \"accuracy\": 0.3797229219143577, \"f1\": 0.058317399617590825, \"f2\": 0.03726328649969456, \"f0_5\": 0.13406593406593406, \"p4\": 0.10521994576679723, \"phi\": 0.10507119548919591}, {\"truth_threshold\": 19.700000293552876, \"match_probability\": 0.9999988258908107, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 60.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1971.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.029542097488921712, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9704579025110783, \"precision\": 1.0, \"recall\": 0.029542097488921712, \"specificity\": 1.0, \"npv\": 0.36745827984595636, \"accuracy\": 0.37940806045340053, \"f1\": 0.05738880918220947, \"f2\": 0.036656891495601175, \"f0_5\": 0.13210039630118892, \"p4\": 0.10370378756573473, \"phi\": 0.10418967475868576}, {\"truth_threshold\": 19.80000029504299, \"match_probability\": 0.9999989045173057, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 53.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1978.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.026095519448547513, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9739044805514525, \"precision\": 1.0, \"recall\": 0.026095519448547513, \"specificity\": 1.0, \"npv\": 0.36663464617355107, \"accuracy\": 0.37720403022670024, \"f1\": 0.0508637236084453, \"f2\": 0.03240797358444417, \"f0_5\": 0.1181453410610789, \"p4\": 0.0929189583154697, \"phi\": 0.09781370834261033}, {\"truth_threshold\": 19.900000296533108, \"match_probability\": 0.9999989778784306, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 46.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1985.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.022648941408173313, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9773510585918267, \"precision\": 1.0, \"recall\": 0.022648941408173313, \"specificity\": 1.0, \"npv\": 0.365814696485623, \"accuracy\": 0.375, \"f1\": 0.04429465575349061, \"f2\": 0.028151774785801713, \"f0_5\": 0.1038374717832957, \"p4\": 0.08182335429924091, \"phi\": 0.0910237091474061}, {\"truth_threshold\": 21.200000315904617, \"match_probability\": 0.9999995848894065, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 45.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1986.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.022156573116691284, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9778434268833087, \"precision\": 1.0, \"recall\": 0.022156573116691284, \"specificity\": 1.0, \"npv\": 0.3656978601085915, \"accuracy\": 0.37468513853904284, \"f1\": 0.04335260115606936, \"f2\": 0.027543150936467132, \"f0_5\": 0.10176390773405698, \"p4\": 0.08021203063702596, \"phi\": 0.09001450647597614}, {\"truth_threshold\": 21.500000320374966, \"match_probability\": 0.9999996628254004, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 44.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1987.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.021664204825209258, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9783357951747907, \"precision\": 1.0, \"recall\": 0.021664204825209258, \"specificity\": 1.0, \"npv\": 0.365581098339719, \"accuracy\": 0.3743702770780856, \"f1\": 0.042409638554216866, \"f2\": 0.02693437806072478, \"f0_5\": 0.09968282736746716, \"p4\": 0.07859401270561604, \"phi\": 0.08899451553133284}, {\"truth_threshold\": 21.600000321865082, \"match_probability\": 0.999999685404968, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 43.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1988.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.02117183653372723, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9788281634662728, \"precision\": 1.0, \"recall\": 0.02117183653372723, \"specificity\": 1.0, \"npv\": 0.3654644111075646, \"accuracy\": 0.37405541561712846, \"f1\": 0.041465766634522665, \"f2\": 0.026325456103832495, \"f0_5\": 0.09759418974126191, \"p4\": 0.07696925450739521, \"phi\": 0.08796336038865413}, {\"truth_threshold\": 21.700000323355198, \"match_probability\": 0.9999997064724503, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 42.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1989.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.0206794682422452, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9793205317577548, \"precision\": 1.0, \"recall\": 0.0206794682422452, \"specificity\": 1.0, \"npv\": 0.3653477983407786, \"accuracy\": 0.3737405541561713, \"f1\": 0.04052098408104197, \"f2\": 0.025716385011021307, \"f0_5\": 0.09549795361527967, \"p4\": 0.0753377096255321, \"phi\": 0.08692064307839845}, {\"truth_threshold\": 21.90000032633543, \"match_probability\": 0.9999997444694171, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 41.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1990.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.02018709995076317, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9798129000492368, \"precision\": 1.0, \"recall\": 0.02018709995076317, \"specificity\": 1.0, \"npv\": 0.3652312599681021, \"accuracy\": 0.3734256926952141, \"f1\": 0.03957528957528957, \"f2\": 0.02510716472749541, \"f0_5\": 0.09339407744874716, \"p4\": 0.07369933121919056, \"phi\": 0.08586594173547067}, {\"truth_threshold\": 22.000000327825546, \"match_probability\": 0.9999997615815319, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 35.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1996.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.017232890201871, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.982767109798129, \"precision\": 1.0, \"recall\": 0.017232890201871, \"specificity\": 1.0, \"npv\": 0.36453358802929003, \"accuracy\": 0.371536523929471, \"f1\": 0.03388189738625363, \"f2\": 0.02144870694938105, \"f0_5\": 0.08060801473975127, \"p4\": 0.06372287901796007, \"phi\": 0.07925886257954269}, {\"truth_threshold\": 22.100000329315662, \"match_probability\": 0.9999997775477002, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 34.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1997.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.01674052191038897, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.983259478089611, \"precision\": 1.0, \"recall\": 0.01674052191038897, \"specificity\": 1.0, \"npv\": 0.364417568427753, \"accuracy\": 0.37122166246851385, \"f1\": 0.03292978208232446, \"f2\": 0.02083844079431233, \"f0_5\": 0.07844946931241348, \"p4\": 0.06203531774376609, \"phi\": 0.07810595552706255}, {\"truth_threshold\": 22.20000033080578, \"match_probability\": 0.9999997924446623, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 31.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2000.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.015263417035942885, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9847365829640571, \"precision\": 1.0, \"recall\": 0.015263417035942885, \"specificity\": 1.0, \"npv\": 0.3640699523052464, \"accuracy\": 0.3702770780856423, \"f1\": 0.030067895247332686, \"f2\": 0.01900674432863274, \"f0_5\": 0.07192575406032482, \"p4\": 0.0569290852372513, \"phi\": 0.0745449630242769}, {\"truth_threshold\": 22.300000332295895, \"match_probability\": 0.9999998063440199, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 27.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2004.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.013293943870014771, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9867060561299852, \"precision\": 1.0, \"recall\": 0.013293943870014771, \"specificity\": 1.0, \"npv\": 0.3636074944426802, \"accuracy\": 0.3690176322418136, \"f1\": 0.026239067055393587, \"f2\": 0.016562384983437616, \"f0_5\": 0.06311360448807854, \"p4\": 0.050017230584043997, \"phi\": 0.06952537394245138}, {\"truth_threshold\": 22.40000033378601, \"match_probability\": 0.9999998193125794, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 26.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2005.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.012801575578532743, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9871984244214672, \"precision\": 1.0, \"recall\": 0.012801575578532743, \"specificity\": 1.0, \"npv\": 0.3634920634920635, \"accuracy\": 0.36870277078085645, \"f1\": 0.025279533300923675, \"f2\": 0.015950920245398775, \"f0_5\": 0.06088992974238876, \"p4\": 0.048270424636238894, \"phi\": 0.0682148893057115}, {\"truth_threshold\": 22.600000336766243, \"match_probability\": 0.9999998427024609, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 25.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2006.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.012309207287050714, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9876907927129492, \"precision\": 1.0, \"recall\": 0.012309207287050714, \"specificity\": 1.0, \"npv\": 0.3633767058076801, \"accuracy\": 0.36838790931989923, \"f1\": 0.024319066147859923, \"f2\": 0.015339305436249846, \"f0_5\": 0.05865790708587518, \"p4\": 0.04651597386980402, \"phi\": 0.06687958728246145}, {\"truth_threshold\": 22.800000339746475, \"match_probability\": 0.9999998630645361, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 23.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2008.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.011324470704086657, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9886755292959133, \"precision\": 1.0, \"recall\": 0.011324470704086657, \"specificity\": 1.0, \"npv\": 0.36314620995876945, \"accuracy\": 0.3677581863979849, \"f1\": 0.022395326192794548, \"f2\": 0.014115625383576776, \"f0_5\": 0.054168629298162976, \"p4\": 0.042983917959230976, \"phi\": 0.06412829809045448}, {\"truth_threshold\": 23.000000342726707, \"match_probability\": 0.999999880790753, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 21.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2010.0, \"P_rate\": 0.6394836272040302, 
\"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0103397341211226, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9896602658788775, \"precision\": 1.0, \"recall\": 0.0103397341211226, \"specificity\": 1.0, \"npv\": 0.3629160063391442, \"accuracy\": 0.36712846347607053, \"f1\": 0.02046783625730994, \"f2\": 0.01289134438305709, \"f0_5\": 0.04964539007092199, \"p4\": 0.03942061774542593, \"phi\": 0.06125728539403615}, {\"truth_threshold\": 23.100000344216824, \"match_probability\": 0.9999998887738388, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 20.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2011.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.009847365829640572, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9901526341703595, \"precision\": 1.0, \"recall\": 0.009847365829640572, \"specificity\": 1.0, \"npv\": 0.36280101394169834, \"accuracy\": 0.36681360201511337, \"f1\": 0.019502681618722574, \"f2\": 0.012278978388998035, \"f0_5\": 0.04737091425864519, \"p4\": 0.03762710959306445, \"phi\": 0.05977151752840505}, {\"truth_threshold\": 23.20000034570694, \"match_probability\": 0.9999998962223214, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 17.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2014.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.008370260955194485, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9916297390448056, \"precision\": 1.0, \"recall\": 0.008370260955194485, \"specificity\": 1.0, \"npv\": 0.3624564735675847, \"accuracy\": 0.36586901763224183, \"f1\": 0.0166015625, \"f2\": 0.010440977766859108, \"f0_5\": 0.040495474035254886, \"p4\": 0.03219846095822884, \"phi\": 0.05508044361350257}, {\"truth_threshold\": 23.400000348687172, \"match_probability\": 0.9999999096562825, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 15.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2016.0, \"P_rate\": 0.6394836272040302, 
\"N_rate\": 0.36051636934280396, \"tp_rate\": 0.007385524372230428, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9926144756277696, \"precision\": 1.0, \"recall\": 0.007385524372230428, \"specificity\": 1.0, \"npv\": 0.3622271433090794, \"accuracy\": 0.36523929471032746, \"f1\": 0.01466275659824047, \"f2\": 0.009214891264283081, \"f0_5\": 0.035868005738880916, \"p4\": 0.028538670521671944, \"phi\": 0.05172269709897784}, {\"truth_threshold\": 23.500000350177288, \"match_probability\": 0.9999999157063305, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 12.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2019.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.005908419497784343, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9940915805022157, \"precision\": 1.0, \"recall\": 0.005908419497784343, \"specificity\": 1.0, \"npv\": 0.36188369152970923, \"accuracy\": 0.3642947103274559, \"f1\": 0.011747430249632892, \"f2\": 0.007374631268436578, \"f0_5\": 0.02886002886002886, \"p4\": 0.022986746233599045, \"phi\": 0.046240249339339734}, {\"truth_threshold\": 23.600000351667404, \"match_probability\": 0.9999999213512251, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 10.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2021.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.004923682914820286, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9950763170851797, \"precision\": 1.0, \"recall\": 0.004923682914820286, \"specificity\": 1.0, \"npv\": 0.3616550852811118, \"accuracy\": 0.36366498740554154, \"f1\": 0.009799118079372856, \"f2\": 0.006147037128104254, \"f0_5\": 0.024142926122646065, \"p4\": 0.019243252781973526, \"phi\": 0.0421980445572598}, {\"truth_threshold\": 23.70000035315752, \"match_probability\": 0.9999999266180979, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 9.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2022.0, \"P_rate\": 
0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.004431314623338257, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9955686853766618, \"precision\": 1.0, \"recall\": 0.004431314623338257, \"specificity\": 1.0, \"npv\": 0.361540890432586, \"accuracy\": 0.3633501259445844, \"f1\": 0.008823529411764706, \"f2\": 0.005533013648100332, \"f0_5\": 0.02177068214804064, \"p4\": 0.017358654565300884, \"phi\": 0.040026259314463214}, {\"truth_threshold\": 23.900000356137753, \"match_probability\": 0.9999999361173434, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 7.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2024.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0034465780403741997, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9965534219596258, \"precision\": 1.0, \"recall\": 0.0034465780403741997, \"specificity\": 1.0, \"npv\": 0.3613127169454087, \"accuracy\": 0.36272040302267, \"f1\": 0.0068694798822374874, \"f2\": 0.004304513589964334, \"f0_5\": 0.016998542982030112, \"p4\": 0.013563435077429192, \"phi\": 0.035288701817040316}, {\"truth_threshold\": 24.100000359117985, \"match_probability\": 0.9999999443869169, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 6.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2025.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0029542097488921715, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9970457902511078, \"precision\": 1.0, \"recall\": 0.0029542097488921715, \"specificity\": 1.0, \"npv\": 0.361198738170347, \"accuracy\": 0.36240554156171284, \"f1\": 0.005891016200294551, \"f2\": 0.0036900369003690036, \"f0_5\": 0.014598540145985401, \"p4\": 0.011652683870064942, \"phi\": 0.032665835877723835}, {\"truth_threshold\": 24.2000003606081, \"match_probability\": 0.9999999481111586, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 5.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2026.0, 
\"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.002461841457410143, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9975381585425899, \"precision\": 1.0, \"recall\": 0.002461841457410143, \"specificity\": 1.0, \"npv\": 0.36108483128350677, \"accuracy\": 0.3620906801007557, \"f1\": 0.004911591355599214, \"f2\": 0.0030754090294009104, \"f0_5\": 0.01218917601170161, \"p4\": 0.009733083985039102, \"phi\": 0.02981498964104606}, {\"truth_threshold\": 24.300000362098217, \"match_probability\": 0.999999951585999, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 4.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2027.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0019694731659281144, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9980305268340719, \"precision\": 1.0, \"recall\": 0.0019694731659281144, \"specificity\": 1.0, \"npv\": 0.3609709962168979, \"accuracy\": 0.36177581863979846, \"f1\": 0.003931203931203931, \"f2\": 0.0024606299212598425, \"f0_5\": 0.009770395701025891, \"p4\": 0.007804568825263287, \"phi\": 0.026663133550419747}, {\"truth_threshold\": 24.400000363588333, \"match_probability\": 0.9999999548281396, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 3.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2028.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0014771048744460858, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9985228951255539, \"precision\": 1.0, \"recall\": 0.0014771048744460858, \"specificity\": 1.0, \"npv\": 0.36085723290261584, \"accuracy\": 0.3614609571788413, \"f1\": 0.0029498525073746312, \"f2\": 0.0018456995201181247, \"f0_5\": 0.007342143906020558, \"p4\": 0.00586707112734875, \"phi\": 0.023087312050119223}, {\"truth_threshold\": 24.600000366568565, \"match_probability\": 0.9999999606756114, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 2.0, \"tn\": 1145.0, \"fp\": 
0.0, \"fn\": 2029.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0009847365829640572, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.999015263417036, \"precision\": 1.0, \"recall\": 0.0009847365829640572, \"specificity\": 1.0, \"npv\": 0.36074354127284186, \"accuracy\": 0.36114609571788414, \"f1\": 0.001967535661583866, \"f2\": 0.0012306177701206006, \"f0_5\": 0.004904364884747425, \"p4\": 0.003920522953249476, \"phi\": 0.018847741566547744}, {\"truth_threshold\": 25.100000374019146, \"match_probability\": 0.9999999721934579, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2030.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0004923682914820286, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.999507631708518, \"precision\": 1.0, \"recall\": 0.0004923682914820286, \"specificity\": 1.0, \"npv\": 0.3606299212598425, \"accuracy\": 0.3608312342569269, \"f1\": 0.000984251968503937, \"f2\": 0.0006153846153846154, \"f0_5\": 0.002457002457002457, \"p4\": 0.001964855681779181, \"phi\": 0.013325266908696693}]}}, {\"mode\": \"vega-lite\"});\n",
              "</script>"
            ],
            "text/plain": [
              "alt.Chart(...)"
            ]
          },
          "execution_count": 3,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "linker.evaluation.accuracy_analysis_from_labels_table(\n",
        "    labels_table, output_type=\"roc\", add_metrics=[\"f1\"]\n",
        ")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 4,
      "metadata": {},
      "outputs": [
        {
          "data": {
            "text/html": [
              "\n",
              "<style>\n",
              "  #altair-viz-817d3fced3f2466ebbfca9831dfbd2b7.vega-embed {\n",
              "    width: 100%;\n",
              "    display: flex;\n",
              "  }\n",
              "\n",
              "  #altair-viz-817d3fced3f2466ebbfca9831dfbd2b7.vega-embed details,\n",
              "  #altair-viz-817d3fced3f2466ebbfca9831dfbd2b7.vega-embed details summary {\n",
              "    position: relative;\n",
              "  }\n",
              "</style>\n",
              "<div id=\"altair-viz-817d3fced3f2466ebbfca9831dfbd2b7\"></div>\n",
              "<script type=\"text/javascript\">\n",
              "  var VEGA_DEBUG = (typeof VEGA_DEBUG == \"undefined\") ? {} : VEGA_DEBUG;\n",
              "  (function(spec, embedOpt){\n",
              "    let outputDiv = document.currentScript.previousElementSibling;\n",
              "    if (outputDiv.id !== \"altair-viz-817d3fced3f2466ebbfca9831dfbd2b7\") {\n",
              "      outputDiv = document.getElementById(\"altair-viz-817d3fced3f2466ebbfca9831dfbd2b7\");\n",
              "    }\n",
              "    const paths = {\n",
              "      \"vega\": \"https://cdn.jsdelivr.net/npm/vega@5?noext\",\n",
              "      \"vega-lib\": \"https://cdn.jsdelivr.net/npm/vega-lib?noext\",\n",
              "      \"vega-lite\": \"https://cdn.jsdelivr.net/npm/vega-lite@5.17.0?noext\",\n",
              "      \"vega-embed\": \"https://cdn.jsdelivr.net/npm/vega-embed@6?noext\",\n",
              "    };\n",
              "\n",
              "    function maybeLoadScript(lib, version) {\n",
              "      var key = `${lib.replace(\"-\", \"\")}_version`;\n",
              "      return (VEGA_DEBUG[key] == version) ?\n",
              "        Promise.resolve(paths[lib]) :\n",
              "        new Promise(function(resolve, reject) {\n",
              "          var s = document.createElement('script');\n",
              "          document.getElementsByTagName(\"head\")[0].appendChild(s);\n",
              "          s.async = true;\n",
              "          s.onload = () => {\n",
              "            VEGA_DEBUG[key] = version;\n",
              "            return resolve(paths[lib]);\n",
              "          };\n",
              "          s.onerror = () => reject(`Error loading script: ${paths[lib]}`);\n",
              "          s.src = paths[lib];\n",
              "        });\n",
              "    }\n",
              "\n",
              "    function showError(err) {\n",
              "      outputDiv.innerHTML = `<div class=\"error\" style=\"color:red;\">${err}</div>`;\n",
              "      throw err;\n",
              "    }\n",
              "\n",
              "    function displayChart(vegaEmbed) {\n",
              "      vegaEmbed(outputDiv, spec, embedOpt)\n",
              "        .catch(err => showError(`Javascript Error: ${err.message}<br>This usually means there's a typo in your chart specification. See the javascript console for the full traceback.`));\n",
              "    }\n",
              "\n",
              "    if(typeof define === \"function\" && define.amd) {\n",
              "      requirejs.config({paths});\n",
              "      require([\"vega-embed\"], displayChart, err => showError(`Error loading script: ${err.message}`));\n",
              "    } else {\n",
              "      maybeLoadScript(\"vega\", \"5\")\n",
              "        .then(() => maybeLoadScript(\"vega-lite\", \"5.17.0\"))\n",
              "        .then(() => maybeLoadScript(\"vega-embed\", \"6\"))\n",
              "        .catch(showError)\n",
              "        .then(() => displayChart(vegaEmbed));\n",
              "    }\n",
              "  })({\"config\": {\"view\": {\"continuousWidth\": 300, \"continuousHeight\": 300}}, \"data\": {\"name\": \"data-88481be63707845e366cd370703ce8b4\"}, \"mark\": {\"type\": \"line\", \"clip\": true, \"point\": true}, \"encoding\": {\"tooltip\": [{\"field\": \"truth_threshold\", \"format\": \".4f\", \"type\": \"quantitative\"}, {\"field\": \"match_probability\", \"format\": \".4%\", \"type\": \"quantitative\"}, {\"field\": \"fp_rate\", \"format\": \".4f\", \"title\": \"FP_rate\", \"type\": \"quantitative\"}, {\"field\": \"tp_rate\", \"format\": \".4f\", \"title\": \"TP_rate\", \"type\": \"quantitative\"}, {\"field\": \"tp\", \"format\": \",.0f\", \"title\": \"TP\", \"type\": \"quantitative\"}, {\"field\": \"tn\", \"format\": \",.0f\", \"title\": \"TN\", \"type\": \"quantitative\"}, {\"field\": \"fp\", \"format\": \",.0f\", \"title\": \"FP\", \"type\": \"quantitative\"}, {\"field\": \"fn\", \"format\": \",.0f\", \"title\": \"FN\", \"type\": \"quantitative\"}, {\"field\": \"precision\", \"format\": \".4f\", \"type\": \"quantitative\"}, {\"field\": \"recall\", \"format\": \".4f\", \"type\": \"quantitative\"}, {\"field\": \"f1\", \"format\": \".4f\", \"title\": \"F1\", \"type\": \"quantitative\"}], \"x\": {\"field\": \"recall\", \"sort\": [\"-recall\"], \"title\": \"Recall\", \"type\": \"quantitative\"}, \"y\": {\"field\": \"precision\", \"sort\": [\"-precision\"], \"title\": \"Precision\", \"type\": \"quantitative\"}}, \"height\": 400, \"params\": [{\"name\": \"mouse_zoom\", \"select\": {\"type\": \"interval\", \"encodings\": [\"x\", \"y\"]}, \"bind\": \"scales\"}], \"title\": \"Precision-recall curve\", \"width\": 400, \"$schema\": \"https://vega.github.io/schema/vega-lite/v5.9.3.json\", \"datasets\": {\"data-88481be63707845e366cd370703ce8b4\": [{\"truth_threshold\": -23.800000354647636, \"match_probability\": 6.846773588489456e-08, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1055.0, \"fp\": 90.0, \"fn\": 
585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9213973799126638, \"fp_rate\": 0.07860262008733625, \"fn_rate\": 0.2880354505169867, \"precision\": 0.94140625, \"recall\": 0.7119645494830132, \"specificity\": 0.9213973799126638, \"npv\": 0.6432926829268293, \"accuracy\": 0.7874685138539043, \"f1\": 0.8107653490328006, \"f2\": 0.7484472049689441, \"f0_5\": 0.8844036697247707, \"p4\": 0.7832976799979975, \"phi\": 0.6085442007563051}, {\"truth_threshold\": -22.70000033825636, \"match_probability\": 1.467637948991862e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1077.0, \"fp\": 68.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9406113537117904, \"fp_rate\": 0.059388646288209605, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9550858652575958, \"recall\": 0.7119645494830132, \"specificity\": 0.9406113537117904, \"npv\": 0.648014440433213, \"accuracy\": 0.7943954659949622, \"f1\": 0.8157968970380818, \"f2\": 0.750155633948952, \"f0_5\": 0.8940274514653147, \"p4\": 0.7908413564901972, \"phi\": 0.6273505612520337}, {\"truth_threshold\": -21.700000323355198, \"match_probability\": 2.9352754975091214e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1083.0, \"fp\": 62.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9458515283842794, \"fp_rate\": 0.05414847161572053, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9588859416445623, \"recall\": 0.7119645494830132, \"specificity\": 0.9458515283842794, \"npv\": 0.6492805755395683, \"accuracy\": 0.7962846347607053, \"f1\": 0.817179994348686, \"f2\": 0.7506229235880398, \"f0_5\": 0.896688577452561, \"p4\": 0.792886883910619, \"phi\": 0.6325043185815227}, {\"truth_threshold\": -21.600000321865082, 
\"match_probability\": 3.1459503204353755e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1088.0, \"fp\": 57.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9502183406113537, \"fp_rate\": 0.04978165938864629, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9620758483033932, \"recall\": 0.7119645494830132, \"specificity\": 0.9502183406113537, \"npv\": 0.6503287507471608, \"accuracy\": 0.7978589420654912, \"f1\": 0.8183361629881154, \"f2\": 0.7510127765659084, \"f0_5\": 0.8989183140619172, \"p4\": 0.7945877557823284, \"phi\": 0.6368075433805553}, {\"truth_threshold\": -20.60000030696392, \"match_probability\": 6.29189872645777e-07, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1094.0, \"fp\": 51.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9554585152838428, \"fp_rate\": 0.0445414847161572, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9659318637274549, \"recall\": 0.7119645494830132, \"specificity\": 0.9554585152838428, \"npv\": 0.6515783204288267, \"accuracy\": 0.7997481108312342, \"f1\": 0.8197278911564626, \"f2\": 0.75148113501715, \"f0_5\": 0.9016086793864572, \"p4\": 0.7966244062798371, \"phi\": 0.6419817284271657}, {\"truth_threshold\": -19.000000283122063, \"match_probability\": 1.907344620533969e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1096.0, \"fp\": 49.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9572052401746725, \"fp_rate\": 0.04279475982532751, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9672240802675586, \"recall\": 0.7119645494830132, \"specificity\": 0.9572052401746725, \"npv\": 0.6519928613920285, \"accuracy\": 0.8003778337531486, \"f1\": 
0.8201928530913216, \"f2\": 0.7516373843434868, \"f0_5\": 0.9025090500561728, \"p4\": 0.7973022397990062, \"phi\": 0.6437089952787838}, {\"truth_threshold\": -17.900000266730785, \"match_probability\": 4.088473825324779e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1097.0, \"fp\": 48.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9580786026200874, \"fp_rate\": 0.04192139737991266, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9678714859437751, \"recall\": 0.7119645494830132, \"specificity\": 0.9580786026200874, \"npv\": 0.6521997621878716, \"accuracy\": 0.8006926952141058, \"f1\": 0.8204255319148936, \"f2\": 0.751715533374922, \"f0_5\": 0.9029599100786811, \"p4\": 0.7976409617025867, \"phi\": 0.6445731096055997}, {\"truth_threshold\": -17.600000262260437, \"match_probability\": 5.03349696795731e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1100.0, \"fp\": 45.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9606986899563319, \"fp_rate\": 0.039301310043668124, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9698189134808853, \"recall\": 0.7119645494830132, \"specificity\": 0.9606986899563319, \"npv\": 0.6528189910979229, \"accuracy\": 0.8016372795969773, \"f1\": 0.8211243611584327, \"f2\": 0.7519500780031201, \"f0_5\": 0.9043151969981238, \"p4\": 0.7986563533248476, \"phi\": 0.6471673893914208}, {\"truth_threshold\": -17.50000026077032, \"match_probability\": 5.394766530610173e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1108.0, \"fp\": 37.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9676855895196507, \"fp_rate\": 0.032314410480349345, \"fn_rate\": 0.2880354505169867, 
\"precision\": 0.9750505731625084, \"recall\": 0.7119645494830132, \"specificity\": 0.9676855895196507, \"npv\": 0.6544595392793857, \"accuracy\": 0.8041561712846348, \"f1\": 0.8229937393284007, \"f2\": 0.7525762464869367, \"f0_5\": 0.9079492653522542, \"p4\": 0.8013584652743565, \"phi\": 0.6540998665530485}, {\"truth_threshold\": -16.900000251829624, \"match_probability\": 8.176914304005986e-06, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1110.0, \"fp\": 35.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9694323144104804, \"fp_rate\": 0.03056768558951965, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9763673193787981, \"recall\": 0.7119645494830132, \"specificity\": 0.9694323144104804, \"npv\": 0.6548672566371682, \"accuracy\": 0.8047858942065491, \"f1\": 0.8234624145785877, \"f2\": 0.7527329515877147, \"f0_5\": 0.9088623507228158, \"p4\": 0.8020327397013851, \"phi\": 0.6558363061606292}, {\"truth_threshold\": -16.50000024586916, \"match_probability\": 1.0789474965962542e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1111.0, \"fp\": 34.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9703056768558952, \"fp_rate\": 0.029694323144104803, \"fn_rate\": 0.2880354505169867, \"precision\": 0.977027027027027, \"recall\": 0.7119645494830132, \"specificity\": 0.9703056768558952, \"npv\": 0.6550707547169812, \"accuracy\": 0.8051007556675063, \"f1\": 0.8236969524352037, \"f2\": 0.7528113286130779, \"f0_5\": 0.90931958244246, \"p4\": 0.8023696912661274, \"phi\": 0.6567050301458078}, {\"truth_threshold\": -15.800000235438347, \"match_probability\": 1.7527435818536736e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1120.0, \"fp\": 25.0, \"fn\": 585.0, \"P_rate\": 
0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9781659388646288, \"fp_rate\": 0.021834061135371178, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9830047586675731, \"recall\": 0.7119645494830132, \"specificity\": 0.9781659388646288, \"npv\": 0.656891495601173, \"accuracy\": 0.8079345088161209, \"f1\": 0.8258138206739006, \"f2\": 0.7535174570088587, \"f0_5\": 0.9134554643082754, \"p4\": 0.8053967630362511, \"phi\": 0.6645388735433893}, {\"truth_threshold\": -15.400000229477882, \"match_probability\": 2.312746079632102e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1121.0, \"fp\": 24.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9790393013100437, \"fp_rate\": 0.02096069868995633, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9836734693877551, \"recall\": 0.7119645494830132, \"specificity\": 0.9790393013100437, \"npv\": 0.6570926143024619, \"accuracy\": 0.8082493702770781, \"f1\": 0.8260497000856898, \"f2\": 0.7535959974984365, \"f0_5\": 0.9139173302995829, \"p4\": 0.8057325018854461, \"phi\": 0.6654110243207023}, {\"truth_threshold\": -15.20000022649765, \"match_probability\": 2.6566384864664307e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1446.0, \"tn\": 1126.0, \"fp\": 19.0, \"fn\": 585.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7119645494830132, \"tn_rate\": 0.9834061135371179, \"fp_rate\": 0.016593886462882096, \"fn_rate\": 0.2880354505169867, \"precision\": 0.9870307167235495, \"recall\": 0.7119645494830132, \"specificity\": 0.9834061135371179, \"npv\": 0.6580946814728229, \"accuracy\": 0.809823677581864, \"f1\": 0.8272311212814645, \"f2\": 0.75398894566691, \"f0_5\": 0.9162336839437334, \"p4\": 0.8074094203617235, \"phi\": 0.6697770344487317}, {\"truth_threshold\": -14.300000213086605, 
\"match_probability\": 4.957348695121048e-05, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1126.0, \"fp\": 19.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9834061135371179, \"fp_rate\": 0.016593886462882096, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9870218579234973, \"recall\": 0.7114721811915312, \"specificity\": 0.9834061135371179, \"npv\": 0.6577102803738317, \"accuracy\": 0.8095088161209067, \"f1\": 0.82689556509299, \"f2\": 0.7535460992907801, \"f0_5\": 0.9160644097882592, \"p4\": 0.8071048961802441, \"phi\": 0.6693357668739984}, {\"truth_threshold\": -13.200000196695328, \"match_probability\": 0.00010625707305470121, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1135.0, \"fp\": 10.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9912663755458515, \"fp_rate\": 0.008733624454148471, \"fn_rate\": 0.28852781880846873, \"precision\": 0.993127147766323, \"recall\": 0.7114721811915312, \"specificity\": 0.9912663755458515, \"npv\": 0.6595002905287624, \"accuracy\": 0.8123425692695214, \"f1\": 0.8290304073436604, \"f2\": 0.7542540975049588, \"f0_5\": 0.9202649344032607, \"p4\": 0.8101156090778194, \"phi\": 0.6772196571827369}, {\"truth_threshold\": -12.800000190734863, \"match_probability\": 0.00014020228918616167, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1137.0, \"fp\": 8.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9930131004366812, \"fp_rate\": 0.0069868995633187774, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9944941500344116, \"recall\": 0.7114721811915312, \"specificity\": 0.9930131004366812, \"npv\": 0.6598955310504934, \"accuracy\": 0.8129722921914357, \"f1\": 
0.8295063145809415, \"f2\": 0.7544116111517176, \"f0_5\": 0.9212036210633686, \"p4\": 0.8107834022242488, \"phi\": 0.6789756245799222}, {\"truth_threshold\": -12.400000184774399, \"match_probability\": 0.00018498974370122882, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1138.0, \"fp\": 7.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.993886462882096, \"fp_rate\": 0.00611353711790393, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9951790633608816, \"recall\": 0.7114721811915312, \"specificity\": 0.993886462882096, \"npv\": 0.660092807424594, \"accuracy\": 0.8132871536523929, \"f1\": 0.8297444731553258, \"f2\": 0.7544903926482874, \"f0_5\": 0.9216736828677127, \"p4\": 0.8111171302195401, \"phi\": 0.6798541595642643}, {\"truth_threshold\": -11.700000174343586, \"match_probability\": 0.0003004820136373637, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1139.0, \"fp\": 6.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.994759825327511, \"fp_rate\": 0.005240174672489083, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9958649207443143, \"recall\": 0.7114721811915312, \"specificity\": 0.994759825327511, \"npv\": 0.6602898550724637, \"accuracy\": 0.8136020151133502, \"f1\": 0.8299827685238369, \"f2\": 0.7545691906005222, \"f0_5\": 0.9221442246330568, \"p4\": 0.8114507463687338, \"phi\": 0.680733063624895}, {\"truth_threshold\": -11.300000168383121, \"match_probability\": 0.00039645033391533577, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1142.0, \"fp\": 3.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9973799126637555, \"fp_rate\": 0.0026200873362445414, \"fn_rate\": 0.28852781880846873, 
\"precision\": 0.9979281767955801, \"recall\": 0.7114721811915312, \"specificity\": 0.9973799126637555, \"npv\": 0.6608796296296297, \"accuracy\": 0.8145465994962217, \"f1\": 0.8306984765737281, \"f2\": 0.7548056832427915, \"f0_5\": 0.9235587370573949, \"p4\": 0.81245092776752, \"phi\": 0.683372001937977}, {\"truth_threshold\": -10.900000162422657, \"match_probability\": 0.0005230530993675534, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1445.0, \"tn\": 1143.0, \"fp\": 2.0, \"fn\": 586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7114721811915312, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.28852781880846873, \"precision\": 0.9986178299930891, \"recall\": 0.7114721811915312, \"specificity\": 0.9982532751091703, \"npv\": 0.6610757663389243, \"accuracy\": 0.8148614609571788, \"f1\": 0.8309373202990225, \"f2\": 0.7548845470692718, \"f0_5\": 0.9240312060365775, \"p4\": 0.8127841005555416, \"phi\": 0.6842523939858662}, {\"truth_threshold\": -10.700000159442425, \"match_probability\": 0.0006007835088396779, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1444.0, \"tn\": 1143.0, \"fp\": 2.0, \"fn\": 587.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7109798129000492, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.28902018709995075, \"precision\": 0.9986168741355463, \"recall\": 0.7109798129000492, \"specificity\": 0.9982532751091703, \"npv\": 0.6606936416184971, \"accuracy\": 0.8145465994962217, \"f1\": 0.8306010928961749, \"f2\": 0.754440961337513, \"f0_5\": 0.9238643634037108, \"p4\": 0.8124788095466353, \"phi\": 0.6838163737767555}, {\"truth_threshold\": -10.200000151991844, \"match_probability\": 0.0008494248089972806, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1443.0, \"tn\": 1143.0, \"fp\": 2.0, \"fn\": 588.0, \"P_rate\": 
0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7104874446085672, \"tn_rate\": 0.9982532751091703, \"fp_rate\": 0.0017467248908296944, \"fn_rate\": 0.2895125553914328, \"precision\": 0.9986159169550173, \"recall\": 0.7104874446085672, \"specificity\": 0.9982532751091703, \"npv\": 0.6603119584055459, \"accuracy\": 0.8142317380352645, \"f1\": 0.830264672036824, \"f2\": 0.7539972828926743, \"f0_5\": 0.9236973498911791, \"p4\": 0.8121735251016357, \"phi\": 0.6833805796370901}, {\"truth_threshold\": -9.100000135600567, \"match_probability\": 0.0018190150448253225, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1443.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 588.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7104874446085672, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 0.0008733624454148472, \"fn_rate\": 0.2895125553914328, \"precision\": 0.9993074792243767, \"recall\": 0.7104874446085672, \"specificity\": 0.9991266375545852, \"npv\": 0.6605080831408776, \"accuracy\": 0.8145465994962217, \"f1\": 0.8305035971223022, \"f2\": 0.7540760869565217, \"f0_5\": 0.9241706161137441, \"p4\": 0.8125064984715595, \"phi\": 0.6842619488798015}, {\"truth_threshold\": -8.600000128149986, \"match_probability\": 0.0025705389597152823, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1440.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 591.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7090103397341211, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 0.0008733624454148472, \"fn_rate\": 0.29098966026587886, \"precision\": 0.9993060374739764, \"recall\": 0.7090103397341211, \"specificity\": 0.9991266375545852, \"npv\": 0.6593659942363113, \"accuracy\": 0.8136020151133502, \"f1\": 0.8294930875576036, \"f2\": 0.7527443805541035, \"f0_5\": 0.9236690186016677, \"p4\": 0.811590547208778, \"phi\": 0.6829568226176045}, {\"truth_threshold\": -8.400000125169754, 
\"match_probability\": 0.0029516456585356845, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1439.0, \"tn\": 1144.0, \"fp\": 1.0, \"fn\": 592.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7085179714426391, \"tn_rate\": 0.9991266375545852, \"fp_rate\": 0.0008733624454148472, \"fn_rate\": 0.2914820285573609, \"precision\": 0.9993055555555556, \"recall\": 0.7085179714426391, \"specificity\": 0.9991266375545852, \"npv\": 0.6589861751152074, \"accuracy\": 0.8132871536523929, \"f1\": 0.829155862863728, \"f2\": 0.7523002927645337, \"f0_5\": 0.923501476062123, \"p4\": 0.8112852415850365, \"phi\": 0.6825222299358593}, {\"truth_threshold\": -7.600000113248825, \"match_probability\": 0.0051278965144870335, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1439.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 592.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7085179714426391, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2914820285573609, \"precision\": 1.0, \"recall\": 0.7085179714426391, \"specificity\": 1.0, \"npv\": 0.6591824985607369, \"accuracy\": 0.8136020151133502, \"f1\": 0.8293948126801153, \"f2\": 0.7523789605772248, \"f0_5\": 0.9239758571978939, \"p4\": 0.8116179257342173, \"phi\": 0.6834051848579609}, {\"truth_threshold\": -7.400000110268593, \"match_probability\": 0.005885918232687788, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1437.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 594.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.707533234859675, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29246676514032494, \"precision\": 1.0, \"recall\": 0.707533234859675, \"specificity\": 1.0, \"npv\": 0.6584243818286372, \"accuracy\": 0.8129722921914357, \"f1\": 0.828719723183391, \"f2\": 0.7514904298713524, \"f0_5\": 0.9236405707674509, \"p4\": 0.811007239776182, \"phi\": 0.6825372757481436}, 
{\"truth_threshold\": -6.800000101327896, \"match_probability\": 0.00889438522932807, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1436.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 595.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.707040866568193, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.292959133431807, \"precision\": 1.0, \"recall\": 0.707040866568193, \"specificity\": 1.0, \"npv\": 0.6580459770114943, \"accuracy\": 0.8126574307304786, \"f1\": 0.8283818863570811, \"f2\": 0.7510460251046025, \"f0_5\": 0.9234726688102894, \"p4\": 0.8107019041426428, \"phi\": 0.6821036562194343}, {\"truth_threshold\": -6.70000009983778, \"match_probability\": 0.009526684411466419, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1428.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 603.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7031019202363368, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2968980797636632, \"precision\": 1.0, \"recall\": 0.7031019202363368, \"specificity\": 1.0, \"npv\": 0.6550343249427918, \"accuracy\": 0.8101385390428212, \"f1\": 0.8256721595836947, \"f2\": 0.7474874371859297, \"f0_5\": 0.9221232080588919, \"p4\": 0.8082593661032169, \"phi\": 0.6786426833673147}, {\"truth_threshold\": -6.400000095367432, \"match_probability\": 0.011702953955477532, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1424.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 607.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7011324470704087, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29886755292959133, \"precision\": 1.0, \"recall\": 0.7011324470704087, \"specificity\": 1.0, \"npv\": 0.6535388127853882, \"accuracy\": 0.8088790931989924, \"f1\": 0.8243125904486251, \"f2\": 0.7457059069962296, \"f0_5\": 0.9214442862689272, \"p4\": 0.8070381719383619, \"phi\": 0.6769174743376838}, {\"truth_threshold\": 
-6.3000000938773155, \"match_probability\": 0.012532388771145032, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1423.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 608.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7006400787789266, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.29935992122107336, \"precision\": 1.0, \"recall\": 0.7006400787789266, \"specificity\": 1.0, \"npv\": 0.6531660011409013, \"accuracy\": 0.8085642317380353, \"f1\": 0.8239722061378112, \"f2\": 0.74526029119095, \"f0_5\": 0.9212741162760585, \"p4\": 0.8067328787708493, \"phi\": 0.6764867171608602}, {\"truth_threshold\": -5.700000084936619, \"match_probability\": 0.01887356650421064, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1422.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 609.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.7001477104874446, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.2998522895125554, \"precision\": 1.0, \"recall\": 0.7001477104874446, \"specificity\": 1.0, \"npv\": 0.6527936145952109, \"accuracy\": 0.8082493702770781, \"f1\": 0.8236316246741964, \"f2\": 0.7448145820238844, \"f0_5\": 0.9211037699183832, \"p4\": 0.8064275873033679, \"phi\": 0.676056177162564}, {\"truth_threshold\": -5.600000083446503, \"match_probability\": 0.02020082327925431, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1405.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 626.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6917774495322502, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3082225504677499, \"precision\": 1.0, \"recall\": 0.6917774495322502, \"specificity\": 1.0, \"npv\": 0.6465273856578204, \"accuracy\": 0.802896725440806, \"f1\": 0.8178114086146682, \"f2\": 0.737223213348725, \"f0_5\": 0.9181806299830088, \"p4\": 0.8012376730750075, \"phi\": 0.6687698153349331}, {\"truth_threshold\": -5.300000078976154, 
\"match_probability\": 0.024754544222716376, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1393.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 638.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6858690300344658, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31413096996553425, \"precision\": 1.0, \"recall\": 0.6858690300344658, \"specificity\": 1.0, \"npv\": 0.6421761076836792, \"accuracy\": 0.7991183879093199, \"f1\": 0.8136682242990654, \"f2\": 0.7318482715141326, \"f0_5\": 0.9160857556227805, \"p4\": 0.7975738525329583, \"phi\": 0.6636630953189379}, {\"truth_threshold\": -4.90000007301569, \"match_probability\": 0.032407497325934585, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1391.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 640.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6848842934515017, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3151157065484983, \"precision\": 1.0, \"recall\": 0.6848842934515017, \"specificity\": 1.0, \"npv\": 0.6414565826330533, \"accuracy\": 0.7984886649874056, \"f1\": 0.8129748684979544, \"f2\": 0.7309511297950604, \"f0_5\": 0.9157340355497038, \"p4\": 0.7969631540364932, \"phi\": 0.6628148598035907}, {\"truth_threshold\": -4.800000071525574, \"match_probability\": 0.03465289308554322, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1390.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 641.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6843919251600197, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3156080748399803, \"precision\": 1.0, \"recall\": 0.6843919251600197, \"specificity\": 1.0, \"npv\": 0.641097424412094, \"accuracy\": 0.7981738035264484, \"f1\": 0.8126278865828706, \"f2\": 0.7305024174900148, \"f0_5\": 0.9155578975102094, \"p4\": 0.7966577964206586, \"phi\": 0.6623910480286727}, {\"truth_threshold\": -4.500000067055225, 
\"match_probability\": 0.04232371044088178, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1389.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 642.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6838995568685377, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31610044313146235, \"precision\": 1.0, \"recall\": 0.6838995568685377, \"specificity\": 1.0, \"npv\": 0.6407386681589256, \"accuracy\": 0.7978589420654912, \"f1\": 0.8122807017543859, \"f2\": 0.7300536108483129, \"f0_5\": 0.9153815737445631, \"p4\": 0.7963524329131265, \"phi\": 0.6619674396995868}, {\"truth_threshold\": -4.300000064074993, \"match_probability\": 0.048313119674570026, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1388.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 643.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6834071885770556, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3165928114229444, \"precision\": 1.0, \"recall\": 0.6834071885770556, \"specificity\": 1.0, \"npv\": 0.6403803131991052, \"accuracy\": 0.797544080604534, \"f1\": 0.8119333138344546, \"f2\": 0.7296047098402019, \"f0_5\": 0.9152050639588554, \"p4\": 0.7960470632785187, \"phi\": 0.6615440344100268}, {\"truth_threshold\": -4.200000062584877, \"match_probability\": 0.05160178526561565, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1382.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 649.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6804529788281635, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.31954702117183653, \"precision\": 1.0, \"recall\": 0.6804529788281635, \"specificity\": 1.0, \"npv\": 0.6382385730211817, \"accuracy\": 0.7956549118387909, \"f1\": 0.8098447113975974, \"f2\": 0.7269093204292026, \"f0_5\": 0.9141420822860167, \"p4\": 0.7942147035798486, \"phi\": 0.6590078438192518}, {\"truth_threshold\": -3.8000000566244125, 
\"match_probability\": 0.06698457743861425, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1379.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 652.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6789758739537174, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3210241260462826, \"precision\": 1.0, \"recall\": 0.6789758739537174, \"specificity\": 1.0, \"npv\": 0.6371730662214803, \"accuracy\": 0.7947103274559194, \"f1\": 0.8087976539589443, \"f2\": 0.7255603493633589, \"f0_5\": 0.9136080561812641, \"p4\": 0.7932984218488349, \"phi\": 0.6577424568153551}, {\"truth_threshold\": -3.6000000536441803, \"match_probability\": 0.0761862214703254, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1377.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 654.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6779911373707533, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.32200886262924666, \"precision\": 1.0, \"recall\": 0.6779911373707533, \"specificity\": 1.0, \"npv\": 0.6364647026125625, \"accuracy\": 0.7940806045340051, \"f1\": 0.8080985915492958, \"f2\": 0.7246605620461004, \"f0_5\": 0.9132510943095902, \"p4\": 0.7926875252637993, \"phi\": 0.6568998611817706}, {\"truth_threshold\": -3.300000049173832, \"match_probability\": 0.09217307161544283, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1375.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 656.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6770064007877893, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3229935992122107, \"precision\": 1.0, \"recall\": 0.6770064007877893, \"specificity\": 1.0, \"npv\": 0.6357579122709606, \"accuracy\": 0.7934508816120907, \"f1\": 0.8073987081620669, \"f2\": 0.7237603958311402, \"f0_5\": 0.9128933740539105, \"p4\": 0.7920765927688658, \"phi\": 0.6560580583751121}, {\"truth_threshold\": -3.200000047683716, 
\"match_probability\": 0.09813940308831819, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1374.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 657.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6765140324963073, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.32348596750369274, \"precision\": 1.0, \"recall\": 0.6765140324963073, \"specificity\": 1.0, \"npv\": 0.6354051054384018, \"accuracy\": 0.7931360201511335, \"f1\": 0.8070484581497798, \"f2\": 0.7233101705622236, \"f0_5\": 0.912714228776405, \"p4\": 0.7917711124557005, \"phi\": 0.6556374532841107}, {\"truth_threshold\": -3.1000000461935997, \"match_probability\": 0.10444750015659417, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1370.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 661.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6745445593303792, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3254554406696209, \"precision\": 1.0, \"recall\": 0.6745445593303792, \"specificity\": 1.0, \"npv\": 0.6339977851605758, \"accuracy\": 0.7918765743073047, \"f1\": 0.8056453984122317, \"f2\": 0.7215083210448704, \"f0_5\": 0.9119957395819465, \"p4\": 0.7905490918185237, \"phi\": 0.6539569990508374}, {\"truth_threshold\": -2.7000000402331352, \"match_probability\": 0.13336855415354743, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1363.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 668.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.671097981290005, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3289020187099951, \"precision\": 1.0, \"recall\": 0.671097981290005, \"specificity\": 1.0, \"npv\": 0.631549917264203, \"accuracy\": 0.7896725440806045, \"f1\": 0.803182086034178, \"f2\": 0.7183514282702645, \"f0_5\": 0.9107309902445543, \"p4\": 0.7884101358393227, \"phi\": 0.6510237127477586}, {\"truth_threshold\": -2.600000038743019, \"match_probability\": 
0.1415855743659812, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1356.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 675.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6676514032496307, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.33234859675036926, \"precision\": 1.0, \"recall\": 0.6676514032496307, \"specificity\": 1.0, \"npv\": 0.6291208791208791, \"accuracy\": 0.7874685138539043, \"f1\": 0.8007085916740478, \"f2\": 0.7151898734177216, \"f0_5\": 0.9094567404426559, \"p4\": 0.7862705739141664, \"phi\": 0.6480998671182523}, {\"truth_threshold\": -2.500000037252903, \"match_probability\": 0.15022110152606716, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1355.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 676.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6671590349581487, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3328409650418513, \"precision\": 1.0, \"recall\": 0.6671590349581487, \"specificity\": 1.0, \"npv\": 0.628775398132894, \"accuracy\": 0.7871536523929471, \"f1\": 0.8003544004725339, \"f2\": 0.7147378415444667, \"f0_5\": 0.9092739229633606, \"p4\": 0.7859648678428045, \"phi\": 0.6476829377278417}, {\"truth_threshold\": -2.400000035762787, \"match_probability\": 0.1592855907727143, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1351.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 680.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6651895617922206, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3348104382077794, \"precision\": 1.0, \"recall\": 0.6651895617922206, \"specificity\": 1.0, \"npv\": 0.6273972602739726, \"accuracy\": 0.7858942065491183, \"f1\": 0.7989355410999409, \"f2\": 0.7129287598944591, \"f0_5\": 0.9085406859448554, \"p4\": 0.7847418977635621, \"phi\": 0.6460171117170842}, {\"truth_threshold\": -2.3000000342726707, \"match_probability\": 0.16878839957195682, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1346.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 685.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6627277203348104, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3372722796651896, \"precision\": 1.0, \"recall\": 0.6627277203348104, \"specificity\": 1.0, \"npv\": 0.6256830601092896, \"accuracy\": 0.7843198992443325, \"f1\": 0.7971572401539828, \"f2\": 0.7106652587117213, \"f0_5\": 0.9076196898179366, \"p4\": 0.7832128384966869, \"phi\": 0.6439390561833764}, {\"truth_threshold\": -2.2000000327825546, \"match_probability\": 0.1787376058900962, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1344.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 687.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6617429837518464, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.33825701624815363, \"precision\": 1.0, \"recall\": 0.6617429837518464, \"specificity\": 1.0, \"npv\": 0.625, \"accuracy\": 0.7836901763224181, \"f1\": 0.7964444444444444, \"f2\": 0.7097591888466414, \"f0_5\": 0.907249898744431, \"p4\": 0.7826011005327976, \"phi\": 0.6431091391396206}, {\"truth_threshold\": -2.1000000312924385, \"match_probability\": 0.18913982061899084, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1340.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 691.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6597735105859183, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34022648941408173, \"precision\": 1.0, \"recall\": 0.6597735105859183, \"specificity\": 1.0, \"npv\": 0.6236383442265795, \"accuracy\": 0.7824307304785895, \"f1\": 0.7950163156333432, \"f2\": 0.7079459002535926, \"f0_5\": 0.9065079150317954, \"p4\": 0.7813774176935412, \"phi\": 0.6414515256091918}, {\"truth_threshold\": -2.0000000298023224, \"match_probability\": 0.19999999669481672, \"total_clerical_labels\": 
3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1338.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 693.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6587887740029542, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3412112259970458, \"precision\": 1.0, \"recall\": 0.6587887740029542, \"specificity\": 1.0, \"npv\": 0.6229597388465724, \"accuracy\": 0.781801007556675, \"f1\": 0.7943009795191451, \"f2\": 0.7070386810399493, \"f0_5\": 0.9061357171881349, \"p4\": 0.7807654687830268, \"phi\": 0.6406238230099892}, {\"truth_threshold\": -1.700000025331974, \"match_probability\": 0.2353489599091234, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1335.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 696.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6573116691285081, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34268833087149186, \"precision\": 1.0, \"recall\": 0.6573116691285081, \"specificity\": 1.0, \"npv\": 0.6219445953286258, \"accuracy\": 0.7808564231738035, \"f1\": 0.7932263814616756, \"f2\": 0.7056771328893118, \"f0_5\": 0.9055759055759056, \"p4\": 0.7798474053553527, \"phi\": 0.6393836407517114}, {\"truth_threshold\": -1.600000023841858, \"match_probability\": 0.24805074388621665, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1333.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 698.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6563269325455441, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3436730674544559, \"precision\": 1.0, \"recall\": 0.6563269325455441, \"specificity\": 1.0, \"npv\": 0.6212696690179056, \"accuracy\": 0.7802267002518891, \"f1\": 0.7925089179548157, \"f2\": 0.7047689542138099, \"f0_5\": 0.9052016840961564, \"p4\": 0.7792352667284765, \"phi\": 0.6385577625791636}, {\"truth_threshold\": -1.4000000208616257, \"match_probability\": 0.2747995717943022, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, 
\"n\": 1145.0, \"tp\": 1332.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 699.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6558345642540621, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34416543574593794, \"precision\": 1.0, \"recall\": 0.6558345642540621, \"specificity\": 1.0, \"npv\": 0.6209327548806941, \"accuracy\": 0.779911838790932, \"f1\": 0.792149866190901, \"f2\": 0.7043147208121827, \"f0_5\": 0.9050142682429678, \"p4\": 0.778929167747172, \"phi\": 0.6381450953570468}, {\"truth_threshold\": -1.3000000193715096, \"match_probability\": 0.2888262766358852, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1327.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 704.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6533727227966519, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3466272772033481, \"precision\": 1.0, \"recall\": 0.6533727227966519, \"specificity\": 1.0, \"npv\": 0.6192536506219578, \"accuracy\": 0.7783375314861462, \"f1\": 0.7903513996426444, \"f2\": 0.7020421119458259, \"f0_5\": 0.904074124540128, \"p4\": 0.7773983659139201, \"phi\": 0.6360844627945531}, {\"truth_threshold\": -1.2000000178813934, \"match_probability\": 0.3032695424040186, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1322.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 709.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6509108813392418, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.34908911866075826, \"precision\": 1.0, \"recall\": 0.6509108813392418, \"specificity\": 1.0, \"npv\": 0.6175836030204962, \"accuracy\": 0.7767632241813602, \"f1\": 0.7885475693408888, \"f2\": 0.6997670971839932, \"f0_5\": 0.9031288427380789, \"p4\": 0.775867028931697, \"phi\": 0.6340283016890773}, {\"truth_threshold\": -1.1000000163912773, \"match_probability\": 0.318111997717226, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 
1320.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 711.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6499261447562777, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3500738552437223, \"precision\": 1.0, \"recall\": 0.6499261447562777, \"specificity\": 1.0, \"npv\": 0.6169181034482759, \"accuracy\": 0.7761335012594458, \"f1\": 0.7878245299910475, \"f2\": 0.6988564167725541, \"f0_5\": 0.9027492819039803, \"p4\": 0.7752543370502095, \"phi\": 0.6332070787700438}, {\"truth_threshold\": -1.0000000149011612, \"match_probability\": 0.33333333103806717, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1319.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 712.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6494337764647957, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35056622353520434, \"precision\": 1.0, \"recall\": 0.6494337764647957, \"specificity\": 1.0, \"npv\": 0.6165858912224017, \"accuracy\": 0.7758186397984886, \"f1\": 0.7874626865671642, \"f2\": 0.6984009319072328, \"f0_5\": 0.9025591898179828, \"p4\": 0.7749479564070448, \"phi\": 0.6327967318590355}, {\"truth_threshold\": -0.9000000134110451, \"match_probability\": 0.34891031813411577, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1316.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 715.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6479566715903495, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3520433284096504, \"precision\": 1.0, \"recall\": 0.6479566715903495, \"specificity\": 1.0, \"npv\": 0.6155913978494624, \"accuracy\": 0.7748740554156172, \"f1\": 0.7863758589781894, \"f2\": 0.6970338983050848, \"f0_5\": 0.9019876627827279, \"p4\": 0.774028672532505, \"phi\": 0.6315667448577293}, {\"truth_threshold\": -0.7000000104308128, \"match_probability\": 0.38102425962470177, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1314.0, \"tn\": 
1145.0, \"fp\": 0.0, \"fn\": 717.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6469719350073855, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35302806499261447, \"precision\": 1.0, \"recall\": 0.6469719350073855, \"specificity\": 1.0, \"npv\": 0.6149301825993555, \"accuracy\": 0.7742443324937027, \"f1\": 0.7856502242152467, \"f2\": 0.6961220597584233, \"f0_5\": 0.901605599011939, \"p4\": 0.7734156957074743, \"phi\": 0.6307476279232052}, {\"truth_threshold\": -0.6000000089406967, \"match_probability\": 0.3975010577814427, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1309.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 722.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6445100935499753, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3554899064500246, \"precision\": 1.0, \"recall\": 0.6445100935499753, \"specificity\": 1.0, \"npv\": 0.6132833422603107, \"accuracy\": 0.7726700251889169, \"f1\": 0.7838323353293413, \"f2\": 0.6938407717587194, \"f0_5\": 0.9006467593229668, \"p4\": 0.7718828151071816, \"phi\": 0.6287028744111437}, {\"truth_threshold\": -0.5000000074505806, \"match_probability\": 0.41421356112001384, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1306.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 725.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6430329886755293, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.35696701132447073, \"precision\": 1.0, \"recall\": 0.6430329886755293, \"specificity\": 1.0, \"npv\": 0.6122994652406417, \"accuracy\": 0.7717254408060453, \"f1\": 0.7827389871141744, \"f2\": 0.6924708377518558, \"f0_5\": 0.9000689179875948, \"p4\": 0.7709627754494935, \"phi\": 0.627478091329186}, {\"truth_threshold\": -0.4000000059604645, \"match_probability\": 0.4311259267559445, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1300.0, \"tn\": 1145.0, \"fp\": 0.0, 
\"fn\": 731.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6400787789266371, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3599212210733629, \"precision\": 1.0, \"recall\": 0.6400787789266371, \"specificity\": 1.0, \"npv\": 0.6103411513859275, \"accuracy\": 0.7698362720403022, \"f1\": 0.7805463824677275, \"f2\": 0.6897283531409168, \"f0_5\": 0.8989074816761167, \"p4\": 0.7691219621523272, \"phi\": 0.6250331342479231}, {\"truth_threshold\": -0.30000000447034836, \"match_probability\": 0.4482004805735527, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1297.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 734.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.638601674052191, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36139832594780896, \"precision\": 1.0, \"recall\": 0.638601674052191, \"specificity\": 1.0, \"npv\": 0.6093666844065992, \"accuracy\": 0.7688916876574308, \"f1\": 0.7794471153846154, \"f2\": 0.6883558008703959, \"f0_5\": 0.8983238675716858, \"p4\": 0.7682011740290052, \"phi\": 0.6238129405308033}, {\"truth_threshold\": -0.20000000298023224, \"match_probability\": 0.4653980381052749, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1284.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 747.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6322008862629247, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.36779911373707536, \"precision\": 1.0, \"recall\": 0.6322008862629247, \"specificity\": 1.0, \"npv\": 0.6051797040169133, \"accuracy\": 0.7647984886649875, \"f1\": 0.7746606334841629, \"f2\": 0.6823979591836735, \"f0_5\": 0.895772289660946, \"p4\": 0.7642079467115986, \"phi\": 0.6185427594175095}, {\"truth_threshold\": 0.0, \"match_probability\": 0.5, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1283.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 748.0, \"P_rate\": 0.6394836272040302, 
\"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6317085179714427, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3682914820285574, \"precision\": 1.0, \"recall\": 0.6317085179714427, \"specificity\": 1.0, \"npv\": 0.6048600105652404, \"accuracy\": 0.7644836272040302, \"f1\": 0.7742908871454436, \"f2\": 0.6819389816094398, \"f0_5\": 0.8955744799664945, \"p4\": 0.7639005528137026, \"phi\": 0.6181385126768588}, {\"truth_threshold\": 0.10000000149011612, \"match_probability\": 0.5173217450900928, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1279.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 752.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6297390448055146, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3702609551944855, \"precision\": 1.0, \"recall\": 0.6297390448055146, \"specificity\": 1.0, \"npv\": 0.6035846072746441, \"accuracy\": 0.7632241813602015, \"f1\": 0.7728096676737161, \"f2\": 0.6801020950760396, \"f0_5\": 0.8947810270043375, \"p4\": 0.7626706427097226, \"phi\": 0.6165231496419628}, {\"truth_threshold\": 0.20000000298023224, \"match_probability\": 0.5346019618947252, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1272.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 759.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6262924667651403, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37370753323485967, \"precision\": 1.0, \"recall\": 0.6262924667651403, \"specificity\": 1.0, \"npv\": 0.6013655462184874, \"accuracy\": 0.7610201511335013, \"f1\": 0.7702089009990918, \"f2\": 0.6768837803320562, \"f0_5\": 0.8933839022334598, \"p4\": 0.7605169691954441, \"phi\": 0.6137024615957984}, {\"truth_threshold\": 0.30000000447034836, \"match_probability\": 0.5517995194264473, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1271.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 760.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.6258000984736583, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3741999015263417, \"precision\": 1.0, \"recall\": 0.6258000984736583, \"specificity\": 1.0, \"npv\": 0.6010498687664042, \"accuracy\": 0.760705289672544, \"f1\": 0.7698364627498486, \"f2\": 0.6764236295902075, \"f0_5\": 0.893183415319747, \"p4\": 0.7602091587940459, \"phi\": 0.6133001443515198}, {\"truth_threshold\": 0.5000000074505806, \"match_probability\": 0.5857864388799862, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1268.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 763.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6243229935992122, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.37567700640078777, \"precision\": 1.0, \"recall\": 0.6243229935992122, \"specificity\": 1.0, \"npv\": 0.600104821802935, \"accuracy\": 0.7597607052896725, \"f1\": 0.7687177932706881, \"f2\": 0.6750425894378195, \"f0_5\": 0.892580599746586, \"p4\": 0.7592855072439135, \"phi\": 0.6120941421230318}, {\"truth_threshold\": 0.6000000089406967, \"match_probability\": 0.6024989422185573, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1266.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 765.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6233382570162481, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3766617429837518, \"precision\": 1.0, \"recall\": 0.6233382570162481, \"specificity\": 1.0, \"npv\": 0.599476439790576, \"accuracy\": 0.7591309823677582, \"f1\": 0.7679708826205641, \"f2\": 0.6741214057507987, \"f0_5\": 0.8921775898520085, \"p4\": 0.7586695530830421, \"phi\": 0.6112909283650163}, {\"truth_threshold\": 0.7000000104308128, \"match_probability\": 0.6189757403752982, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1255.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 776.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, 
\"tp_rate\": 0.6179222058099458, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.38207779419005417, \"precision\": 1.0, \"recall\": 0.6179222058099458, \"specificity\": 1.0, \"npv\": 0.5960437272254034, \"accuracy\": 0.7556675062972292, \"f1\": 0.7638466220328667, \"f2\": 0.6690478729075594, \"f0_5\": 0.8899446886966388, \"p4\": 0.7552790297360157, \"phi\": 0.6068843832941353}, {\"truth_threshold\": 0.800000011920929, \"match_probability\": 0.6351831076021942, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1249.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 782.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6149679960610537, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3850320039389463, \"precision\": 1.0, \"recall\": 0.6149679960610537, \"specificity\": 1.0, \"npv\": 0.5941878567721848, \"accuracy\": 0.7537783375314862, \"f1\": 0.7615853658536585, \"f2\": 0.6662754721007148, \"f0_5\": 0.8887149565959869, \"p4\": 0.7534275803790252, \"phi\": 0.6044886397303119}, {\"truth_threshold\": 0.9000000134110451, \"match_probability\": 0.6510896818658842, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1246.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 785.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6134908911866076, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3865091088133924, \"precision\": 1.0, \"recall\": 0.6134908911866076, \"specificity\": 1.0, \"npv\": 0.5932642487046632, \"accuracy\": 0.7528337531486146, \"f1\": 0.7604516325907843, \"f2\": 0.6648879402347919, \"f0_5\": 0.8880969351389879, \"p4\": 0.7525012807216611, \"phi\": 0.6032928083832734}, {\"truth_threshold\": 1.0000000149011612, \"match_probability\": 0.6666666689619328, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1241.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 790.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 
0.6110290497291975, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3889709502708026, \"precision\": 1.0, \"recall\": 0.6110290497291975, \"specificity\": 1.0, \"npv\": 0.5917312661498708, \"accuracy\": 0.7512594458438288, \"f1\": 0.758557457212714, \"f2\": 0.6625734116390817, \"f0_5\": 0.8870621872766261, \"p4\": 0.7509565686139796, \"phi\": 0.6013027467512604}, {\"truth_threshold\": 1.1000000163912773, \"match_probability\": 0.681888002282774, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1238.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 793.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6095519448547514, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.39044805514524866, \"precision\": 1.0, \"recall\": 0.6095519448547514, \"specificity\": 1.0, \"npv\": 0.5908152734778122, \"accuracy\": 0.7503148614609572, \"f1\": 0.757418170694402, \"f2\": 0.6611835077974791, \"f0_5\": 0.8864384934841758, \"p4\": 0.7500292006663175, \"phi\": 0.6001104889920623}, {\"truth_threshold\": 1.2000000178813934, \"match_probability\": 0.6967304575959814, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1232.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 799.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.6065977351058592, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.3934022648941408, \"precision\": 1.0, \"recall\": 0.6065977351058592, \"specificity\": 1.0, \"npv\": 0.5889917695473251, \"accuracy\": 0.7484256926952141, \"f1\": 0.7551333129022372, \"f2\": 0.6584010260795211, \"f0_5\": 0.8851846529673804, \"p4\": 0.748173210499427, \"phi\": 0.5977299335012423}, {\"truth_threshold\": 1.3000000193715096, \"match_probability\": 0.7111737233641148, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1217.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 814.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5992122107336287, 
\"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.40078778926637126, \"precision\": 1.0, \"recall\": 0.5992122107336287, \"specificity\": 1.0, \"npv\": 0.5844818785094436, \"accuracy\": 0.7437027707808564, \"f1\": 0.749384236453202, \"f2\": 0.6514291831709667, \"f0_5\": 0.8820118857805479, \"p4\": 0.7435255360881731, \"phi\": 0.5918012154054669}, {\"truth_threshold\": 1.5000000223517418, \"match_probability\": 0.7387961280260511, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1216.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 815.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5987198424421467, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4012801575578533, \"precision\": 1.0, \"recall\": 0.5987198424421467, \"specificity\": 1.0, \"npv\": 0.5841836734693877, \"accuracy\": 0.7433879093198993, \"f1\": 0.7489990760702187, \"f2\": 0.6509635974304069, \"f0_5\": 0.8817984046410442, \"p4\": 0.7432152820546354, \"phi\": 0.5914070991600171}, {\"truth_threshold\": 1.600000023841858, \"match_probability\": 0.7519492561137834, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1213.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 818.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5972427375677006, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.40275726243229937, \"precision\": 1.0, \"recall\": 0.5972427375677006, \"specificity\": 1.0, \"npv\": 0.5832908813041263, \"accuracy\": 0.7424433249370277, \"f1\": 0.747842170160296, \"f2\": 0.6495662418335654, \"f0_5\": 0.8811564724684005, \"p4\": 0.742284201345094, \"phi\": 0.590225586321326}, {\"truth_threshold\": 1.700000025331974, \"match_probability\": 0.7646510400908766, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1200.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 831.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5908419497784343, \"tn_rate\": 1.0, 
\"fp_rate\": 0.0, \"fn_rate\": 0.4091580502215657, \"precision\": 1.0, \"recall\": 0.5908419497784343, \"specificity\": 1.0, \"npv\": 0.5794534412955465, \"accuracy\": 0.7383501259445844, \"f1\": 0.7428040854224698, \"f2\": 0.6435006435006435, \"f0_5\": 0.8783487044356609, \"p4\": 0.7382438098538624, \"phi\": 0.5851199886013844}, {\"truth_threshold\": 1.8000000268220901, \"match_probability\": 0.7768953900182098, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1195.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 836.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5883801083210242, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4116198916789759, \"precision\": 1.0, \"recall\": 0.5883801083210242, \"specificity\": 1.0, \"npv\": 0.5779909136799596, \"accuracy\": 0.7367758186397985, \"f1\": 0.7408555486670799, \"f2\": 0.6411632149372251, \"f0_5\": 0.8772573777712523, \"p4\": 0.7366872427429624, \"phi\": 0.5831623756721471}, {\"truth_threshold\": 1.9000000283122063, \"match_probability\": 0.7886787621992872, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1190.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 841.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.585918266863614, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.414081733136386, \"precision\": 1.0, \"recall\": 0.585918266863614, \"specificity\": 1.0, \"npv\": 0.5765357502517623, \"accuracy\": 0.7352015113350125, \"f1\": 0.7389009624340267, \"f2\": 0.6388232767876315, \"f0_5\": 0.8761596230304816, \"p4\": 0.7351291857832581, \"phi\": 0.5812080759697219}, {\"truth_threshold\": 2.0000000298023224, \"match_probability\": 0.8000000033051833, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1189.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 842.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.585425898572132, \"tn_rate\": 1.0, \"fp_rate\": 0.0, 
\"fn_rate\": 0.41457410142786805, \"precision\": 1.0, \"recall\": 0.585425898572132, \"specificity\": 1.0, \"npv\": 0.5762455963764469, \"accuracy\": 0.7348866498740554, \"f1\": 0.7385093167701864, \"f2\": 0.6383549876516698, \"f0_5\": 0.8759392957123914, \"p4\": 0.7348173920177556, \"phi\": 0.5808176099748659}, {\"truth_threshold\": 2.1000000312924385, \"match_probability\": 0.8108601793810092, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1183.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 848.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5824716888232397, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4175283111767602, \"precision\": 1.0, \"recall\": 0.5824716888232397, \"specificity\": 1.0, \"npv\": 0.57451078775715, \"accuracy\": 0.7329974811083123, \"f1\": 0.7361543248288737, \"f2\": 0.6355431395723649, \"f0_5\": 0.8746118586426143, \"p4\": 0.732945325152551, \"phi\": 0.578477543896111}, {\"truth_threshold\": 2.2000000327825546, \"match_probability\": 0.8212623941099038, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1179.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 852.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5805022156573116, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4194977843426883, \"precision\": 1.0, \"recall\": 0.5805022156573116, \"specificity\": 1.0, \"npv\": 0.5733600400600901, \"accuracy\": 0.7317380352644837, \"f1\": 0.7345794392523365, \"f2\": 0.6336665591744598, \"f0_5\": 0.8737216540684749, \"p4\": 0.7316960140750485, \"phi\": 0.5769200755947459}, {\"truth_threshold\": 2.3000000342726707, \"match_probability\": 0.8312116004280432, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1164.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 867.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5731166912850812, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.42688330871491875, \"precision\": 1.0, \"recall\": 0.5731166912850812, \"specificity\": 1.0, \"npv\": 0.5690854870775348, \"accuracy\": 0.7270151133501259, \"f1\": 0.7286384976525822, \"f2\": 0.6266149870801033, \"f0_5\": 0.8703454463885151, \"p4\": 0.7270016744799519, \"phi\": 0.571097532311457}, {\"truth_threshold\": 2.400000035762787, \"match_probability\": 0.8407144092272857, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1151.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 880.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5667159034958149, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43328409650418515, \"precision\": 1.0, \"recall\": 0.5667159034958149, \"specificity\": 1.0, \"npv\": 0.5654320987654321, \"accuracy\": 0.7229219143576826, \"f1\": 0.7234443746071653, \"f2\": 0.6204851752021563, \"f0_5\": 0.8673700075357951, \"p4\": 0.7229205464573797, \"phi\": 0.5660736371863528}, {\"truth_threshold\": 2.500000037252903, \"match_probability\": 0.8497788984739328, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1148.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 883.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5652387986213688, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4347612013786312, \"precision\": 1.0, \"recall\": 0.5652387986213688, \"specificity\": 1.0, \"npv\": 0.564595660749507, \"accuracy\": 0.7219773299748111, \"f1\": 0.7222396980182447, \"f2\": 0.6190681622088007, \"f0_5\": 0.8666767325985203, \"p4\": 0.7219769863862414, \"phi\": 0.5649171381617742}, {\"truth_threshold\": 2.600000038743019, \"match_probability\": 0.8584144256340188, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1147.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 884.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5647464303298868, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.43525356967011325, 
\"precision\": 1.0, \"recall\": 0.5647464303298868, \"specificity\": 1.0, \"npv\": 0.5643173977328734, \"accuracy\": 0.7216624685138538, \"f1\": 0.7218376337319069, \"f2\": 0.6185956207528853, \"f0_5\": 0.8664450823387219, \"p4\": 0.7216623155682867, \"phi\": 0.5645318732743893}, {\"truth_threshold\": 2.7000000402331352, \"match_probability\": 0.8666314458464526, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1144.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 887.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5632693254554406, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4367306745445593, \"precision\": 1.0, \"recall\": 0.5632693254554406, \"specificity\": 1.0, \"npv\": 0.5634842519685039, \"accuracy\": 0.7207178841309824, \"f1\": 0.7206299212598425, \"f2\": 0.6171773845489857, \"f0_5\": 0.8657484486151051, \"p4\": 0.7207178457145617, \"phi\": 0.5633767784627467}, {\"truth_threshold\": 2.8000000417232513, \"match_probability\": 0.8744413378412453, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1136.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 895.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5593303791235844, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.44066962087641554, \"precision\": 1.0, \"recall\": 0.5593303791235844, \"specificity\": 1.0, \"npv\": 0.5612745098039216, \"accuracy\": 0.718198992443325, \"f1\": 0.7173981686138301, \"f2\": 0.6133909287257019, \"f0_5\": 0.8638783269961977, \"p4\": 0.7181958416012424, \"phi\": 0.560301601247963}, {\"truth_threshold\": 2.9000000432133675, \"match_probability\": 0.8818562391739494, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1135.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 896.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5588380108321024, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.44116198916789756, \"precision\": 1.0, 
\"recall\": 0.5588380108321024, \"specificity\": 1.0, \"npv\": 0.560999510044096, \"accuracy\": 0.7178841309823678, \"f1\": 0.7169930511686671, \"f2\": 0.612917161680527, \"f0_5\": 0.8636432810835489, \"p4\": 0.717880234989325, \"phi\": 0.5599177174110734}, {\"truth_threshold\": 3.1000000461935997, \"match_probability\": 0.8955524998434058, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1109.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 922.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5460364352535697, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45396356474643035, \"precision\": 1.0, \"recall\": 0.5460364352535697, \"specificity\": 1.0, \"npv\": 0.5539429124334785, \"accuracy\": 0.7096977329974811, \"f1\": 0.7063694267515923, \"f2\": 0.6005631972273368, \"f0_5\": 0.8574300293799288, \"p4\": 0.7096451676361052, \"phi\": 0.5499754660338557}, {\"truth_threshold\": 3.200000047683716, \"match_probability\": 0.9018605969116819, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1105.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 926.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5440669620876416, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45593303791235845, \"precision\": 1.0, \"recall\": 0.5440669620876416, \"specificity\": 1.0, \"npv\": 0.5528730082085949, \"accuracy\": 0.7084382871536524, \"f1\": 0.704719387755102, \"f2\": 0.5986564091450861, \"f0_5\": 0.8564563633545187, \"p4\": 0.7083729914338502, \"phi\": 0.5484523115060287}, {\"truth_threshold\": 3.300000049173832, \"match_probability\": 0.9078269283845571, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1103.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 928.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5430822255046776, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4569177744953225, \"precision\": 1.0, \"recall\": 
0.5430822255046776, \"specificity\": 1.0, \"npv\": 0.5523396044380126, \"accuracy\": 0.707808564231738, \"f1\": 0.7038927887683472, \"f2\": 0.597702395144684, \"f0_5\": 0.8559677169020643, \"p4\": 0.7077363543593591, \"phi\": 0.5476913561601727}, {\"truth_threshold\": 3.400000050663948, \"match_probability\": 0.9134653434169965, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1101.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 930.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5420974889217134, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.45790251107828656, \"precision\": 1.0, \"recall\": 0.5420974889217134, \"specificity\": 1.0, \"npv\": 0.5518072289156627, \"accuracy\": 0.7071788413098237, \"f1\": 0.7030651340996169, \"f2\": 0.5967479674796748, \"f0_5\": 0.8554778554778555, \"p4\": 0.7070993471091068, \"phi\": 0.5469308120448416}, {\"truth_threshold\": 3.500000052154064, \"match_probability\": 0.9187896995557598, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1094.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 937.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5386509108813392, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46134908911866074, \"precision\": 1.0, \"recall\": 0.5386509108813392, \"specificity\": 1.0, \"npv\": 0.5499519692603266, \"accuracy\": 0.7049748110831234, \"f1\": 0.70016, \"f2\": 0.5934042091559991, \"f0_5\": 0.8537537068830966, \"p4\": 0.7048668608108392, \"phi\": 0.544272109503198}, {\"truth_threshold\": 3.6000000536441803, \"match_probability\": 0.9238137785296746, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1093.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 938.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5381585425898572, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46184145741014276, \"precision\": 1.0, \"recall\": 0.5381585425898572, \"specificity\": 
1.0, \"npv\": 0.5496879500720115, \"accuracy\": 0.7046599496221663, \"f1\": 0.6997439180537772, \"f2\": 0.5929261147878919, \"f0_5\": 0.8535061689832891, \"p4\": 0.7045475528853299, \"phi\": 0.5438926972206558}, {\"truth_threshold\": 3.7000000551342964, \"match_probability\": 0.9285512128432143, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1089.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 942.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5361890694239291, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4638109305760709, \"precision\": 1.0, \"recall\": 0.5361890694239291, \"specificity\": 1.0, \"npv\": 0.5486344034499281, \"accuracy\": 0.7034005037783375, \"f1\": 0.698076923076923, \"f2\": 0.5910126994464344, \"f0_5\": 0.8525129168623767, \"p4\": 0.7032693518148777, \"phi\": 0.5423760413585481}, {\"truth_threshold\": 3.8000000566244125, \"match_probability\": 0.9330154225613858, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1084.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 947.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.533727227966519, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.46627277203348105, \"precision\": 1.0, \"recall\": 0.533727227966519, \"specificity\": 1.0, \"npv\": 0.5473231357552581, \"accuracy\": 0.7018261964735516, \"f1\": 0.6959871589085073, \"f2\": 0.5886185925282363, \"f0_5\": 0.8512643317103816, \"p4\": 0.701669388939399, \"phi\": 0.5404824326919393}, {\"truth_threshold\": 3.9000000581145287, \"match_probability\": 0.9372195616099515, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1074.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 957.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5288035450516987, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4711964549483013, \"precision\": 1.0, \"recall\": 0.5288035450516987, \"specificity\": 1.0, \"npv\": 
0.5447193149381542, \"accuracy\": 0.6986775818639799, \"f1\": 0.6917874396135266, \"f2\": 0.58382257012394, \"f0_5\": 0.8487434803224277, \"p4\": 0.698461896288862, \"phi\": 0.5367024359898404}, {\"truth_threshold\": 4.000000059604645, \"match_probability\": 0.9411764728755594, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1067.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 964.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5253569670113245, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.47464303298867555, \"precision\": 1.0, \"recall\": 0.5253569670113245, \"specificity\": 1.0, \"npv\": 0.5429113323850165, \"accuracy\": 0.6964735516372796, \"f1\": 0.6888315041962556, \"f2\": 0.5804591448155805, \"f0_5\": 0.8469598348944277, \"p4\": 0.6962104664501566, \"phi\": 0.5340620291107292}, {\"truth_threshold\": 4.200000062584877, \"match_probability\": 0.9483982147343843, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1030.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1001.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5071393402264894, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.49286065977351057, \"precision\": 1.0, \"recall\": 0.5071393402264894, \"specificity\": 1.0, \"npv\": 0.5335507921714818, \"accuracy\": 0.684823677581864, \"f1\": 0.6729826853969291, \"f2\": 0.5625955866287962, \"f0_5\": 0.8372622337831247, \"p4\": 0.6842191143036372, \"phi\": 0.5201774665622936}, {\"truth_threshold\": 4.300000064074993, \"match_probability\": 0.9516868803254299, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1025.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1006.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5046774987690793, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.49532250123092075, \"precision\": 1.0, \"recall\": 0.5046774987690793, \"specificity\": 1.0, \"npv\": 0.5323105532310554, 
\"accuracy\": 0.6832493702770781, \"f1\": 0.6708115183246073, \"f2\": 0.5601705104382992, \"f0_5\": 0.8359158375468928, \"p4\": 0.6825861647803277, \"phi\": 0.518309905918297}, {\"truth_threshold\": 4.400000065565109, \"match_probability\": 0.9547759482410569, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1020.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1011.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.5022156573116692, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.4977843426883309, \"precision\": 1.0, \"recall\": 0.5022156573116692, \"specificity\": 1.0, \"npv\": 0.5310760667903525, \"accuracy\": 0.6816750629722922, \"f1\": 0.6686332350049164, \"f2\": 0.5577427821522309, \"f0_5\": 0.8345606283750614, \"p4\": 0.6809500591436524, \"phi\": 0.5164443009324556}, {\"truth_threshold\": 4.500000067055225, \"match_probability\": 0.9576762895591182, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1014.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1017.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.49926144756277696, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5007385524372231, \"precision\": 1.0, \"recall\": 0.49926144756277696, \"specificity\": 1.0, \"npv\": 0.5296022201665125, \"accuracy\": 0.6797858942065491, \"f1\": 0.6660098522167488, \"f2\": 0.5548260013131976, \"f0_5\": 0.8329226219812715, \"p4\": 0.6789824824414316, \"phi\": 0.5142081009404593}, {\"truth_threshold\": 4.6000000685453415, \"match_probability\": 0.9603983391922627, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1010.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1021.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.49729197439684886, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5027080256031512, \"precision\": 1.0, \"recall\": 0.49729197439684886, \"specificity\": 1.0, \"npv\": 0.528624192059095, \"accuracy\": 
0.6785264483627204, \"f1\": 0.6642551792173627, \"f2\": 0.5528793518721261, \"f0_5\": 0.8318234228298468, \"p4\": 0.6776681406756905, \"phi\": 0.5127188003018871}, {\"truth_threshold\": 4.700000070035458, \"match_probability\": 0.9629520927573305, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1005.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1026.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4948301329394387, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5051698670605613, \"precision\": 1.0, \"recall\": 0.4948301329394387, \"specificity\": 1.0, \"npv\": 0.5274067250115154, \"accuracy\": 0.6769521410579346, \"f1\": 0.6620553359683794, \"f2\": 0.5504436411436083, \"f0_5\": 0.8304412493802678, \"p4\": 0.6760222065562214, \"phi\": 0.5108588257538497}, {\"truth_threshold\": 4.800000071525574, \"match_probability\": 0.9653471069144568, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 998.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1033.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4913835548990645, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5086164451009355, \"precision\": 1.0, \"recall\": 0.4913835548990645, \"specificity\": 1.0, \"npv\": 0.5257116620752984, \"accuracy\": 0.6747481108312342, \"f1\": 0.6589633542423242, \"f2\": 0.5470291602718702, \"f0_5\": 0.8284907853229287, \"p4\": 0.6737121749549234, \"phi\": 0.508257872897662}, {\"truth_threshold\": 4.90000007301569, \"match_probability\": 0.9675925026740654, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 995.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1036.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4899064500246184, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5100935499753816, \"precision\": 1.0, \"recall\": 0.4899064500246184, \"specificity\": 1.0, \"npv\": 0.5249885373681797, \"accuracy\": 0.6738035264483627, \"f1\": 
0.6576338400528751, \"f2\": 0.5455642066016011, \"f0_5\": 0.8276493095990684, \"p4\": 0.6727200795968197, \"phi\": 0.5071442306145872}, {\"truth_threshold\": 5.000000074505806, \"match_probability\": 0.969696971214501, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 986.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1045.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4854751354012802, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5145248645987198, \"precision\": 1.0, \"recall\": 0.4854751354012802, \"specificity\": 1.0, \"npv\": 0.5228310502283106, \"accuracy\": 0.6709697732997482, \"f1\": 0.6536294332117998, \"f2\": 0.5411635565312843, \"f0_5\": 0.8251046025104602, \"p4\": 0.6697361249633867, \"phi\": 0.5038069817912239}, {\"truth_threshold\": 5.100000075995922, \"match_probability\": 0.9716687817966767, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 979.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1052.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.48202855736090594, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.517971442639094, \"precision\": 1.0, \"recall\": 0.48202855736090594, \"specificity\": 1.0, \"npv\": 0.5211652253072372, \"accuracy\": 0.6687657430730478, \"f1\": 0.6504983388704318, \"f2\": 0.5377348126991102, \"f0_5\": 0.8231040860938288, \"p4\": 0.6674071352914174, \"phi\": 0.5012150453662769}, {\"truth_threshold\": 5.200000077486038, \"match_probability\": 0.9735157914041783, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 974.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1057.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4795667159034958, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5204332840965041, \"precision\": 1.0, \"recall\": 0.4795667159034958, \"specificity\": 1.0, \"npv\": 0.5199818346957311, \"accuracy\": 0.667191435768262, \"f1\": 0.648252911813644, \"f2\": 
0.5352824796658606, \"f0_5\": 0.8216635734773072, \"p4\": 0.6657391023001616, \"phi\": 0.4993655783036174}, {\"truth_threshold\": 5.300000078976154, \"match_probability\": 0.9752454557772836, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 965.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1066.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4751354012801576, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5248645987198425, \"precision\": 1.0, \"recall\": 0.4751354012801576, \"specificity\": 1.0, \"npv\": 0.5178652193577566, \"accuracy\": 0.6643576826196473, \"f1\": 0.644192256341789, \"f2\": 0.5308614809109913, \"f0_5\": 0.8190460023765065, \"p4\": 0.6627270219044649, \"phi\": 0.4960404205390772}, {\"truth_threshold\": 5.4000000804662704, \"match_probability\": 0.9768648415470134, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 961.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1070.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4731659281142294, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5268340718857706, \"precision\": 1.0, \"recall\": 0.4731659281142294, \"specificity\": 1.0, \"npv\": 0.5169300225733634, \"accuracy\": 0.6630982367758187, \"f1\": 0.642379679144385, \"f2\": 0.5288937809576224, \"f0_5\": 0.8178723404255319, \"p4\": 0.661384263989902, \"phi\": 0.49456412516582227}, {\"truth_threshold\": 5.500000081956387, \"match_probability\": 0.9783806392104205, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 946.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1085.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.465780403741999, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.534219596258001, \"precision\": 1.0, \"recall\": 0.465780403741999, \"specificity\": 1.0, \"npv\": 0.5134529147982063, \"accuracy\": 0.658375314861461, \"f1\": 0.6355391333557272, \"f2\": 0.5214994487320838, \"f0_5\": 
0.8134135855546002, \"p4\": 0.656325954359785, \"phi\": 0.48903609882831217}, {\"truth_threshold\": 5.600000083446503, \"match_probability\": 0.9797991767207457, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 927.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1104.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4564254062038405, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5435745937961596, \"precision\": 1.0, \"recall\": 0.4564254062038405, \"specificity\": 1.0, \"npv\": 0.509115162294353, \"accuracy\": 0.6523929471032746, \"f1\": 0.6267748478701826, \"f2\": 0.5120981107059993, \"f0_5\": 0.8076319916361735, \"p4\": 0.64986435329492, \"phi\": 0.482050925478558}, {\"truth_threshold\": 5.700000084936619, \"match_probability\": 0.9811264334957893, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 926.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1105.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.45593303791235845, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5440669620876416, \"precision\": 1.0, \"recall\": 0.45593303791235845, \"specificity\": 1.0, \"npv\": 0.5088888888888888, \"accuracy\": 0.6520780856423174, \"f1\": 0.6263104497801826, \"f2\": 0.5116022099447514, \"f0_5\": 0.8073234524847428, \"p4\": 0.6495225157687339, \"phi\": 0.48168377289561637}, {\"truth_threshold\": 5.800000086426735, \"match_probability\": 0.9823680546749124, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 924.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1107.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4549483013293944, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5450516986706057, \"precision\": 1.0, \"recall\": 0.4549483013293944, \"specificity\": 1.0, \"npv\": 0.5084369449378331, \"accuracy\": 0.6514483627204031, \"f1\": 0.6253807106598985, \"f2\": 0.5106100795755968, \"f0_5\": 0.8067050811943426, \"p4\": 
0.6488383014404575, \"phi\": 0.4809496069575002}, {\"truth_threshold\": 5.900000087916851, \"match_probability\": 0.9835293654795508, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 917.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1114.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.45150172328902016, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5484982767109798, \"precision\": 1.0, \"recall\": 0.45150172328902016, \"specificity\": 1.0, \"npv\": 0.5068614431164232, \"accuracy\": 0.6492443324937027, \"f1\": 0.6221166892808684, \"f2\": 0.507134166574494, \"f0_5\": 0.8045271100193017, \"p4\": 0.6464378241531998, \"phi\": 0.4783814534822862}, {\"truth_threshold\": 6.000000089406967, \"match_probability\": 0.9846153855541349, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 909.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1122.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.44756277695716395, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.552437223042836, \"precision\": 1.0, \"recall\": 0.44756277695716395, \"specificity\": 1.0, \"npv\": 0.5050727834142038, \"accuracy\": 0.6467254408060453, \"f1\": 0.6183673469387755, \"f2\": 0.503155097974095, \"f0_5\": 0.8020116463737427, \"p4\": 0.6436833004319238, \"phi\": 0.4754490272472384}, {\"truth_threshold\": 6.100000090897083, \"match_probability\": 0.985630843183972, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 900.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1131.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4431314623338257, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5568685376661743, \"precision\": 1.0, \"recall\": 0.4431314623338257, \"specificity\": 1.0, \"npv\": 0.5030755711775043, \"accuracy\": 0.6438916876574308, \"f1\": 0.6141248720573184, \"f2\": 0.49867021276595747, \"f0_5\": 0.7991475759190197, \"p4\": 0.6405698927488327, 
\"phi\": 0.4721531674364921}, {\"truth_threshold\": 6.200000092387199, \"match_probability\": 0.9865801893041345, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 894.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1137.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4401772525849335, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5598227474150664, \"precision\": 1.0, \"recall\": 0.4401772525849335, \"specificity\": 1.0, \"npv\": 0.5017528483786152, \"accuracy\": 0.6420025188916877, \"f1\": 0.6112820512820513, \"f2\": 0.49567531603459747, \"f0_5\": 0.7972177635098984, \"p4\": 0.6384854939205781, \"phi\": 0.4699576473215044}, {\"truth_threshold\": 6.3000000938773155, \"match_probability\": 0.987467611228855, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 892.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1139.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.43919251600196946, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5608074839980305, \"precision\": 1.0, \"recall\": 0.43919251600196946, \"specificity\": 1.0, \"npv\": 0.5013134851138353, \"accuracy\": 0.6413727959697733, \"f1\": 0.61033185083818, \"f2\": 0.49467613132209404, \"f0_5\": 0.7965708162171816, \"p4\": 0.6377891010276633, \"phi\": 0.46922609777468816}, {\"truth_threshold\": 6.400000095367432, \"match_probability\": 0.9882970460445225, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 887.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1144.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4367306745445593, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5632693254554406, \"precision\": 1.0, \"recall\": 0.4367306745445593, \"specificity\": 1.0, \"npv\": 0.5002184359982526, \"accuracy\": 0.6397984886649875, \"f1\": 0.6079506511309116, \"f2\": 0.4921762290533792, \"f0_5\": 0.7949453307044273, \"p4\": 0.6360445825846852, \"phi\": 
0.46739783372748034}, {\"truth_threshold\": 6.500000096857548, \"match_probability\": 0.9890721936212699, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 884.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1147.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.43525356967011325, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5647464303298868, \"precision\": 1.0, \"recall\": 0.43525356967011325, \"specificity\": 1.0, \"npv\": 0.49956369982547993, \"accuracy\": 0.6388539042821159, \"f1\": 0.6065180102915952, \"f2\": 0.49067495559502666, \"f0_5\": 0.793964433267469, \"p4\": 0.63499541952982, \"phi\": 0.4663012798895679}, {\"truth_threshold\": 6.600000098347664, \"match_probability\": 0.9897965292084853, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 876.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1155.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.43131462333825704, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.568685376661743, \"precision\": 1.0, \"recall\": 0.43131462333825704, \"specificity\": 1.0, \"npv\": 0.49782608695652175, \"accuracy\": 0.6363350125944585, \"f1\": 0.6026831785345718, \"f2\": 0.4866666666666667, \"f0_5\": 0.7913279132791328, \"p4\": 0.6321884993464733, \"phi\": 0.46337853983930954}, {\"truth_threshold\": 6.70000009983778, \"match_probability\": 0.9904733155885336, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 866.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1165.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4263909404234367, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5736090595765633, \"precision\": 1.0, \"recall\": 0.4263909404234367, \"specificity\": 1.0, \"npv\": 0.49567099567099565, \"accuracy\": 0.6331863979848866, \"f1\": 0.597859855022437, \"f2\": 0.48164627363737483, \"f0_5\": 0.7879890809827116, \"p4\": 0.628660687467854, \"phi\": 0.45972776942966703}, 
{\"truth_threshold\": 6.800000101327896, \"match_probability\": 0.9911056147706719, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 856.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1175.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.42146725750861647, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5785327424913835, \"precision\": 1.0, \"recall\": 0.42146725750861647, \"specificity\": 1.0, \"npv\": 0.49353448275862066, \"accuracy\": 0.6300377833753149, \"f1\": 0.5930031174229303, \"f2\": 0.4766146993318486, \"f0_5\": 0.7846012832263978, \"p4\": 0.6251109156992851, \"phi\": 0.45607962565127746}, {\"truth_threshold\": 6.900000102818012, \"match_probability\": 0.9916962992137202, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 851.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1180.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4190054160512063, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5809945839487937, \"precision\": 1.0, \"recall\": 0.4190054160512063, \"specificity\": 1.0, \"npv\": 0.4924731182795699, \"accuracy\": 0.628463476070529, \"f1\": 0.5905621096460791, \"f2\": 0.4740947075208914, \"f0_5\": 0.7828886844526219, \"p4\": 0.6233275653061159, \"phi\": 0.45425642958439905}, {\"truth_threshold\": 7.000000104308128, \"match_probability\": 0.9922480625716311, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 847.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1184.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4170359428852782, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5829640571147218, \"precision\": 1.0, \"recall\": 0.4170359428852782, \"specificity\": 1.0, \"npv\": 0.49162730785744957, \"accuracy\": 0.6272040302267002, \"f1\": 0.5886031966643502, \"f2\": 0.4720766915616988, \"f0_5\": 0.78150950359845, \"p4\": 0.621896736471326, \"phi\": 0.4527982529565263}, {\"truth_threshold\": 
7.1000001057982445, \"match_probability\": 0.9927634299608046, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 834.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1197.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.41063515509601184, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5893648449039882, \"precision\": 1.0, \"recall\": 0.41063515509601184, \"specificity\": 1.0, \"npv\": 0.4888983774551665, \"accuracy\": 0.6231108312342569, \"f1\": 0.5821989528795811, \"f2\": 0.4655056932350971, \"f0_5\": 0.7769703745108999, \"p4\": 0.6172204525656357, \"phi\": 0.44806122466967707}, {\"truth_threshold\": 7.200000107288361, \"match_probability\": 0.9932447677519157, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 831.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1200.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4091580502215657, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5908419497784343, \"precision\": 1.0, \"recall\": 0.4091580502215657, \"specificity\": 1.0, \"npv\": 0.488272921108742, \"accuracy\": 0.6221662468513854, \"f1\": 0.5807127882599581, \"f2\": 0.4639865996649916, \"f0_5\": 0.7759103641456583, \"p4\": 0.6161355181490583, \"phi\": 0.44696845120974843}, {\"truth_threshold\": 7.300000108778477, \"match_probability\": 0.9936942928922654, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 829.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1202.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.40817331363860165, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5918266863613983, \"precision\": 1.0, \"recall\": 0.40817331363860165, \"specificity\": 1.0, \"npv\": 0.4878568385172561, \"accuracy\": 0.621536523929471, \"f1\": 0.5797202797202797, \"f2\": 0.4629733050374176, \"f0_5\": 0.7752010473162521, \"p4\": 0.6154109979823547, \"phi\": 0.44624000533215374}, {\"truth_threshold\": 7.400000110268593, 
\"match_probability\": 0.9941140817673122, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 827.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1204.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.4071885770556376, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5928114229443624, \"precision\": 1.0, \"recall\": 0.4071885770556376, \"specificity\": 1.0, \"npv\": 0.4874414644529587, \"accuracy\": 0.6209068010075567, \"f1\": 0.5787263820853744, \"f2\": 0.46195955759133056, \"f0_5\": 0.7744896047949054, \"p4\": 0.6146854858236389, \"phi\": 0.4455116118672065}, {\"truth_threshold\": 7.500000111758709, \"match_probability\": 0.9945060786121668, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 817.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1214.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.40226489414081734, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.5977351058591827, \"precision\": 1.0, \"recall\": 0.40226489414081734, \"specificity\": 1.0, \"npv\": 0.4853751589656634, \"accuracy\": 0.6177581863979849, \"f1\": 0.5737359550561798, \"f2\": 0.4568840174477128, \"f0_5\": 0.7709001698433666, \"p4\": 0.611042815748838, \"phi\": 0.4418703281958464}, {\"truth_threshold\": 7.600000113248825, \"match_probability\": 0.9948721034855129, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 805.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1226.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.396356474643033, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.603643525356967, \"precision\": 1.0, \"recall\": 0.396356474643033, \"specificity\": 1.0, \"npv\": 0.48291859974694223, \"accuracy\": 0.6139798488664987, \"f1\": 0.5677009873060649, \"f2\": 0.45077836263859333, \"f0_5\": 0.7665206627309084, \"p4\": 0.6066374884822956, \"phi\": 0.43750190140758005}, {\"truth_threshold\": 7.700000114738941, \"match_probability\": 
0.9952138598197071, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 803.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1228.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3953717380600689, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.604628261939931, \"precision\": 1.0, \"recall\": 0.3953717380600689, \"specificity\": 1.0, \"npv\": 0.48251158870627897, \"accuracy\": 0.6133501259445844, \"f1\": 0.5666901905434015, \"f2\": 0.4497591576117397, \"f0_5\": 0.7657829486934961, \"p4\": 0.6058995526108901, \"phi\": 0.436773906570581}, {\"truth_threshold\": 7.800000116229057, \"match_probability\": 0.9955329415617687, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 796.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1235.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.39192516001969474, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6080748399803053, \"precision\": 1.0, \"recall\": 0.39192516001969474, \"specificity\": 1.0, \"npv\": 0.4810924369747899, \"accuracy\": 0.6111460957178841, \"f1\": 0.5631411390166254, \"f2\": 0.4461883408071749, \"f0_5\": 0.763183125599233, \"p4\": 0.6033082263812478, \"phi\": 0.4342260129766634}, {\"truth_threshold\": 7.900000117719173, \"match_probability\": 0.99583083992065, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 794.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1237.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3909404234367307, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6090595765632694, \"precision\": 1.0, \"recall\": 0.3909404234367307, \"specificity\": 1.0, \"npv\": 0.480688497061293, \"accuracy\": 0.6105163727959698, \"f1\": 0.5621238938053097, \"f2\": 0.44516707782013903, \"f0_5\": 0.7624351834069522, \"p4\": 0.6025653750424809, \"phi\": 0.43349805603059816}, {\"truth_threshold\": 8.00000011920929, \"match_probability\": 0.9961089497366072, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 785.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1246.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3865091088133924, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6134908911866076, \"precision\": 1.0, \"recall\": 0.3865091088133924, \"specificity\": 1.0, \"npv\": 0.47887913007109995, \"accuracy\": 0.6076826196473551, \"f1\": 0.5575284090909091, \"f2\": 0.4405657200583679, \"f0_5\": 0.7590408044865596, \"p4\": 0.5992086772176372, \"phi\": 0.4302222051371983}, {\"truth_threshold\": 8.100000120699406, \"match_probability\": 0.9963685754887298, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 772.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1259.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.38010832102412606, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6198916789758739, \"precision\": 1.0, \"recall\": 0.38010832102412606, \"specificity\": 1.0, \"npv\": 0.4762895174708819, \"accuracy\": 0.6035894206549118, \"f1\": 0.5508383874420264, \"f2\": 0.43390287769784175, \"f0_5\": 0.7540535260793124, \"p4\": 0.5943189937980553, \"phi\": 0.4254898457157915}, {\"truth_threshold\": 8.200000122189522, \"match_probability\": 0.9966109369567457, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 765.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1266.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3766617429837518, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6233382570162481, \"precision\": 1.0, \"recall\": 0.3766617429837518, \"specificity\": 1.0, \"npv\": 0.47490667772708417, \"accuracy\": 0.6013853904282116, \"f1\": 0.5472103004291845, \"f2\": 0.43030712116098546, \"f0_5\": 0.7513258691809075, \"p4\": 0.591665315716949, \"phi\": 0.42294110344976693}, {\"truth_threshold\": 8.300000123679638, \"match_probability\": 0.9968371745531442, 
\"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 753.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1278.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3707533234859675, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6292466765140325, \"precision\": 1.0, \"recall\": 0.3707533234859675, \"specificity\": 1.0, \"npv\": 0.4725546842756913, \"accuracy\": 0.5976070528967254, \"f1\": 0.540948275862069, \"f2\": 0.42412977357215276, \"f0_5\": 0.7465794170136824, \"p4\": 0.5870811305491775, \"phi\": 0.41857044774335733}, {\"truth_threshold\": 8.400000125169754, \"match_probability\": 0.9970483543414643, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 749.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1282.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3687838503200394, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6312161496799606, \"precision\": 1.0, \"recall\": 0.3687838503200394, \"specificity\": 1.0, \"npv\": 0.4717758549649773, \"accuracy\": 0.5963476070528967, \"f1\": 0.5388489208633094, \"f2\": 0.4220669446635862, \"f0_5\": 0.7449771235329222, \"p4\": 0.5855429896383528, \"phi\": 0.41711307373662215}, {\"truth_threshold\": 8.50000012665987, \"match_probability\": 0.997245472756309, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 745.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1286.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.36681437715411125, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6331856228458888, \"precision\": 1.0, \"recall\": 0.36681437715411125, \"specificity\": 1.0, \"npv\": 0.4709995886466475, \"accuracy\": 0.595088161209068, \"f1\": 0.5367435158501441, \"f2\": 0.4200022550456647, \"f0_5\": 0.7433645978846538, \"p4\": 0.5839996987663111, \"phi\": 0.415655411066983}, {\"truth_threshold\": 8.600000128149986, \"match_probability\": 0.9974294610402847, \"total_clerical_labels\": 
3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 736.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1295.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.362383062530773, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.637616937469227, \"precision\": 1.0, \"recall\": 0.362383062530773, \"specificity\": 1.0, \"npv\": 0.4692622950819672, \"accuracy\": 0.5922544080604534, \"f1\": 0.5319840983014095, \"f2\": 0.4153498871331828, \"f0_5\": 0.7396984924623116, \"p4\": 0.5805081133675749, \"phi\": 0.4123744749884776}, {\"truth_threshold\": 8.700000129640102, \"match_probability\": 0.997601189412643, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 732.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1299.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3604135893648449, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6395864106351551, \"precision\": 1.0, \"recall\": 0.3604135893648449, \"specificity\": 1.0, \"npv\": 0.468494271685761, \"accuracy\": 0.5909949622166247, \"f1\": 0.5298588490770901, \"f2\": 0.4132791327913279, \"f0_5\": 0.7380520266182699, \"p4\": 0.5789476138201, \"phi\": 0.41091568728284633}, {\"truth_threshold\": 8.800000131130219, \"match_probability\": 0.997761470983937, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 718.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1313.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3535204332840965, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6464795667159035, \"precision\": 1.0, \"recall\": 0.3535204332840965, \"specificity\": 1.0, \"npv\": 0.46582587469487385, \"accuracy\": 0.5865869017632241, \"f1\": 0.5223717715532921, \"f2\": 0.4060167382945035, \"f0_5\": 0.7322047725882113, \"p4\": 0.5734425262692389, \"phi\": 0.40580656113113184}, {\"truth_threshold\": 8.900000132620335, \"match_probability\": 0.9979110654305032, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 
1145.0, \"tp\": 708.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1323.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.34859675036927623, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6514032496307238, \"precision\": 1.0, \"recall\": 0.34859675036927623, \"specificity\": 1.0, \"npv\": 0.4639384116693679, \"accuracy\": 0.5834382871536524, \"f1\": 0.5169769989047097, \"f2\": 0.4008152173913043, \"f0_5\": 0.7279457125231339, \"p4\": 0.5694677651256314, \"phi\": 0.4021534814960889}, {\"truth_threshold\": 9.00000013411045, \"match_probability\": 0.9980506824420605, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 704.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1327.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3466272772033481, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6533727227966519, \"precision\": 1.0, \"recall\": 0.3466272772033481, \"specificity\": 1.0, \"npv\": 0.46318770226537215, \"accuracy\": 0.5821788413098237, \"f1\": 0.5148080438756856, \"f2\": 0.3987313094698686, \"f0_5\": 0.7262224056117186, \"p4\": 0.5678676238912578, \"phi\": 0.4006912677739821}, {\"truth_threshold\": 9.100000135600567, \"match_probability\": 0.9981809849551747, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 700.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1331.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.34465780403741997, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.65534219596258, \"precision\": 1.0, \"recall\": 0.34465780403741997, \"specificity\": 1.0, \"npv\": 0.4624394184168013, \"accuracy\": 0.5809193954659949, \"f1\": 0.5126327352618089, \"f2\": 0.39664551223934724, \"f0_5\": 0.724487683709377, \"p4\": 0.5662615170898467, \"phi\": 0.39922844895106907}, {\"truth_threshold\": 9.200000137090683, \"match_probability\": 0.9983025921847976, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 690.0, 
\"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1341.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3397341211225997, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6602658788774003, \"precision\": 1.0, \"recall\": 0.3397341211225997, \"specificity\": 1.0, \"npv\": 0.46057924376508447, \"accuracy\": 0.5777707808564232, \"f1\": 0.5071664829106945, \"f2\": 0.39142273655547993, \"f0_5\": 0.720100187852223, \"p4\": 0.5622196307198002, \"phi\": 0.3955685586441907}, {\"truth_threshold\": 9.300000138580799, \"match_probability\": 0.9984160824655384, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 684.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1347.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.33677991137370755, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6632200886262924, \"precision\": 1.0, \"recall\": 0.33677991137370755, \"specificity\": 1.0, \"npv\": 0.45947030497592295, \"accuracy\": 0.5758816120906801, \"f1\": 0.5038674033149171, \"f2\": 0.388283378746594, \"f0_5\": 0.7174323473882945, \"p4\": 0.5597758409315445, \"phi\": 0.3933705232838903}, {\"truth_threshold\": 9.400000140070915, \"match_probability\": 0.9985219959137808, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 661.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1370.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3254554406696209, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6745445593303792, \"precision\": 1.0, \"recall\": 0.3254554406696209, \"specificity\": 1.0, \"npv\": 0.4552683896620278, \"accuracy\": 0.568639798488665, \"f1\": 0.49108469539375926, \"f2\": 0.3762094479225953, \"f0_5\": 0.706951871657754, \"p4\": 0.550272647956958, \"phi\": 0.38492801194561554}, {\"truth_threshold\": 9.500000141561031, \"match_probability\": 0.9986208369212233, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 655.0, \"tn\": 1145.0, \"fp\": 
0.0, \"fn\": 1376.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3225012309207287, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6774987690792713, \"precision\": 1.0, \"recall\": 0.3225012309207287, \"specificity\": 1.0, \"npv\": 0.45418484728282427, \"accuracy\": 0.5667506297229219, \"f1\": 0.48771407297096053, \"f2\": 0.3730493222462695, \"f0_5\": 0.7041496452375833, \"p4\": 0.5477568608833787, \"phi\": 0.38272074978272863}, {\"truth_threshold\": 9.600000143051147, \"match_probability\": 0.9987130764898899, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 641.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1390.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3156080748399803, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6843919251600197, \"precision\": 1.0, \"recall\": 0.3156080748399803, \"specificity\": 1.0, \"npv\": 0.4516765285996055, \"accuracy\": 0.5623425692695214, \"f1\": 0.47979041916167664, \"f2\": 0.3656588705077011, \"f0_5\": 0.6974972796517954, \"p4\": 0.5418247722541304, \"phi\": 0.3775615971490305}, {\"truth_threshold\": 9.700000144541264, \"match_probability\": 0.9987991544181472, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 627.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1404.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3087149187592319, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.691285081240768, \"precision\": 1.0, \"recall\": 0.3087149187592319, \"specificity\": 1.0, \"npv\": 0.4491957630443311, \"accuracy\": 0.5579345088161209, \"f1\": 0.4717832957110609, \"f2\": 0.35824477202605415, \"f0_5\": 0.6906807666886979, \"p4\": 0.535802646044032, \"phi\": 0.3723888203144958}, {\"truth_threshold\": 9.80000014603138, \"match_probability\": 0.9988794813467569, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 625.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1406.0, 
\"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.30773018217626785, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6922698178237322, \"precision\": 1.0, \"recall\": 0.30773018217626785, \"specificity\": 1.0, \"npv\": 0.448843590748726, \"accuracy\": 0.5573047858942065, \"f1\": 0.47063253012048195, \"f2\": 0.35718367813464397, \"f0_5\": 0.689693224453763, \"p4\": 0.5349347426333179, \"phi\": 0.37164865121476715}, {\"truth_threshold\": 9.900000147521496, \"match_probability\": 0.9989544406735176, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 620.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1411.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3052683407188577, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6947316592811423, \"precision\": 1.0, \"recall\": 0.3052683407188577, \"specificity\": 1.0, \"npv\": 0.44796557120500785, \"accuracy\": 0.5557304785894207, \"f1\": 0.46774801961523954, \"f2\": 0.3545288197621226, \"f0_5\": 0.6872090445577477, \"p4\": 0.5327564744189275, \"phi\": 0.36979684506621746}, {\"truth_threshold\": 10.000000149011612, \"match_probability\": 0.9990243903445719, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 616.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1415.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3032988675529296, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6967011324470704, \"precision\": 1.0, \"recall\": 0.3032988675529296, \"specificity\": 1.0, \"npv\": 0.447265625, \"accuracy\": 0.554471032745592, \"f1\": 0.4654325651681148, \"f2\": 0.3524027459954233, \"f0_5\": 0.6852057842046718, \"p4\": 0.5310049980284561, \"phi\": 0.36831393885903}, {\"truth_threshold\": 10.100000150501728, \"match_probability\": 0.9990896645300149, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 614.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1417.0, \"P_rate\": 
0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.3023141309699655, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.6976858690300345, \"precision\": 1.0, \"recall\": 0.3023141309699655, \"specificity\": 1.0, \"npv\": 0.44691647150663544, \"accuracy\": 0.5538413098236776, \"f1\": 0.46427221172022687, \"f2\": 0.3513389791714351, \"f0_5\": 0.6841987965232895, \"p4\": 0.5301262691052692, \"phi\": 0.367571985738429}, {\"truth_threshold\": 10.200000151991844, \"match_probability\": 0.9991505751910027, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 600.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1431.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.29542097488921715, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7045790251107829, \"precision\": 1.0, \"recall\": 0.29542097488921715, \"specificity\": 1.0, \"npv\": 0.44448757763975155, \"accuracy\": 0.549433249370277, \"f1\": 0.45610034207525657, \"f2\": 0.343878954607978, \"f0_5\": 0.6770480704129993, \"p4\": 0.5239180605880351, \"phi\": 0.3623685327294328}, {\"truth_threshold\": 10.30000015348196, \"match_probability\": 0.9992074135451509, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 592.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1439.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2914820285573609, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7085179714426391, \"precision\": 1.0, \"recall\": 0.2914820285573609, \"specificity\": 1.0, \"npv\": 0.44311145510835914, \"accuracy\": 0.5469143576826196, \"f1\": 0.45139153640869234, \"f2\": 0.33960532354290957, \"f0_5\": 0.6728802000454649, \"p4\": 0.5203244044266416, \"phi\": 0.3593870139723867}, {\"truth_threshold\": 10.400000154972076, \"match_probability\": 0.9992604514366183, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 590.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1441.0, \"P_rate\": 0.6394836272040302, 
\"N_rate\": 0.36051636934280396, \"tp_rate\": 0.29049729197439683, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7095027080256031, \"precision\": 1.0, \"recall\": 0.29049729197439683, \"specificity\": 1.0, \"npv\": 0.44276875483372, \"accuracy\": 0.5462846347607053, \"f1\": 0.45020984357115607, \"f2\": 0.33853568969474407, \"f0_5\": 0.6718287406057846, \"p4\": 0.5194206063238911, \"phi\": 0.3586406617354916}, {\"truth_threshold\": 10.500000156462193, \"match_probability\": 0.9993099426168967, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 570.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1461.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.28064992614475626, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7193500738552437, \"precision\": 1.0, \"recall\": 0.28064992614475626, \"specificity\": 1.0, \"npv\": 0.43937068303914045, \"accuracy\": 0.5399874055415617, \"f1\": 0.43829296424452135, \"f2\": 0.3278122843340235, \"f0_5\": 0.6610995128740431, \"p4\": 0.5102600262107828, \"phi\": 0.35115431044642736}, {\"truth_threshold\": 10.600000157952309, \"match_probability\": 0.9993561239419685, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 565.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1466.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2781880846873461, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7218119153126539, \"precision\": 1.0, \"recall\": 0.2781880846873461, \"specificity\": 1.0, \"npv\": 0.4385292991191114, \"accuracy\": 0.5384130982367759, \"f1\": 0.4352850539291217, \"f2\": 0.32512371964552883, \"f0_5\": 0.6583546958750874, \"p4\": 0.5079339530793631, \"phi\": 0.3492758591732757}, {\"truth_threshold\": 10.700000159442425, \"match_probability\": 0.9993992164911604, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 543.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1488.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.2673559822747415, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7326440177252584, \"precision\": 1.0, \"recall\": 0.2673559822747415, \"specificity\": 1.0, \"npv\": 0.4348651728066844, \"accuracy\": 0.5314861460957179, \"f1\": 0.4219114219114219, \"f2\": 0.31325718241606093, \"f0_5\": 0.6459671663097787, \"p4\": 0.49751894698684695, \"phi\": 0.3409747870925449}, {\"truth_threshold\": 10.800000160932541, \"match_probability\": 0.9994394266126935, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 530.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1501.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.26095519448547516, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7390448055145249, \"precision\": 1.0, \"recall\": 0.26095519448547516, \"specificity\": 1.0, \"npv\": 0.4327286470143613, \"accuracy\": 0.5273929471032746, \"f1\": 0.4139008199921905, \"f2\": 0.30621677836838457, \"f0_5\": 0.6384003854492893, \"p4\": 0.49121990458507664, \"phi\": 0.3360398610895279}, {\"truth_threshold\": 10.900000162422657, \"match_probability\": 0.9994769469006325, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 513.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1518.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.25258493353028066, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7474150664697193, \"precision\": 1.0, \"recall\": 0.25258493353028066, \"specificity\": 1.0, \"npv\": 0.42996620352985354, \"accuracy\": 0.5220403022670025, \"f1\": 0.4033018867924528, \"f2\": 0.29697811740187563, \"f0_5\": 0.6282145481263777, \"p4\": 0.4828102344574534, \"phi\": 0.32954966991161616}, {\"truth_threshold\": 11.000000163912773, \"match_probability\": 0.9995119571076428, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 508.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1523.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.25012309207287053, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7498769079271295, \"precision\": 1.0, \"recall\": 0.25012309207287053, \"specificity\": 1.0, \"npv\": 0.42916041979010494, \"accuracy\": 0.5204659949622166, \"f1\": 0.40015754233950374, \"f2\": 0.2942539388322521, \"f0_5\": 0.6251538272212651, \"p4\": 0.48029802489967816, \"phi\": 0.32763231097251716}, {\"truth_threshold\": 11.10000016540289, \"match_probability\": 0.9995446249976983, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 502.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1529.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.24716888232397832, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7528311176760216, \"precision\": 1.0, \"recall\": 0.24716888232397832, \"specificity\": 1.0, \"npv\": 0.4281974569932685, \"accuracy\": 0.5185768261964736, \"f1\": 0.3963679431504145, \"f2\": 0.2909807558543937, \"f0_5\": 0.6214409507303789, \"p4\": 0.4772593147830282, \"phi\": 0.3253261238495857}, {\"truth_threshold\": 11.200000166893005, \"match_probability\": 0.9995751071426191, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 499.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1532.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.24569177744953224, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7543082225504677, \"precision\": 1.0, \"recall\": 0.24569177744953224, \"specificity\": 1.0, \"npv\": 0.42771759432200224, \"accuracy\": 0.517632241813602, \"f1\": 0.39446640316205533, \"f2\": 0.2893424562217326, \"f0_5\": 0.6195679165631984, \"p4\": 0.47572994995033335, \"phi\": 0.32417078214331824}, {\"truth_threshold\": 11.300000168383121, \"match_probability\": 0.9996035496660847, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 493.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1538.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 
0.36051636934280396, \"tp_rate\": 0.2427375677006401, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.75726243229936, \"precision\": 1.0, \"recall\": 0.2427375677006401, \"specificity\": 1.0, \"npv\": 0.42676108833395454, \"accuracy\": 0.5157430730478589, \"f1\": 0.3906497622820919, \"f2\": 0.28606243472206105, \"f0_5\": 0.6157881588808394, \"p4\": 0.47265083362918403, \"phi\": 0.32185547777140927}, {\"truth_threshold\": 11.400000169873238, \"match_probability\": 0.99963008893853, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 487.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1544.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2397833579517479, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7602166420482521, \"precision\": 1.0, \"recall\": 0.2397833579517479, \"specificity\": 1.0, \"npv\": 0.42580885087393083, \"accuracy\": 0.5138539042821159, \"f1\": 0.3868149324861001, \"f2\": 0.2827778422947393, \"f0_5\": 0.6119628047248052, \"p4\": 0.46954403414712603, \"phi\": 0.31953384188239936}, {\"truth_threshold\": 11.500000171363354, \"match_probability\": 0.999654852226126, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 482.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1549.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.23732151649433778, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7626784835056623, \"precision\": 1.0, \"recall\": 0.23732151649433778, \"specificity\": 1.0, \"npv\": 0.425018559762435, \"accuracy\": 0.5122795969773299, \"f1\": 0.3836052526860326, \"f2\": 0.2800371833604462, \"f0_5\": 0.6087395807021975, \"p4\": 0.46693344428033184, \"phi\": 0.3175941579139333}, {\"truth_threshold\": 11.60000017285347, \"match_probability\": 0.9996779582968373, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 478.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1553.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, 
\"tp_rate\": 0.23535204332840964, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7646479566715904, \"precision\": 1.0, \"recall\": 0.23535204332840964, \"specificity\": 1.0, \"npv\": 0.42438843587842845, \"accuracy\": 0.5110201511335013, \"f1\": 0.3810282981267437, \"f2\": 0.27784236224133924, \"f0_5\": 0.6061374587877251, \"p4\": 0.4648305783799129, \"phi\": 0.31603905699918783}, {\"truth_threshold\": 11.700000174343586, \"match_probability\": 0.9996995179863626, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 471.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1560.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.23190546528803546, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7680945347119645, \"precision\": 1.0, \"recall\": 0.23190546528803546, \"specificity\": 1.0, \"npv\": 0.4232902033271719, \"accuracy\": 0.5088161209068011, \"f1\": 0.3764988009592326, \"f2\": 0.27399650959860383, \"f0_5\": 0.6015325670498084, \"p4\": 0.46111916274416753, \"phi\": 0.3133102480839957}, {\"truth_threshold\": 11.800000175833702, \"match_probability\": 0.9997196347265854, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 463.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1568.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.22796651895617923, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7720334810438207, \"precision\": 1.0, \"recall\": 0.22796651895617923, \"specificity\": 1.0, \"npv\": 0.4220420199041651, \"accuracy\": 0.5062972292191436, \"f1\": 0.37129109863672816, \"f2\": 0.2695935716781181, \"f0_5\": 0.5961885140355395, \"p4\": 0.45682744333981634, \"phi\": 0.3101797061878598}, {\"truth_threshold\": 11.900000177323818, \"match_probability\": 0.9997384050389891, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 445.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1586.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 
0.2191038897095027, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7808961102904973, \"precision\": 1.0, \"recall\": 0.2191038897095027, \"specificity\": 1.0, \"npv\": 0.4192603441962651, \"accuracy\": 0.5006297229219143, \"f1\": 0.3594507269789984, \"f2\": 0.25965690278912357, \"f0_5\": 0.583836263447914, \"p4\": 0.44696743745394574, \"phi\": 0.3030867404132794}, {\"truth_threshold\": 12.000000178813934, \"match_probability\": 0.9997559189953416, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 416.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1615.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.2048252092565239, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7951747907434761, \"precision\": 1.0, \"recall\": 0.2048252092565239, \"specificity\": 1.0, \"npv\": 0.4148550724637681, \"accuracy\": 0.49149874055415615, \"f1\": 0.340008173273396, \"f2\": 0.24355971896955503, \"f0_5\": 0.5629228687415426, \"p4\": 0.43044577914486043, \"phi\": 0.29150090399263207}, {\"truth_threshold\": 12.200000181794167, \"match_probability\": 0.9997875084304283, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 409.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1622.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.20137863121614968, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.7986213687838504, \"precision\": 1.0, \"recall\": 0.20137863121614968, \"specificity\": 1.0, \"npv\": 0.4138055655945067, \"accuracy\": 0.4892947103274559, \"f1\": 0.33524590163934426, \"f2\": 0.23965779913277863, \"f0_5\": 0.5576765748568312, \"p4\": 0.4263317832731881, \"phi\": 0.28867212956751886}, {\"truth_threshold\": 12.300000183284283, \"match_probability\": 0.9998017355340825, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 400.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1631.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 
0.19694731659281142, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8030526834071886, \"precision\": 1.0, \"recall\": 0.19694731659281142, \"specificity\": 1.0, \"npv\": 0.41246397694524495, \"accuracy\": 0.4864609571788413, \"f1\": 0.3290826820238585, \"f2\": 0.2346316283435007, \"f0_5\": 0.5508124483613329, \"p4\": 0.42096603893246504, \"phi\": 0.2850152161737426}, {\"truth_threshold\": 12.400000184774399, \"match_probability\": 0.9998150102562988, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 399.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1632.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1964549483013294, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8035450516986706, \"precision\": 1.0, \"recall\": 0.1964549483013294, \"specificity\": 1.0, \"npv\": 0.41231544832553113, \"accuracy\": 0.48614609571788414, \"f1\": 0.32839506172839505, \"f2\": 0.23407250967969026, \"f0_5\": 0.5500413564929694, \"p4\": 0.42036442533034846, \"phi\": 0.28460746667055603}, {\"truth_threshold\": 12.500000186264515, \"match_probability\": 0.9998273963279586, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 392.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1639.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1930083702609552, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8069916297390448, \"precision\": 1.0, \"recall\": 0.1930083702609552, \"specificity\": 1.0, \"npv\": 0.4112787356321839, \"accuracy\": 0.4839420654911839, \"f1\": 0.32356582748658685, \"f2\": 0.23015500234852043, \"f0_5\": 0.5445957210336204, \"p4\": 0.4161220886855334, \"phi\": 0.2817449883979377}, {\"truth_threshold\": 12.600000187754631, \"match_probability\": 0.9998389532181915, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 384.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1647.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 
0.18906942392909898, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.810930576070901, \"precision\": 1.0, \"recall\": 0.18906942392909898, \"specificity\": 1.0, \"npv\": 0.4101002865329513, \"accuracy\": 0.48142317380352645, \"f1\": 0.31801242236024846, \"f2\": 0.22566995768688294, \"f0_5\": 0.5382674516400336, \"p4\": 0.41120574947340216, \"phi\": 0.27845542718349653}, {\"truth_threshold\": 12.700000189244747, \"match_probability\": 0.9998497364189812, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 383.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1648.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18857705563761692, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.811422944362383, \"precision\": 1.0, \"recall\": 0.18857705563761692, \"specificity\": 1.0, \"npv\": 0.40995345506623704, \"accuracy\": 0.4811083123425693, \"f1\": 0.31731565865782935, \"f2\": 0.22510873398377806, \"f0_5\": 0.5374684254841425, \"p4\": 0.4105860003688893, \"phi\": 0.2780428303424835}, {\"truth_threshold\": 12.800000190734863, \"match_probability\": 0.9998597977108138, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 379.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1652.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18660758247168882, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8133924175283112, \"precision\": 1.0, \"recall\": 0.18660758247168882, \"specificity\": 1.0, \"npv\": 0.40936717912048626, \"accuracy\": 0.47984886649874053, \"f1\": 0.3145228215767635, \"f2\": 0.22286251911090205, \"f0_5\": 0.5342542994079503, \"p4\": 0.40809522958677485, \"phi\": 0.27638925384126056}, {\"truth_threshold\": 12.90000019222498, \"match_probability\": 0.9998691854106266, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 373.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1658.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 
0.18365337272279667, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8163466272772033, \"precision\": 1.0, \"recall\": 0.18365337272279667, \"specificity\": 1.0, \"npv\": 0.4084909026043525, \"accuracy\": 0.47795969773299746, \"f1\": 0.3103161397670549, \"f2\": 0.21948923149346827, \"f0_5\": 0.529378370706784, \"p4\": 0.40432322000651333, \"phi\": 0.27389912739888156}, {\"truth_threshold\": 13.000000193715096, \"match_probability\": 0.9998779446032292, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 370.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1661.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18217626784835056, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8178237321516494, \"precision\": 1.0, \"recall\": 0.18217626784835056, \"specificity\": 1.0, \"npv\": 0.4080541696364932, \"accuracy\": 0.4770151133501259, \"f1\": 0.3082049146189088, \"f2\": 0.21780080056510479, \"f0_5\": 0.5269154087154657, \"p4\": 0.40242079403659214, \"phi\": 0.2726495657512296}, {\"truth_threshold\": 13.100000195205212, \"match_probability\": 0.9998861173572945, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 369.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1662.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.18168389955686853, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8183161004431314, \"precision\": 1.0, \"recall\": 0.18168389955686853, \"specificity\": 1.0, \"npv\": 0.4079087994299964, \"accuracy\": 0.47670025188916876, \"f1\": 0.3075, \"f2\": 0.21723772518544684, \"f0_5\": 0.5260906757912746, \"p4\": 0.40178418596158894, \"phi\": 0.27223236645190135}, {\"truth_threshold\": 13.200000196695328, \"match_probability\": 0.9998937429269453, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 354.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1677.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.17429837518463812, 
\"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8257016248153619, \"precision\": 1.0, \"recall\": 0.17429837518463812, \"specificity\": 1.0, \"npv\": 0.4057406094968108, \"accuracy\": 0.47197732997481107, \"f1\": 0.2968553459119497, \"f2\": 0.20877565463552725, \"f0_5\": 0.5134899912967799, \"p4\": 0.3920831758418028, \"phi\": 0.2659321886904984}, {\"truth_threshold\": 13.300000198185444, \"match_probability\": 0.9999008579398913, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 346.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1685.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.17035942885278188, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8296405711472181, \"precision\": 1.0, \"recall\": 0.17035942885278188, \"specificity\": 1.0, \"npv\": 0.4045936395759717, \"accuracy\": 0.46945843828715367, \"f1\": 0.29112326461926796, \"f2\": 0.20425029515938606, \"f0_5\": 0.5065885797950219, \"p4\": 0.3867889182734259, \"phi\": 0.262538266459636}, {\"truth_threshold\": 13.40000019967556, \"match_probability\": 0.999907496573012, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 335.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1696.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.16494337764647957, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8350566223535204, \"precision\": 1.0, \"recall\": 0.16494337764647957, \"specificity\": 1.0, \"npv\": 0.40302710313269974, \"accuracy\": 0.4659949622166247, \"f1\": 0.28317836010143704, \"f2\": 0.19801394963943728, \"f0_5\": 0.4968851972708395, \"p4\": 0.3793659349517108, \"phi\": 0.2578306647274206}, {\"truth_threshold\": 13.500000201165676, \"match_probability\": 0.9999136907162209, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 325.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1706.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.16001969473165928, \"tn_rate\": 
1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8399803052683407, \"precision\": 1.0, \"recall\": 0.16001969473165928, \"specificity\": 1.0, \"npv\": 0.4016134689582603, \"accuracy\": 0.4628463476070529, \"f1\": 0.2758913412563667, \"f2\": 0.1923304533080838, \"f0_5\": 0.48784148904232966, \"p4\": 0.37246767025663613, \"phi\": 0.2535075239570288}, {\"truth_threshold\": 13.600000202655792, \"match_probability\": 0.9999194701253888, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 324.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1707.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.15952732644017725, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8404726735598228, \"precision\": 1.0, \"recall\": 0.15952732644017725, \"specificity\": 1.0, \"npv\": 0.4014726507713885, \"accuracy\": 0.46253148614609574, \"f1\": 0.2751592356687898, \"f2\": 0.19176136363636365, \"f0_5\": 0.48692515779981965, \"p4\": 0.3717697318528885, \"phi\": 0.25307283263205194}, {\"truth_threshold\": 13.700000204145908, \"match_probability\": 0.9999248625650565, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 322.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1709.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1585425898572132, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8414574101427869, \"precision\": 1.0, \"recall\": 0.1585425898572132, \"specificity\": 1.0, \"npv\": 0.40119131044148565, \"accuracy\": 0.46190176322418136, \"f1\": 0.27369315767105823, \"f2\": 0.1906227800142079, \"f0_5\": 0.4850858692377222, \"p4\": 0.37036934721259873, \"phi\": 0.2522021201052885}, {\"truth_threshold\": 13.800000205636024, \"match_probability\": 0.999929893941616, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 315.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1716.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.155096011816839, \"tn_rate\": 1.0, 
\"fp_rate\": 0.0, \"fn_rate\": 0.844903988183161, \"precision\": 1.0, \"recall\": 0.155096011816839, \"specificity\": 1.0, \"npv\": 0.400209716882209, \"accuracy\": 0.4596977329974811, \"f1\": 0.26854219948849106, \"f2\": 0.18663348738002133, \"f0_5\": 0.4785779398359161, \"p4\": 0.36541997841978086, \"phi\": 0.2491403840784887}, {\"truth_threshold\": 13.90000020712614, \"match_probability\": 0.9999345884275949, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 313.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1718.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.15411127523387494, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8458887247661251, \"precision\": 1.0, \"recall\": 0.15411127523387494, \"specificity\": 1.0, \"npv\": 0.39993014320642684, \"accuracy\": 0.45906801007556675, \"f1\": 0.26706484641638223, \"f2\": 0.18549247362806684, \"f0_5\": 0.4766981419433445, \"p4\": 0.3639919317161557, \"phi\": 0.24826144359124447}, {\"truth_threshold\": 14.100000210106373, \"match_probability\": 0.9999430554367367, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 307.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1724.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.15115706548498276, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8488429345150172, \"precision\": 1.0, \"recall\": 0.15115706548498276, \"specificity\": 1.0, \"npv\": 0.399093760892297, \"accuracy\": 0.45717884130982367, \"f1\": 0.262617621899059, \"f2\": 0.18206618431977226, \"f0_5\": 0.4710033752684873, \"p4\": 0.35966979322171594, \"phi\": 0.24561319539032303}, {\"truth_threshold\": 14.200000211596489, \"match_probability\": 0.9999468686412301, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 306.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1725.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.15066469719350073, \"tn_rate\": 1.0, \"fp_rate\": 
0.0, \"fn_rate\": 0.8493353028064993, \"precision\": 1.0, \"recall\": 0.15066469719350073, \"specificity\": 1.0, \"npv\": 0.3989547038327526, \"accuracy\": 0.4568639798488665, \"f1\": 0.26187419768934533, \"f2\": 0.18149466192170818, \"f0_5\": 0.4700460829493088, \"p4\": 0.35894382186502344, \"phi\": 0.24517012388723966}, {\"truth_threshold\": 14.300000213086605, \"match_probability\": 0.9999504265130488, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 303.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1728.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14918759231905465, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8508124076809453, \"precision\": 1.0, \"recall\": 0.14918759231905465, \"specificity\": 1.0, \"npv\": 0.39853811347024015, \"accuracy\": 0.45591939546599497, \"f1\": 0.2596401028277635, \"f2\": 0.17977928088287648, \"f0_5\": 0.4671600370027752, \"p4\": 0.3567561397717773, \"phi\": 0.24383794125607963}, {\"truth_threshold\": 14.400000214576721, \"match_probability\": 0.9999537461476637, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 300.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1731.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14771048744460857, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8522895125553914, \"precision\": 1.0, \"recall\": 0.14771048744460857, \"specificity\": 1.0, \"npv\": 0.3981223922114047, \"accuracy\": 0.45497481108312343, \"f1\": 0.2574002574002574, \"f2\": 0.17806267806267806, \"f0_5\": 0.46425255338904364, \"p4\": 0.3545536533347784, \"phi\": 0.24250124250436372}, {\"truth_threshold\": 14.500000216066837, \"match_probability\": 0.9999568434961527, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 298.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1733.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14672575086164452, \"tn_rate\": 1.0, \"fp_rate\": 0.0, 
\"fn_rate\": 0.8532742491383555, \"precision\": 1.0, \"recall\": 0.14672575086164452, \"specificity\": 1.0, \"npv\": 0.39784572619874914, \"accuracy\": 0.45434508816120905, \"f1\": 0.2559038213825676, \"f2\": 0.17691759677036334, \"f0_5\": 0.4623022029165374, \"p4\": 0.35307700645624607, \"phi\": 0.24160755969879688}, {\"truth_threshold\": 14.600000217556953, \"match_probability\": 0.9999597334417798, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 295.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1736.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14524864598719842, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8547513540128016, \"precision\": 1.0, \"recall\": 0.14524864598719842, \"specificity\": 1.0, \"npv\": 0.3974314474140923, \"accuracy\": 0.4534005037783375, \"f1\": 0.25365434221840066, \"f2\": 0.17519895474521915, \"f0_5\": 0.4593584553098723, \"p4\": 0.3508494030029032, \"phi\": 0.2402631465906275}, {\"truth_threshold\": 14.70000021904707, \"match_probability\": 0.9999624298714548, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 294.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1737.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1447562776957164, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8552437223042836, \"precision\": 1.0, \"recall\": 0.1447562776957164, \"specificity\": 1.0, \"npv\": 0.397293546148508, \"accuracy\": 0.45308564231738035, \"f1\": 0.25290322580645164, \"f2\": 0.17462580185317178, \"f0_5\": 0.4583723105706268, \"p4\": 0.35010346944394827, \"phi\": 0.23981395892022075}, {\"truth_threshold\": 14.800000220537186, \"match_probability\": 0.9999649457424121, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 292.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1739.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14377154111275234, \"tn_rate\": 1.0, \"fp_rate\": 0.0, 
\"fn_rate\": 0.8562284588872476, \"precision\": 1.0, \"recall\": 0.14377154111275234, \"specificity\": 1.0, \"npv\": 0.39701803051317613, \"accuracy\": 0.452455919395466, \"f1\": 0.2513990529487731, \"f2\": 0.17347908745247148, \"f0_5\": 0.4563926226945921, \"p4\": 0.3486064578319283, \"phi\": 0.2389139889090404}, {\"truth_threshold\": 15.000000223517418, \"match_probability\": 0.9999694833578969, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 290.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1741.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14278680452978829, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8572131954702117, \"precision\": 1.0, \"recall\": 0.14278680452978829, \"specificity\": 1.0, \"npv\": 0.39674289674289676, \"accuracy\": 0.45182619647355166, \"f1\": 0.24989228780697975, \"f2\": 0.17233182790587118, \"f0_5\": 0.4544030084612974, \"p4\": 0.3471025353224262, \"phi\": 0.23801187038845348}, {\"truth_threshold\": 15.100000225007534, \"match_probability\": 0.9999715269079685, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 289.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1742.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14229443623830626, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8577055637616937, \"precision\": 1.0, \"recall\": 0.14229443623830626, \"specificity\": 1.0, \"npv\": 0.39660547280914443, \"accuracy\": 0.45151133501259444, \"f1\": 0.24913793103448276, \"f2\": 0.17175799358136218, \"f0_5\": 0.45340445560087855, \"p4\": 0.346347962973042, \"phi\": 0.2375599969742467}, {\"truth_threshold\": 15.20000022649765, \"match_probability\": 0.9999734336151354, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 288.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1743.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14180206794682423, \"tn_rate\": 1.0, \"fp_rate\": 0.0, 
\"fn_rate\": 0.8581979320531757, \"precision\": 1.0, \"recall\": 0.14180206794682423, \"specificity\": 1.0, \"npv\": 0.39646814404432135, \"accuracy\": 0.4511964735516373, \"f1\": 0.24838292367399742, \"f2\": 0.17118402282453637, \"f0_5\": 0.4524033930254477, \"p4\": 0.34559163939876736, \"phi\": 0.2371075762191587}, {\"truth_threshold\": 15.300000227987766, \"match_probability\": 0.9999752126423825, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 285.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1746.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.14032496307237813, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8596750369276218, \"precision\": 1.0, \"recall\": 0.14032496307237813, \"specificity\": 1.0, \"npv\": 0.3960567277758561, \"accuracy\": 0.45025188916876574, \"f1\": 0.24611398963730569, \"f2\": 0.16946129147342134, \"f0_5\": 0.44938505203405865, \"p4\": 0.34331208180785255, \"phi\": 0.2357469951021941}, {\"truth_threshold\": 15.400000229477882, \"match_probability\": 0.9999768725392036, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 281.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1750.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.13835548990645002, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8616445100935499, \"precision\": 1.0, \"recall\": 0.13835548990645002, \"specificity\": 1.0, \"npv\": 0.3955094991364421, \"accuracy\": 0.44899244332493704, \"f1\": 0.2430795847750865, \"f2\": 0.16716240333135038, \"f0_5\": 0.44532488114104596, \"p4\": 0.34024766819652713, \"phi\": 0.23392501045351508}, {\"truth_threshold\": 15.500000230967999, \"match_probability\": 0.9999784212826682, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 277.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1754.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.13638601674052192, \"tn_rate\": 1.0, \"fp_rate\": 0.0, 
\"fn_rate\": 0.8636139832594781, \"precision\": 1.0, \"recall\": 0.13638601674052192, \"specificity\": 1.0, \"npv\": 0.39496378061400483, \"accuracy\": 0.4477329974811083, \"f1\": 0.24003466204506066, \"f2\": 0.16486132603261516, \"f0_5\": 0.4412233195285123, \"p4\": 0.3371541980967607, \"phi\": 0.23209381033263574}, {\"truth_threshold\": 15.600000232458115, \"match_probability\": 0.9999798663157408, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 273.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1758.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1344165435745938, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8655834564254062, \"precision\": 1.0, \"recall\": 0.1344165435745938, \"specificity\": 1.0, \"npv\": 0.39441956596624184, \"accuracy\": 0.4464735516372796, \"f1\": 0.23697916666666666, \"f2\": 0.1625580564487317, \"f0_5\": 0.43707973102785785, \"p4\": 0.33403113499095954, \"phi\": 0.23025315367085358}, {\"truth_threshold\": 15.70000023394823, \"match_probability\": 0.9999812145830361, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 271.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1760.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.13343180699162974, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8665681930083703, \"precision\": 1.0, \"recall\": 0.13343180699162974, \"specificity\": 1.0, \"npv\": 0.39414802065404475, \"accuracy\": 0.44584382871536526, \"f1\": 0.23544743701129453, \"f2\": 0.16140559857057774, \"f0_5\": 0.434991974317817, \"p4\": 0.33245833489583054, \"phi\": 0.22932920140715485}, {\"truth_threshold\": 15.800000235438347, \"match_probability\": 0.9999824725641815, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 268.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1763.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.13195470211718366, \"tn_rate\": 1.0, \"fp_rate\": 0.0, 
\"fn_rate\": 0.8680452978828164, \"precision\": 1.0, \"recall\": 0.13195470211718366, \"specificity\": 1.0, \"npv\": 0.3937414030261348, \"accuracy\": 0.4448992443324937, \"f1\": 0.23314484558503698, \"f2\": 0.15967588179218303, \"f0_5\": 0.43184015468901066, \"p4\": 0.3300848527615133, \"phi\": 0.22793865303523134}, {\"truth_threshold\": 15.900000236928463, \"match_probability\": 0.9999836463049459, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 266.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1765.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1309699655342196, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8690300344657804, \"precision\": 1.0, \"recall\": 0.1309699655342196, \"specificity\": 1.0, \"npv\": 0.39347079037800686, \"accuracy\": 0.44426952141057935, \"f1\": 0.23160644318676535, \"f2\": 0.15852205005959474, \"f0_5\": 0.4297253634894992, \"p4\": 0.32849290653450874, \"phi\": 0.2270084929127756}, {\"truth_threshold\": 16.00000023841858, \"match_probability\": 0.9999847414462861, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 261.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1770.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.12850812407680945, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8714918759231906, \"precision\": 1.0, \"recall\": 0.12850812407680945, \"specificity\": 1.0, \"npv\": 0.3927958833619211, \"accuracy\": 0.44269521410579343, \"f1\": 0.22774869109947643, \"f2\": 0.1556350626118068, \"f0_5\": 0.424390243902439, \"p4\": 0.3244788273615635, \"phi\": 0.22467189881232083}, {\"truth_threshold\": 16.20000024139881, \"match_probability\": 0.9999867166312594, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 255.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1776.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.1255539143279173, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.8744460856720827, \"precision\": 1.0, \"recall\": 0.1255539143279173, \"specificity\": 1.0, \"npv\": 0.3919890448476549, \"accuracy\": 0.44080604534005036, \"f1\": 0.2230971128608924, \"f2\": 0.15216612960973863, \"f0_5\": 0.41789577187807275, \"p4\": 0.3195960922748543, \"phi\": 0.2218462507104517}, {\"truth_threshold\": 16.300000242888927, \"match_probability\": 0.9999876061677141, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 254.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1777.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.12506154603643527, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8749384539635647, \"precision\": 1.0, \"recall\": 0.12506154603643527, \"specificity\": 1.0, \"npv\": 0.391854893908282, \"accuracy\": 0.4404911838790932, \"f1\": 0.2223194748358862, \"f2\": 0.1515874910479828, \"f0_5\": 0.4168034131933049, \"p4\": 0.31877518775297364, \"phi\": 0.22137294065470842}, {\"truth_threshold\": 16.400000244379044, \"match_probability\": 0.9999884361359999, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 249.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1782.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.12259970457902511, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8774002954209749, \"precision\": 1.0, \"recall\": 0.12259970457902511, \"specificity\": 1.0, \"npv\": 0.3911855141783396, \"accuracy\": 0.4389168765743073, \"f1\": 0.21842105263157896, \"f2\": 0.14869222500895737, \"f0_5\": 0.41129831516352827, \"p4\": 0.3146395889340626, \"phi\": 0.21899595538241903}, {\"truth_threshold\": 16.50000024586916, \"match_probability\": 0.9999892105250341, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 242.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1789.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.11915312653865091, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.880846873461349, \"precision\": 1.0, \"recall\": 0.11915312653865091, \"specificity\": 1.0, \"npv\": 0.39025221540558963, \"accuracy\": 0.43671284634760704, \"f1\": 0.21293444786625604, \"f2\": 0.14463303848912265, \"f0_5\": 0.40346782260753583, \"p4\": 0.30876091977525716, \"phi\": 0.21563805694777322}, {\"truth_threshold\": 16.600000247359276, \"match_probability\": 0.9999899330566321, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 236.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1795.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.11619891678975874, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8838010832102413, \"precision\": 1.0, \"recall\": 0.11619891678975874, \"specificity\": 1.0, \"npv\": 0.38945578231292516, \"accuracy\": 0.43482367758186397, \"f1\": 0.20820467578297308, \"f2\": 0.14114832535885166, \"f0_5\": 0.39663865546218485, \"p4\": 0.30363716807944324, \"phi\": 0.21273067489732173}, {\"truth_threshold\": 16.700000248849392, \"match_probability\": 0.9999906072033913, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 232.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1799.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.11422944362383063, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8857705563761694, \"precision\": 1.0, \"recall\": 0.11422944362383063, \"specificity\": 1.0, \"npv\": 0.3889266304347826, \"accuracy\": 0.43356423173803527, \"f1\": 0.20503756076005303, \"f2\": 0.13882240306366683, \"f0_5\": 0.39202433254477864, \"p4\": 0.3001765927459395, \"phi\": 0.21077683128146796}, {\"truth_threshold\": 16.800000250339508, \"match_probability\": 0.9999912362053778, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 221.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1810.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.10881339241752831, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.8911866075824717, \"precision\": 1.0, \"recall\": 0.10881339241752831, \"specificity\": 1.0, \"npv\": 0.38747884940778343, \"accuracy\": 0.4301007556675063, \"f1\": 0.19626998223801065, \"f2\": 0.1324146195326543, \"f0_5\": 0.379073756432247, \"p4\": 0.290469029799348, \"phi\": 0.20533603700788008}, {\"truth_threshold\": 16.900000251829624, \"match_probability\": 0.999991823085696, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 215.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1816.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.10585918266863614, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.8941408173313639, \"precision\": 1.0, \"recall\": 0.10585918266863614, \"specificity\": 1.0, \"npv\": 0.386693684566025, \"accuracy\": 0.4282115869017632, \"f1\": 0.19145146927871773, \"f2\": 0.12891233960906584, \"f0_5\": 0.37184365271532344, \"p4\": 0.2850517881231799, \"phi\": 0.20232418884375344}, {\"truth_threshold\": 17.00000025331974, \"match_probability\": 0.9999923706650156, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 209.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1822.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.10290497291974397, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.897095027080256, \"precision\": 1.0, \"recall\": 0.10290497291974397, \"specificity\": 1.0, \"npv\": 0.38591169531513314, \"accuracy\": 0.4263224181360202, \"f1\": 0.18660714285714286, \"f2\": 0.12540501620064803, \"f0_5\": 0.36449250087199164, \"p4\": 0.27954493418624105, \"phi\": 0.19927928280635765}, {\"truth_threshold\": 17.100000254809856, \"match_probability\": 0.9999928815751264, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 203.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1828.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0999507631708518, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.9000492368291482, \"precision\": 1.0, \"recall\": 0.0999507631708518, \"specificity\": 1.0, \"npv\": 0.3851328624285234, \"accuracy\": 0.4244332493702771, \"f1\": 0.18173679498657117, \"f2\": 0.12189263840518794, \"f0_5\": 0.3570172353148083, \"p4\": 0.2739457211855208, \"phi\": 0.19619970316467247}, {\"truth_threshold\": 17.200000256299973, \"match_probability\": 0.999993358271586, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 193.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1838.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.09502708025603152, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9049729197439685, \"precision\": 1.0, \"recall\": 0.09502708025603152, \"specificity\": 1.0, \"npv\": 0.38384177003017095, \"accuracy\": 0.42128463476070527, \"f1\": 0.1735611510791367, \"f2\": 0.11602741373091259, \"f0_5\": 0.3442739921512665, \"p4\": 0.26440064321915874, \"phi\": 0.19098524206407744}, {\"truth_threshold\": 17.30000025779009, \"match_probability\": 0.999993803045519, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 178.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1853.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.08764155588380108, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9123584441161989, \"precision\": 1.0, \"recall\": 0.08764155588380108, \"specificity\": 1.0, \"npv\": 0.3819212808539026, \"accuracy\": 0.41656171284634763, \"f1\": 0.16115889542779538, \"f2\": 0.10720308359431463, \"f0_5\": 0.32446226759022967, \"p4\": 0.24955621152340898, \"phi\": 0.18295402504227723}, {\"truth_threshold\": 17.400000259280205, \"match_probability\": 0.9999942180346287, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 160.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1871.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.07877892663712457, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.9212210733628754, \"precision\": 1.0, \"recall\": 0.07877892663712457, \"specificity\": 1.0, \"npv\": 0.3796419098143236, \"accuracy\": 0.4108942065491184, \"f1\": 0.1460520310360566, \"f2\": 0.09657170449058426, \"f0_5\": 0.2995132909022838, \"p4\": 0.23084277458650382, \"phi\": 0.17293866589528342}, {\"truth_threshold\": 17.50000026077032, \"match_probability\": 0.9999946052334694, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 156.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1875.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.07680945347119646, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9231905465288035, \"precision\": 1.0, \"recall\": 0.07680945347119646, \"specificity\": 1.0, \"npv\": 0.3791390728476821, \"accuracy\": 0.40963476070528965, \"f1\": 0.14266117969821673, \"f2\": 0.09420289855072464, \"f0_5\": 0.2937853107344633, \"p4\": 0.2265418036022582, \"phi\": 0.1706501244506039}, {\"truth_threshold\": 17.600000262260437, \"match_probability\": 0.999994966503032, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 153.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1878.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.07533234859675036, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9246676514032496, \"precision\": 1.0, \"recall\": 0.07533234859675036, \"specificity\": 1.0, \"npv\": 0.3787628183923255, \"accuracy\": 0.4086901763224181, \"f1\": 0.1401098901098901, \"f2\": 0.09242479159115621, \"f0_5\": 0.28944381384790013, \"p4\": 0.22328051634216844, \"phi\": 0.16891741375778377}, {\"truth_threshold\": 17.700000263750553, \"match_probability\": 0.9999953035796879, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 149.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1882.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.07336287543082226, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.9266371245691778, \"precision\": 1.0, \"recall\": 0.07336287543082226, \"specificity\": 1.0, \"npv\": 0.3782623059134457, \"accuracy\": 0.4074307304785894, \"f1\": 0.13669724770642203, \"f2\": 0.09005197630847335, \"f0_5\": 0.2835934526075371, \"p4\": 0.218883751244496, \"phi\": 0.16658454438783837}, {\"truth_threshold\": 17.80000026524067, \"match_probability\": 0.9999956180835331, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 144.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1887.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.07090103397341212, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9290989660265879, \"precision\": 1.0, \"recall\": 0.07090103397341212, \"specificity\": 1.0, \"npv\": 0.3776385224274406, \"accuracy\": 0.40585642317380355, \"f1\": 0.13241379310344828, \"f2\": 0.08708272859216255, \"f0_5\": 0.2761795166858458, \"p4\": 0.21330828694544357, \"phi\": 0.16363056471300563}, {\"truth_threshold\": 17.900000266730785, \"match_probability\": 0.9999959115261747, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 134.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1897.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.06597735105859183, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9340226489414082, \"precision\": 1.0, \"recall\": 0.06597735105859183, \"specificity\": 1.0, \"npv\": 0.37639710716633795, \"accuracy\": 0.4027078085642317, \"f1\": 0.12378752886836028, \"f2\": 0.08113344635504965, \"f0_5\": 0.2610050642773666, \"p4\": 0.2018827078967218, \"phi\": 0.15758706824150226}, {\"truth_threshold\": 18.0000002682209, \"match_probability\": 0.9999961853179954, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 132.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1899.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.06499261447562776, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.9350073855243722, \"precision\": 1.0, \"recall\": 0.06499261447562776, \"specificity\": 1.0, \"npv\": 0.37614980289093297, \"accuracy\": 0.4020780856423174, \"f1\": 0.12205270457697642, \"f2\": 0.07994186046511628, \"f0_5\": 0.25791324736225085, \"p4\": 0.1995522156019492, \"phi\": 0.15635523376073404}, {\"truth_threshold\": 18.100000269711018, \"match_probability\": 0.999996440774932, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 131.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1900.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.06450024618414574, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9354997538158543, \"precision\": 1.0, \"recall\": 0.06450024618414574, \"specificity\": 1.0, \"npv\": 0.3760262725779967, \"accuracy\": 0.4017632241813602, \"f1\": 0.1211840888066605, \"f2\": 0.07934585099939431, \"f0_5\": 0.2563600782778865, \"p4\": 0.19838115580714064, \"phi\": 0.15573627436466905}, {\"truth_threshold\": 18.200000271201134, \"match_probability\": 0.9999966791247992, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 128.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1903.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.06302314130969966, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9369768586903003, \"precision\": 1.0, \"recall\": 0.06302314130969966, \"specificity\": 1.0, \"npv\": 0.37565616797900264, \"accuracy\": 0.40081863979848864, \"f1\": 0.11857341361741547, \"f2\": 0.07755695588948133, \"f0_5\": 0.25167125442390875, \"p4\": 0.19484445248024185, \"phi\": 0.15386692873519298}, {\"truth_threshold\": 18.30000027269125, \"match_probability\": 0.999996901513191, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 112.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1919.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.055145248645987195, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.9448547513540128, \"precision\": 1.0, \"recall\": 0.055145248645987195, \"specificity\": 1.0, \"npv\": 0.37369451697127937, \"accuracy\": 0.3957808564231738, \"f1\": 0.10452636490900606, \"f2\": 0.06799417192812045, \"f0_5\": 0.22589753933037515, \"p4\": 0.17536236689966953, \"phi\": 0.14355304613982695}, {\"truth_threshold\": 18.400000274181366, \"match_probability\": 0.9999971090089864, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 91.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1940.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0448055145248646, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9551944854751354, \"precision\": 1.0, \"recall\": 0.0448055145248646, \"specificity\": 1.0, \"npv\": 0.3711507293354943, \"accuracy\": 0.3891687657430731, \"f1\": 0.08576814326107446, \"f2\": 0.05538648813146683, \"f0_5\": 0.18997912317327767, \"p4\": 0.1480768274225295, \"phi\": 0.1289558040343884}, {\"truth_threshold\": 18.500000275671482, \"match_probability\": 0.9999973026094866, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 77.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1954.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0379123584441162, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9620876415558838, \"precision\": 1.0, \"recall\": 0.0379123584441162, \"specificity\": 1.0, \"npv\": 0.36947402387867057, \"accuracy\": 0.38476070528967254, \"f1\": 0.0730550284629981, \"f2\": 0.04694549445189611, \"f0_5\": 0.16460025651988028, \"p4\": 0.12868698840481557, \"phi\": 0.11835384078718403}, {\"truth_threshold\": 18.600000277161598, \"match_probability\": 0.999997483245208, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 75.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1956.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.03692762186115214, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.9630723781388478, \"precision\": 1.0, \"recall\": 0.03692762186115214, \"specificity\": 1.0, \"npv\": 0.3692357304095453, \"accuracy\": 0.38413098236775817, \"f1\": 0.07122507122507123, \"f2\": 0.045737285034760336, \"f0_5\": 0.16087516087516088, \"p4\": 0.12583247247071236, \"phi\": 0.11676899173235163}, {\"truth_threshold\": 18.700000278651714, \"match_probability\": 0.9999976517843541, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 73.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1958.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.035942885278188084, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9640571147218119, \"precision\": 1.0, \"recall\": 0.035942885278188084, \"specificity\": 1.0, \"npv\": 0.36899774411859493, \"accuracy\": 0.38350125944584385, \"f1\": 0.06939163498098859, \"f2\": 0.04452848603147493, \"f0_5\": 0.1571244080929832, \"p4\": 0.12295600444839334, \"phi\": 0.11516441978651591}, {\"truth_threshold\": 18.900000281631947, \"match_probability\": 0.9999979557589296, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 71.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1960.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.034958148695224026, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9650418513047759, \"precision\": 1.0, \"recall\": 0.034958148695224026, \"specificity\": 1.0, \"npv\": 0.3687600644122383, \"accuracy\": 0.38287153652392947, \"f1\": 0.0675547098001903, \"f2\": 0.043319097010372176, \"f0_5\": 0.15334773218142547, \"p4\": 0.1200573002429353, \"phi\": 0.11353928467532028}, {\"truth_threshold\": 19.000000283122063, \"match_probability\": 0.9999980926553794, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 69.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1962.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.033973412112259974, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.96602658788774, \"precision\": 1.0, \"recall\": 0.033973412112259974, \"specificity\": 1.0, \"npv\": 0.3685226906984229, \"accuracy\": 0.3822418136020151, \"f1\": 0.06571428571428571, \"f2\": 0.04210911753936287, \"f0_5\": 0.14954486345903772, \"p4\": 0.11713607088211223, \"phi\": 0.11189268628385163}, {\"truth_threshold\": 19.10000028461218, \"match_probability\": 0.9999982203843173, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 66.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1965.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.03249630723781388, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9675036927621861, \"precision\": 1.0, \"recall\": 0.03249630723781388, \"specificity\": 1.0, \"npv\": 0.36816720257234725, \"accuracy\": 0.38129722921914355, \"f1\": 0.06294706723891273, \"f2\": 0.040293040293040296, \"f0_5\": 0.1437908496732026, \"p4\": 0.11271134775969976, \"phi\": 0.10938041200177231}, {\"truth_threshold\": 19.200000286102295, \"match_probability\": 0.9999983395596597, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 65.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1966.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.032003938946331856, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9679960610536681, \"precision\": 1.0, \"recall\": 0.032003938946331856, \"specificity\": 1.0, \"npv\": 0.3680488588878174, \"accuracy\": 0.3809823677581864, \"f1\": 0.06202290076335878, \"f2\": 0.03968738551715716, \"f0_5\": 0.14185945002182454, \"p4\": 0.1112248557850375, \"phi\": 0.10853116238718177}, {\"truth_threshold\": 19.400000289082527, \"match_probability\": 0.9999985545024187, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 63.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1968.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0310192023633678, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.9689807976366323, \"precision\": 1.0, \"recall\": 0.0310192023633678, \"specificity\": 1.0, \"npv\": 0.3678123996145198, \"accuracy\": 0.380352644836272, \"f1\": 0.06017191977077364, \"f2\": 0.03847563209967021, \"f0_5\": 0.13797634691195795, \"p4\": 0.10823426675729327, \"phi\": 0.1068140779831886}, {\"truth_threshold\": 19.500000290572643, \"match_probability\": 0.9999986513029383, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 62.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1969.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.03052683407188577, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9694731659281143, \"precision\": 1.0, \"recall\": 0.03052683407188577, \"specificity\": 1.0, \"npv\": 0.36769428387925496, \"accuracy\": 0.38003778337531485, \"f1\": 0.05924510272336359, \"f2\": 0.03786953334962131, \"f0_5\": 0.13602457218078104, \"p4\": 0.10673009231574156, \"phi\": 0.1059459408998895}, {\"truth_threshold\": 19.60000029206276, \"match_probability\": 0.9999987416210334, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 61.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1970.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.03003446578040374, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9699655342195963, \"precision\": 1.0, \"recall\": 0.03003446578040374, \"specificity\": 1.0, \"npv\": 0.36757624398073835, \"accuracy\": 0.3797229219143577, \"f1\": 0.058317399617590825, \"f2\": 0.03726328649969456, \"f0_5\": 0.13406593406593406, \"p4\": 0.10521994576679723, \"phi\": 0.10507119548919591}, {\"truth_threshold\": 19.700000293552876, \"match_probability\": 0.9999988258908107, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 60.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1971.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.029542097488921712, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.9704579025110783, \"precision\": 1.0, \"recall\": 0.029542097488921712, \"specificity\": 1.0, \"npv\": 0.36745827984595636, \"accuracy\": 0.37940806045340053, \"f1\": 0.05738880918220947, \"f2\": 0.036656891495601175, \"f0_5\": 0.13210039630118892, \"p4\": 0.10370378756573473, \"phi\": 0.10418967475868576}, {\"truth_threshold\": 19.80000029504299, \"match_probability\": 0.9999989045173057, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 53.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1978.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.026095519448547513, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9739044805514525, \"precision\": 1.0, \"recall\": 0.026095519448547513, \"specificity\": 1.0, \"npv\": 0.36663464617355107, \"accuracy\": 0.37720403022670024, \"f1\": 0.0508637236084453, \"f2\": 0.03240797358444417, \"f0_5\": 0.1181453410610789, \"p4\": 0.0929189583154697, \"phi\": 0.09781370834261033}, {\"truth_threshold\": 19.900000296533108, \"match_probability\": 0.9999989778784306, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 46.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1985.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.022648941408173313, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9773510585918267, \"precision\": 1.0, \"recall\": 0.022648941408173313, \"specificity\": 1.0, \"npv\": 0.365814696485623, \"accuracy\": 0.375, \"f1\": 0.04429465575349061, \"f2\": 0.028151774785801713, \"f0_5\": 0.1038374717832957, \"p4\": 0.08182335429924091, \"phi\": 0.0910237091474061}, {\"truth_threshold\": 21.200000315904617, \"match_probability\": 0.9999995848894065, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 45.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1986.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.022156573116691284, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.9778434268833087, \"precision\": 1.0, \"recall\": 0.022156573116691284, \"specificity\": 1.0, \"npv\": 0.3656978601085915, \"accuracy\": 0.37468513853904284, \"f1\": 0.04335260115606936, \"f2\": 0.027543150936467132, \"f0_5\": 0.10176390773405698, \"p4\": 0.08021203063702596, \"phi\": 0.09001450647597614}, {\"truth_threshold\": 21.500000320374966, \"match_probability\": 0.9999996628254004, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 44.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1987.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.021664204825209258, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9783357951747907, \"precision\": 1.0, \"recall\": 0.021664204825209258, \"specificity\": 1.0, \"npv\": 0.365581098339719, \"accuracy\": 0.3743702770780856, \"f1\": 0.042409638554216866, \"f2\": 0.02693437806072478, \"f0_5\": 0.09968282736746716, \"p4\": 0.07859401270561604, \"phi\": 0.08899451553133284}, {\"truth_threshold\": 21.600000321865082, \"match_probability\": 0.999999685404968, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 43.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1988.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.02117183653372723, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9788281634662728, \"precision\": 1.0, \"recall\": 0.02117183653372723, \"specificity\": 1.0, \"npv\": 0.3654644111075646, \"accuracy\": 0.37405541561712846, \"f1\": 0.041465766634522665, \"f2\": 0.026325456103832495, \"f0_5\": 0.09759418974126191, \"p4\": 0.07696925450739521, \"phi\": 0.08796336038865413}, {\"truth_threshold\": 21.700000323355198, \"match_probability\": 0.9999997064724503, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 42.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1989.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0206794682422452, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.9793205317577548, \"precision\": 1.0, \"recall\": 0.0206794682422452, \"specificity\": 1.0, \"npv\": 0.3653477983407786, \"accuracy\": 0.3737405541561713, \"f1\": 0.04052098408104197, \"f2\": 0.025716385011021307, \"f0_5\": 0.09549795361527967, \"p4\": 0.0753377096255321, \"phi\": 0.08692064307839845}, {\"truth_threshold\": 21.90000032633543, \"match_probability\": 0.9999997444694171, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 41.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1990.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.02018709995076317, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9798129000492368, \"precision\": 1.0, \"recall\": 0.02018709995076317, \"specificity\": 1.0, \"npv\": 0.3652312599681021, \"accuracy\": 0.3734256926952141, \"f1\": 0.03957528957528957, \"f2\": 0.02510716472749541, \"f0_5\": 0.09339407744874716, \"p4\": 0.07369933121919056, \"phi\": 0.08586594173547067}, {\"truth_threshold\": 22.000000327825546, \"match_probability\": 0.9999997615815319, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 35.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1996.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.017232890201871, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.982767109798129, \"precision\": 1.0, \"recall\": 0.017232890201871, \"specificity\": 1.0, \"npv\": 0.36453358802929003, \"accuracy\": 0.371536523929471, \"f1\": 0.03388189738625363, \"f2\": 0.02144870694938105, \"f0_5\": 0.08060801473975127, \"p4\": 0.06372287901796007, \"phi\": 0.07925886257954269}, {\"truth_threshold\": 22.100000329315662, \"match_probability\": 0.9999997775477002, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 34.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 1997.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.01674052191038897, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.983259478089611, \"precision\": 1.0, \"recall\": 0.01674052191038897, \"specificity\": 1.0, \"npv\": 0.364417568427753, \"accuracy\": 0.37122166246851385, \"f1\": 0.03292978208232446, \"f2\": 0.02083844079431233, \"f0_5\": 0.07844946931241348, \"p4\": 0.06203531774376609, \"phi\": 0.07810595552706255}, {\"truth_threshold\": 22.20000033080578, \"match_probability\": 0.9999997924446623, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 31.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2000.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.015263417035942885, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9847365829640571, \"precision\": 1.0, \"recall\": 0.015263417035942885, \"specificity\": 1.0, \"npv\": 0.3640699523052464, \"accuracy\": 0.3702770780856423, \"f1\": 0.030067895247332686, \"f2\": 0.01900674432863274, \"f0_5\": 0.07192575406032482, \"p4\": 0.0569290852372513, \"phi\": 0.0745449630242769}, {\"truth_threshold\": 22.300000332295895, \"match_probability\": 0.9999998063440199, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 27.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2004.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.013293943870014771, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9867060561299852, \"precision\": 1.0, \"recall\": 0.013293943870014771, \"specificity\": 1.0, \"npv\": 0.3636074944426802, \"accuracy\": 0.3690176322418136, \"f1\": 0.026239067055393587, \"f2\": 0.016562384983437616, \"f0_5\": 0.06311360448807854, \"p4\": 0.050017230584043997, \"phi\": 0.06952537394245138}, {\"truth_threshold\": 22.40000033378601, \"match_probability\": 0.9999998193125794, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 26.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2005.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.012801575578532743, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 
0.9871984244214672, \"precision\": 1.0, \"recall\": 0.012801575578532743, \"specificity\": 1.0, \"npv\": 0.3634920634920635, \"accuracy\": 0.36870277078085645, \"f1\": 0.025279533300923675, \"f2\": 0.015950920245398775, \"f0_5\": 0.06088992974238876, \"p4\": 0.048270424636238894, \"phi\": 0.0682148893057115}, {\"truth_threshold\": 22.600000336766243, \"match_probability\": 0.9999998427024609, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 25.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2006.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.012309207287050714, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9876907927129492, \"precision\": 1.0, \"recall\": 0.012309207287050714, \"specificity\": 1.0, \"npv\": 0.3633767058076801, \"accuracy\": 0.36838790931989923, \"f1\": 0.024319066147859923, \"f2\": 0.015339305436249846, \"f0_5\": 0.05865790708587518, \"p4\": 0.04651597386980402, \"phi\": 0.06687958728246145}, {\"truth_threshold\": 22.800000339746475, \"match_probability\": 0.9999998630645361, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 23.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2008.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.011324470704086657, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9886755292959133, \"precision\": 1.0, \"recall\": 0.011324470704086657, \"specificity\": 1.0, \"npv\": 0.36314620995876945, \"accuracy\": 0.3677581863979849, \"f1\": 0.022395326192794548, \"f2\": 0.014115625383576776, \"f0_5\": 0.054168629298162976, \"p4\": 0.042983917959230976, \"phi\": 0.06412829809045448}, {\"truth_threshold\": 23.000000342726707, \"match_probability\": 0.999999880790753, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 21.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2010.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0103397341211226, \"tn_rate\": 1.0, \"fp_rate\": 0.0, 
\"fn_rate\": 0.9896602658788775, \"precision\": 1.0, \"recall\": 0.0103397341211226, \"specificity\": 1.0, \"npv\": 0.3629160063391442, \"accuracy\": 0.36712846347607053, \"f1\": 0.02046783625730994, \"f2\": 0.01289134438305709, \"f0_5\": 0.04964539007092199, \"p4\": 0.03942061774542593, \"phi\": 0.06125728539403615}, {\"truth_threshold\": 23.100000344216824, \"match_probability\": 0.9999998887738388, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 20.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2011.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.009847365829640572, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9901526341703595, \"precision\": 1.0, \"recall\": 0.009847365829640572, \"specificity\": 1.0, \"npv\": 0.36280101394169834, \"accuracy\": 0.36681360201511337, \"f1\": 0.019502681618722574, \"f2\": 0.012278978388998035, \"f0_5\": 0.04737091425864519, \"p4\": 0.03762710959306445, \"phi\": 0.05977151752840505}, {\"truth_threshold\": 23.20000034570694, \"match_probability\": 0.9999998962223214, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 17.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2014.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.008370260955194485, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9916297390448056, \"precision\": 1.0, \"recall\": 0.008370260955194485, \"specificity\": 1.0, \"npv\": 0.3624564735675847, \"accuracy\": 0.36586901763224183, \"f1\": 0.0166015625, \"f2\": 0.010440977766859108, \"f0_5\": 0.040495474035254886, \"p4\": 0.03219846095822884, \"phi\": 0.05508044361350257}, {\"truth_threshold\": 23.400000348687172, \"match_probability\": 0.9999999096562825, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 15.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2016.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.007385524372230428, \"tn_rate\": 1.0, \"fp_rate\": 0.0, 
\"fn_rate\": 0.9926144756277696, \"precision\": 1.0, \"recall\": 0.007385524372230428, \"specificity\": 1.0, \"npv\": 0.3622271433090794, \"accuracy\": 0.36523929471032746, \"f1\": 0.01466275659824047, \"f2\": 0.009214891264283081, \"f0_5\": 0.035868005738880916, \"p4\": 0.028538670521671944, \"phi\": 0.05172269709897784}, {\"truth_threshold\": 23.500000350177288, \"match_probability\": 0.9999999157063305, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 12.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2019.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.005908419497784343, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9940915805022157, \"precision\": 1.0, \"recall\": 0.005908419497784343, \"specificity\": 1.0, \"npv\": 0.36188369152970923, \"accuracy\": 0.3642947103274559, \"f1\": 0.011747430249632892, \"f2\": 0.007374631268436578, \"f0_5\": 0.02886002886002886, \"p4\": 0.022986746233599045, \"phi\": 0.046240249339339734}, {\"truth_threshold\": 23.600000351667404, \"match_probability\": 0.9999999213512251, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 10.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2021.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.004923682914820286, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9950763170851797, \"precision\": 1.0, \"recall\": 0.004923682914820286, \"specificity\": 1.0, \"npv\": 0.3616550852811118, \"accuracy\": 0.36366498740554154, \"f1\": 0.009799118079372856, \"f2\": 0.006147037128104254, \"f0_5\": 0.024142926122646065, \"p4\": 0.019243252781973526, \"phi\": 0.0421980445572598}, {\"truth_threshold\": 23.70000035315752, \"match_probability\": 0.9999999266180979, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 9.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2022.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.004431314623338257, \"tn_rate\": 1.0, 
\"fp_rate\": 0.0, \"fn_rate\": 0.9955686853766618, \"precision\": 1.0, \"recall\": 0.004431314623338257, \"specificity\": 1.0, \"npv\": 0.361540890432586, \"accuracy\": 0.3633501259445844, \"f1\": 0.008823529411764706, \"f2\": 0.005533013648100332, \"f0_5\": 0.02177068214804064, \"p4\": 0.017358654565300884, \"phi\": 0.040026259314463214}, {\"truth_threshold\": 23.900000356137753, \"match_probability\": 0.9999999361173434, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 7.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2024.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0034465780403741997, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9965534219596258, \"precision\": 1.0, \"recall\": 0.0034465780403741997, \"specificity\": 1.0, \"npv\": 0.3613127169454087, \"accuracy\": 0.36272040302267, \"f1\": 0.0068694798822374874, \"f2\": 0.004304513589964334, \"f0_5\": 0.016998542982030112, \"p4\": 0.013563435077429192, \"phi\": 0.035288701817040316}, {\"truth_threshold\": 24.100000359117985, \"match_probability\": 0.9999999443869169, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 6.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2025.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0029542097488921715, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9970457902511078, \"precision\": 1.0, \"recall\": 0.0029542097488921715, \"specificity\": 1.0, \"npv\": 0.361198738170347, \"accuracy\": 0.36240554156171284, \"f1\": 0.005891016200294551, \"f2\": 0.0036900369003690036, \"f0_5\": 0.014598540145985401, \"p4\": 0.011652683870064942, \"phi\": 0.032665835877723835}, {\"truth_threshold\": 24.2000003606081, \"match_probability\": 0.9999999481111586, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 5.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2026.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.002461841457410143, 
\"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9975381585425899, \"precision\": 1.0, \"recall\": 0.002461841457410143, \"specificity\": 1.0, \"npv\": 0.36108483128350677, \"accuracy\": 0.3620906801007557, \"f1\": 0.004911591355599214, \"f2\": 0.0030754090294009104, \"f0_5\": 0.01218917601170161, \"p4\": 0.009733083985039102, \"phi\": 0.02981498964104606}, {\"truth_threshold\": 24.300000362098217, \"match_probability\": 0.999999951585999, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 4.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2027.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0019694731659281144, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9980305268340719, \"precision\": 1.0, \"recall\": 0.0019694731659281144, \"specificity\": 1.0, \"npv\": 0.3609709962168979, \"accuracy\": 0.36177581863979846, \"f1\": 0.003931203931203931, \"f2\": 0.0024606299212598425, \"f0_5\": 0.009770395701025891, \"p4\": 0.007804568825263287, \"phi\": 0.026663133550419747}, {\"truth_threshold\": 24.400000363588333, \"match_probability\": 0.9999999548281396, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 3.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2028.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0014771048744460858, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.9985228951255539, \"precision\": 1.0, \"recall\": 0.0014771048744460858, \"specificity\": 1.0, \"npv\": 0.36085723290261584, \"accuracy\": 0.3614609571788413, \"f1\": 0.0029498525073746312, \"f2\": 0.0018456995201181247, \"f0_5\": 0.007342143906020558, \"p4\": 0.00586707112734875, \"phi\": 0.023087312050119223}, {\"truth_threshold\": 24.600000366568565, \"match_probability\": 0.9999999606756114, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 2.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2029.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 
0.0009847365829640572, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.999015263417036, \"precision\": 1.0, \"recall\": 0.0009847365829640572, \"specificity\": 1.0, \"npv\": 0.36074354127284186, \"accuracy\": 0.36114609571788414, \"f1\": 0.001967535661583866, \"f2\": 0.0012306177701206006, \"f0_5\": 0.004904364884747425, \"p4\": 0.003920522953249476, \"phi\": 0.018847741566547744}, {\"truth_threshold\": 25.100000374019146, \"match_probability\": 0.9999999721934579, \"total_clerical_labels\": 3176.0, \"p\": 2031.0, \"n\": 1145.0, \"tp\": 1.0, \"tn\": 1145.0, \"fp\": 0.0, \"fn\": 2030.0, \"P_rate\": 0.6394836272040302, \"N_rate\": 0.36051636934280396, \"tp_rate\": 0.0004923682914820286, \"tn_rate\": 1.0, \"fp_rate\": 0.0, \"fn_rate\": 0.999507631708518, \"precision\": 1.0, \"recall\": 0.0004923682914820286, \"specificity\": 1.0, \"npv\": 0.3606299212598425, \"accuracy\": 0.3608312342569269, \"f1\": 0.000984251968503937, \"f2\": 0.0006153846153846154, \"f0_5\": 0.002457002457002457, \"p4\": 0.001964855681779181, \"phi\": 0.013325266908696693}]}}, {\"mode\": \"vega-lite\"});\n",
              "</script>"
            ],
            "text/plain": [
              "alt.Chart(...)"
            ]
          },
          "execution_count": 2,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "linker.evaluation.accuracy_analysis_from_labels_table(\n",
        "    labels_table,\n",
        "    output_type=\"precision_recall\",\n",
        "    add_metrics=[\"f1\"],\n",
        ")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": 7,
      "metadata": {},
      "outputs": [
        {
          "data": {
            "text/html": [
              "<div>\n",
              "<style scoped>\n",
              "    .dataframe tbody tr th:only-of-type {\n",
              "        vertical-align: middle;\n",
              "    }\n",
              "\n",
              "    .dataframe tbody tr th {\n",
              "        vertical-align: top;\n",
              "    }\n",
              "\n",
              "    .dataframe thead th {\n",
              "        text-align: right;\n",
              "    }\n",
              "</style>\n",
              "<table border=\"1\" class=\"dataframe\">\n",
              "  <thead>\n",
              "    <tr style=\"text-align: right;\">\n",
              "      <th></th>\n",
              "      <th>truth_threshold</th>\n",
              "      <th>match_probability</th>\n",
              "      <th>total_clerical_labels</th>\n",
              "      <th>p</th>\n",
              "      <th>n</th>\n",
              "      <th>tp</th>\n",
              "      <th>tn</th>\n",
              "      <th>fp</th>\n",
              "      <th>fn</th>\n",
              "      <th>P_rate</th>\n",
              "      <th>...</th>\n",
              "      <th>precision</th>\n",
              "      <th>recall</th>\n",
              "      <th>specificity</th>\n",
              "      <th>npv</th>\n",
              "      <th>accuracy</th>\n",
              "      <th>f1</th>\n",
              "      <th>f2</th>\n",
              "      <th>f0_5</th>\n",
              "      <th>p4</th>\n",
              "      <th>phi</th>\n",
              "    </tr>\n",
              "  </thead>\n",
              "  <tbody>\n",
              "    <tr>\n",
              "      <th>0</th>\n",
              "      <td>-23.8</td>\n",
              "      <td>6.846774e-08</td>\n",
              "      <td>3176.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>1446.0</td>\n",
              "      <td>1055.0</td>\n",
              "      <td>90.0</td>\n",
              "      <td>585.0</td>\n",
              "      <td>0.639484</td>\n",
              "      <td>...</td>\n",
              "      <td>0.941406</td>\n",
              "      <td>0.711965</td>\n",
              "      <td>0.921397</td>\n",
              "      <td>0.643293</td>\n",
              "      <td>0.787469</td>\n",
              "      <td>0.810765</td>\n",
              "      <td>0.748447</td>\n",
              "      <td>0.884404</td>\n",
              "      <td>0.783298</td>\n",
              "      <td>0.608544</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1</th>\n",
              "      <td>-22.7</td>\n",
              "      <td>1.467638e-07</td>\n",
              "      <td>3176.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>1446.0</td>\n",
              "      <td>1077.0</td>\n",
              "      <td>68.0</td>\n",
              "      <td>585.0</td>\n",
              "      <td>0.639484</td>\n",
              "      <td>...</td>\n",
              "      <td>0.955086</td>\n",
              "      <td>0.711965</td>\n",
              "      <td>0.940611</td>\n",
              "      <td>0.648014</td>\n",
              "      <td>0.794395</td>\n",
              "      <td>0.815797</td>\n",
              "      <td>0.750156</td>\n",
              "      <td>0.894027</td>\n",
              "      <td>0.790841</td>\n",
              "      <td>0.627351</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>2</th>\n",
              "      <td>-21.7</td>\n",
              "      <td>2.935275e-07</td>\n",
              "      <td>3176.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>1446.0</td>\n",
              "      <td>1083.0</td>\n",
              "      <td>62.0</td>\n",
              "      <td>585.0</td>\n",
              "      <td>0.639484</td>\n",
              "      <td>...</td>\n",
              "      <td>0.958886</td>\n",
              "      <td>0.711965</td>\n",
              "      <td>0.945852</td>\n",
              "      <td>0.649281</td>\n",
              "      <td>0.796285</td>\n",
              "      <td>0.817180</td>\n",
              "      <td>0.750623</td>\n",
              "      <td>0.896689</td>\n",
              "      <td>0.792887</td>\n",
              "      <td>0.632504</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3</th>\n",
              "      <td>-21.6</td>\n",
              "      <td>3.145950e-07</td>\n",
              "      <td>3176.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>1446.0</td>\n",
              "      <td>1088.0</td>\n",
              "      <td>57.0</td>\n",
              "      <td>585.0</td>\n",
              "      <td>0.639484</td>\n",
              "      <td>...</td>\n",
              "      <td>0.962076</td>\n",
              "      <td>0.711965</td>\n",
              "      <td>0.950218</td>\n",
              "      <td>0.650329</td>\n",
              "      <td>0.797859</td>\n",
              "      <td>0.818336</td>\n",
              "      <td>0.751013</td>\n",
              "      <td>0.898918</td>\n",
              "      <td>0.794588</td>\n",
              "      <td>0.636808</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>4</th>\n",
              "      <td>-20.6</td>\n",
              "      <td>6.291899e-07</td>\n",
              "      <td>3176.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>1446.0</td>\n",
              "      <td>1094.0</td>\n",
              "      <td>51.0</td>\n",
              "      <td>585.0</td>\n",
              "      <td>0.639484</td>\n",
              "      <td>...</td>\n",
              "      <td>0.965932</td>\n",
              "      <td>0.711965</td>\n",
              "      <td>0.955459</td>\n",
              "      <td>0.651578</td>\n",
              "      <td>0.799748</td>\n",
              "      <td>0.819728</td>\n",
              "      <td>0.751481</td>\n",
              "      <td>0.901609</td>\n",
              "      <td>0.796624</td>\n",
              "      <td>0.641982</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>...</th>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>278</th>\n",
              "      <td>24.2</td>\n",
              "      <td>9.999999e-01</td>\n",
              "      <td>3176.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>5.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>0.0</td>\n",
              "      <td>2026.0</td>\n",
              "      <td>0.639484</td>\n",
              "      <td>...</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>0.002462</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>0.361085</td>\n",
              "      <td>0.362091</td>\n",
              "      <td>0.004912</td>\n",
              "      <td>0.003075</td>\n",
              "      <td>0.012189</td>\n",
              "      <td>0.009733</td>\n",
              "      <td>0.029815</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>279</th>\n",
              "      <td>24.3</td>\n",
              "      <td>1.000000e+00</td>\n",
              "      <td>3176.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>4.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>0.0</td>\n",
              "      <td>2027.0</td>\n",
              "      <td>0.639484</td>\n",
              "      <td>...</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>0.001969</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>0.360971</td>\n",
              "      <td>0.361776</td>\n",
              "      <td>0.003931</td>\n",
              "      <td>0.002461</td>\n",
              "      <td>0.009770</td>\n",
              "      <td>0.007805</td>\n",
              "      <td>0.026663</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>280</th>\n",
              "      <td>24.4</td>\n",
              "      <td>1.000000e+00</td>\n",
              "      <td>3176.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>3.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>0.0</td>\n",
              "      <td>2028.0</td>\n",
              "      <td>0.639484</td>\n",
              "      <td>...</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>0.001477</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>0.360857</td>\n",
              "      <td>0.361461</td>\n",
              "      <td>0.002950</td>\n",
              "      <td>0.001846</td>\n",
              "      <td>0.007342</td>\n",
              "      <td>0.005867</td>\n",
              "      <td>0.023087</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>281</th>\n",
              "      <td>24.6</td>\n",
              "      <td>1.000000e+00</td>\n",
              "      <td>3176.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>2.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>0.0</td>\n",
              "      <td>2029.0</td>\n",
              "      <td>0.639484</td>\n",
              "      <td>...</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>0.000985</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>0.360744</td>\n",
              "      <td>0.361146</td>\n",
              "      <td>0.001968</td>\n",
              "      <td>0.001231</td>\n",
              "      <td>0.004904</td>\n",
              "      <td>0.003921</td>\n",
              "      <td>0.018848</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>282</th>\n",
              "      <td>25.1</td>\n",
              "      <td>1.000000e+00</td>\n",
              "      <td>3176.0</td>\n",
              "      <td>2031.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>1.0</td>\n",
              "      <td>1145.0</td>\n",
              "      <td>0.0</td>\n",
              "      <td>2030.0</td>\n",
              "      <td>0.639484</td>\n",
              "      <td>...</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>0.000492</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>0.360630</td>\n",
              "      <td>0.360831</td>\n",
              "      <td>0.000984</td>\n",
              "      <td>0.000615</td>\n",
              "      <td>0.002457</td>\n",
              "      <td>0.001965</td>\n",
              "      <td>0.013325</td>\n",
              "    </tr>\n",
              "  </tbody>\n",
              "</table>\n",
              "<p>283 rows × 25 columns</p>\n",
              "</div>"
            ],
            "text/plain": [
              "     truth_threshold  match_probability  total_clerical_labels       p  \\\n",
              "0              -23.8       6.846774e-08                 3176.0  2031.0   \n",
              "1              -22.7       1.467638e-07                 3176.0  2031.0   \n",
              "2              -21.7       2.935275e-07                 3176.0  2031.0   \n",
              "3              -21.6       3.145950e-07                 3176.0  2031.0   \n",
              "4              -20.6       6.291899e-07                 3176.0  2031.0   \n",
              "..               ...                ...                    ...     ...   \n",
              "278             24.2       9.999999e-01                 3176.0  2031.0   \n",
              "279             24.3       1.000000e+00                 3176.0  2031.0   \n",
              "280             24.4       1.000000e+00                 3176.0  2031.0   \n",
              "281             24.6       1.000000e+00                 3176.0  2031.0   \n",
              "282             25.1       1.000000e+00                 3176.0  2031.0   \n",
              "\n",
              "          n      tp      tn    fp      fn    P_rate  ...  precision    recall  \\\n",
              "0    1145.0  1446.0  1055.0  90.0   585.0  0.639484  ...   0.941406  0.711965   \n",
              "1    1145.0  1446.0  1077.0  68.0   585.0  0.639484  ...   0.955086  0.711965   \n",
              "2    1145.0  1446.0  1083.0  62.0   585.0  0.639484  ...   0.958886  0.711965   \n",
              "3    1145.0  1446.0  1088.0  57.0   585.0  0.639484  ...   0.962076  0.711965   \n",
              "4    1145.0  1446.0  1094.0  51.0   585.0  0.639484  ...   0.965932  0.711965   \n",
              "..      ...     ...     ...   ...     ...       ...  ...        ...       ...   \n",
              "278  1145.0     5.0  1145.0   0.0  2026.0  0.639484  ...   1.000000  0.002462   \n",
              "279  1145.0     4.0  1145.0   0.0  2027.0  0.639484  ...   1.000000  0.001969   \n",
              "280  1145.0     3.0  1145.0   0.0  2028.0  0.639484  ...   1.000000  0.001477   \n",
              "281  1145.0     2.0  1145.0   0.0  2029.0  0.639484  ...   1.000000  0.000985   \n",
              "282  1145.0     1.0  1145.0   0.0  2030.0  0.639484  ...   1.000000  0.000492   \n",
              "\n",
              "     specificity       npv  accuracy        f1        f2      f0_5        p4  \\\n",
              "0       0.921397  0.643293  0.787469  0.810765  0.748447  0.884404  0.783298   \n",
              "1       0.940611  0.648014  0.794395  0.815797  0.750156  0.894027  0.790841   \n",
              "2       0.945852  0.649281  0.796285  0.817180  0.750623  0.896689  0.792887   \n",
              "3       0.950218  0.650329  0.797859  0.818336  0.751013  0.898918  0.794588   \n",
              "4       0.955459  0.651578  0.799748  0.819728  0.751481  0.901609  0.796624   \n",
              "..           ...       ...       ...       ...       ...       ...       ...   \n",
              "278     1.000000  0.361085  0.362091  0.004912  0.003075  0.012189  0.009733   \n",
              "279     1.000000  0.360971  0.361776  0.003931  0.002461  0.009770  0.007805   \n",
              "280     1.000000  0.360857  0.361461  0.002950  0.001846  0.007342  0.005867   \n",
              "281     1.000000  0.360744  0.361146  0.001968  0.001231  0.004904  0.003921   \n",
              "282     1.000000  0.360630  0.360831  0.000984  0.000615  0.002457  0.001965   \n",
              "\n",
              "          phi  \n",
              "0    0.608544  \n",
              "1    0.627351  \n",
              "2    0.632504  \n",
              "3    0.636808  \n",
              "4    0.641982  \n",
              "..        ...  \n",
              "278  0.029815  \n",
              "279  0.026663  \n",
              "280  0.023087  \n",
              "281  0.018848  \n",
              "282  0.013325  \n",
              "\n",
              "[283 rows x 25 columns]"
            ]
          },
          "execution_count": 7,
          "metadata": {},
          "output_type": "execute_result"
        }
      ],
      "source": [
        "linker.evaluation.accuracy_analysis_from_labels_table(\n",
        "    labels_table, output_type=\"table\", add_metrics=[\"f1\"]\n",
        ").as_pandas_dataframe()"
      ]
    }
  ],
  "metadata": {
    "kernelspec": {
      "display_name": "base",
      "language": "python",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.10.8"
    },
    "orig_nbformat": 4
  },
  "nbformat": 4,
  "nbformat_minor": 2
}
