{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "import zlmdb\n",
    "import numpy as np\n",
    "from pprint import pprint\n",
    "\n",
    "from crossbar.master.database.globalschema import GlobalSchema\n",
    "from crossbar.master.database.mrealmschema import MrealmSchema\n",
    "\n",
    "print('running zlmdb v{} in {}'.format(zlmdb.__version__, os.getcwd()))\n",
    "\n",
    "# controller (global) database, path relative to this notebook's working directory\n",
    "DBFILE_GLOBAL = '../../cfc/.crossbar/.db-controller'\n",
    "\n",
    "gdb = zlmdb.Database(DBFILE_GLOBAL, maxsize=2**30, readonly=False)\n",
    "gschema = GlobalSchema.attach(gdb)\n",
    "\n",
    "# pick the first management realm; fail early with a clear message if none\n",
    "# exists (previously mrealm_id stayed undefined and raised NameError below)\n",
    "mrealm_id = None\n",
    "with gdb.begin() as txn:\n",
    "    for mrealm in gschema.mrealms.select(txn, return_keys=False, limit=1):\n",
    "        mrealm_id = mrealm.oid\n",
    "\n",
    "assert mrealm_id is not None, 'no management realm found in {}'.format(DBFILE_GLOBAL)\n",
    "\n",
    "DBFILE_MREALM = '../../cfc/.crossbar/.db-mrealm-{}'.format(mrealm_id)\n",
    "\n",
    "# the mrealm database is only read in this notebook, hence read-only\n",
    "db = zlmdb.Database(DBFILE_MREALM, maxsize=2**30, readonly=True)\n",
    "\n",
    "schema = MrealmSchema.attach(db)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# record counts per table, plus the two most recent usage metering records\n",
    "with db.begin() as txn:\n",
    "    cnt = schema.mnode_logs.count(txn)\n",
    "    print('{} mnodelog records stored'.format(cnt))\n",
    "\n",
    "    cnt = schema.mworker_logs.count(txn)\n",
    "    print('{} mworkerlog records stored'.format(cnt))\n",
    "\n",
    "with gdb.begin() as txn:\n",
    "    cnt = gschema.usage.count(txn)\n",
    "    # message fixed: limit=2 selects the last *two* records, not one\n",
    "    print('{} usage metering records stored. last two:\\n'.format(cnt))\n",
    "    for rec in gschema.usage.select(txn, limit=2, return_keys=False, reverse=True):\n",
    "        pprint(rec.marshal())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from pprint import pprint\n",
    "\n",
    "# dump the single most recent node-log and worker-log record in full\n",
    "with db.begin() as txn:\n",
    "    for rec in schema.mnode_logs.select(txn, limit=1, return_keys=False, reverse=True):\n",
    "        pprint(rec.marshal())\n",
    "    for rec in schema.mworker_logs.select(txn, limit=1, return_keys=False, reverse=True):\n",
    "        pprint(rec.marshal())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# earliest / latest node-log timestamp via a full key scan\n",
    "# (the next cell computes the same from just the first and last key)\n",
    "ts_min = None\n",
    "ts_max = None\n",
    "\n",
    "with db.begin() as txn:\n",
    "    for ts, _ in schema.mnode_logs.select(txn, return_values=False):\n",
    "        if ts_min is None or ts < ts_min:\n",
    "            ts_min = ts\n",
    "        if ts_max is None or ts > ts_max:\n",
    "            ts_max = ts\n",
    "\n",
    "print(ts_min)\n",
    "print(ts_max)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# same result as the previous cell, but without a full scan: keys are\n",
    "# (timestamp, node_id) in sorted order, so the first and last key give\n",
    "# the min and max timestamp directly\n",
    "with db.begin() as txn:\n",
    "    for ts, _ in schema.mnode_logs.select(txn, return_values=False, limit=1):\n",
    "        ts_min = ts\n",
    "    for ts, _ in schema.mnode_logs.select(txn, return_values=False, reverse=True, limit=1):\n",
    "        ts_max = ts\n",
    "\n",
    "print(ts_min)\n",
    "print(ts_max)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "with db.begin() as txn:  \n",
    "    for rec in schema.mnode_logs.select(txn, limit=20, return_keys=False, reverse=True):\n",
    "        print(rec.timestamp, rec.node_id, rec.routers)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Metering record from log `2019-06-24T18:29 - 2019-06-24T18:34` (all clients connected before, and remaining connected without interruption):\n",
    "\n",
    "```\n",
    "2019-06-24T20:34:08+0200 [Container    1070] Metering processing: aggregated and stored mrealm \"a026d293-4db8-49aa-ae89-aeeef8ede03e\" usage metering data for period [\"2019-06-24T18:29:00.000000000\", \"2019-06-24T18:34:00.000000000\"[:\n",
    "{'containers': 0,\n",
    " 'controllers': 0,\n",
    " 'count': 116,\n",
    " 'guests': 0,\n",
    " 'hostmonitors': 29,\n",
    " 'marketmakers': 0,\n",
    " 'mrealm_id': 'a026d293-4db8-49aa-ae89-aeeef8ede03e',\n",
    " 'mrealms': None,\n",
    " 'msgs_call': 583,\n",
    " 'msgs_error': 0,\n",
    " 'msgs_event': 1166,\n",
    " 'msgs_invocation': 583,\n",
    " 'msgs_publish': 583,\n",
    " 'msgs_published': 583,\n",
    " 'msgs_register': 0,\n",
    " 'msgs_registered': 0,\n",
    " 'msgs_result': 583,\n",
    " 'msgs_subscribe': 0,\n",
    " 'msgs_subscribed': 0,\n",
    " 'msgs_yield': 583,\n",
    " 'nodes': 2,\n",
    " 'processed': numpy.datetime64('2019-06-24T18:34:08.738467104'),\n",
    " 'proxies': 0,\n",
    " 'pubkey': None,\n",
    " 'routers': 87,\n",
    " 'sent': numpy.datetime64('2019-06-24T18:34:00.000000000'),\n",
    " 'seq': None,\n",
    " 'sessions': 203,\n",
    " 'status': 1,\n",
    " 'status_message': None,\n",
    " 'timestamp': numpy.datetime64('2019-06-24T18:29:00.000000000'),\n",
    " 'timestamp_from': None,\n",
    " 'total': 0}\n",
    "```\n",
    "\n",
    "Metering record from log `2019-06-24T18:34 - 2019-06-24T18:39` (all clients initially disconnected, then connecting at the beginning of the interval and staying connected throughout):\n",
    "\n",
    "```\n",
    "2019-06-24T20:39:08+0200 [Container    1070] Metering processing: aggregated and stored mrealm \"a026d293-4db8-49aa-ae89-aeeef8ede03e\" usage metering data for period [\"2019-06-24T18:34:00.000000000\", \"2019-06-24T18:39:00.000000000\"[:\n",
    "{'containers': 0,\n",
    " 'controllers': 0,\n",
    " 'count': 120,\n",
    " 'guests': 0,\n",
    " 'hostmonitors': 30,\n",
    " 'marketmakers': 0,\n",
    " 'mrealm_id': 'a026d293-4db8-49aa-ae89-aeeef8ede03e',\n",
    " 'mrealms': None,\n",
    " 'msgs_call': 569,\n",
    " 'msgs_error': 0,\n",
    " 'msgs_event': 1134,\n",
    " 'msgs_invocation': 569,\n",
    " 'msgs_publish': 601,\n",
    " 'msgs_published': 569,\n",
    " 'msgs_register': 4,\n",
    " 'msgs_registered': 4,\n",
    " 'msgs_result': 569,\n",
    " 'msgs_subscribe': 4,\n",
    " 'msgs_subscribed': 4,\n",
    " 'msgs_yield': 569,\n",
    " 'nodes': 2,\n",
    " 'processed': numpy.datetime64('2019-06-24T18:39:08.729419458'),\n",
    " 'proxies': 0,\n",
    " 'pubkey': None,\n",
    " 'routers': 90,\n",
    " 'sent': numpy.datetime64('2019-06-24T18:39:00.000000000'),\n",
    " 'seq': None,\n",
    " 'sessions': 209,\n",
    " 'status': 1,\n",
    " 'status_message': None,\n",
    " 'timestamp': numpy.datetime64('2019-06-24T18:34:00.000000000'),\n",
    " 'timestamp_from': None,\n",
    " 'total': 0}\n",
    "```"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "from uuid import UUID\n",
    "from crossbar.cfxdb.log import MWorkerLog\n",
    "\n",
    "# aggregation interval [key1, key2): worker-log keys are\n",
    "# (timestamp, node_id, worker_id), so pad node_id with all-zero /\n",
    "# all-one UUIDs to cover every node in the time range.\n",
    "# (a first key1/key2 pair for the 18:29-18:34 interval was dead code -\n",
    "# immediately overwritten - and has been removed)\n",
    "key1 = (np.datetime64('2019-06-24T18:34:00.000000000'), UUID(bytes=b'\\x00' * 16), '')\n",
    "key2 = (np.datetime64('2019-06-24T18:39:00.000000000'), UUID(bytes=b'\\xff' * 16), '')\n",
    "\n",
    "total = 0\n",
    "sessions = 0\n",
    "wres = {}\n",
    "\n",
    "res = {\n",
    "    'count': 0,\n",
    "    'total': 0,\n",
    "    'controllers': 0,\n",
    "    'hostmonitors': 0,\n",
    "    'routers': 0,\n",
    "    'containers': 0,\n",
    "    'guests': 0,\n",
    "    'proxies': 0,\n",
    "    'marketmakers': 0,\n",
    "    'sessions': 0,\n",
    "    'msgs_publish': 0,\n",
    "    'msgs_event': 0,\n",
    "    'msgs_subscribe': 0,\n",
    "}\n",
    "\n",
    "with db.begin() as txn:\n",
    "    for (ts, node_id, worker_id), rec in schema.mworker_logs.select(txn, reverse=False, from_key=key1, to_key=key2):\n",
    "        total += 1\n",
    "        sessions += rec.router_sessions\n",
    "\n",
    "        # NOTE(review): naive pluralization - a 'proxy' worker type would\n",
    "        # yield 'proxys' and raise KeyError (res key is 'proxies'). works\n",
    "        # for the worker types present in this data set - confirm against\n",
    "        # MWorkerLog.WORKER_TYPENAMES before reuse.\n",
    "        worker_type = MWorkerLog.WORKER_TYPENAMES[rec.type]\n",
    "        res['{}s'.format(worker_type)] += 1\n",
    "        res['count'] += 1\n",
    "\n",
    "        # per-(node, worker) accumulators: sessions are summed over\n",
    "        # heartbeats; the message counters appear cumulative, so track\n",
    "        # min/max per worker and take the difference below\n",
    "        wkey = (node_id, worker_id)\n",
    "        if wkey not in wres:\n",
    "            wres[wkey] = {\n",
    "                'sessions': 0,\n",
    "                'msgs_publish_min': 0,\n",
    "                'msgs_publish_max': 0,\n",
    "                'msgs_event_min': 0,\n",
    "                'msgs_event_max': 0,\n",
    "                'msgs_subscribe_min': 0,\n",
    "                'msgs_subscribe_max': 0,\n",
    "            }\n",
    "\n",
    "        wres[wkey]['sessions'] += rec.router_sessions\n",
    "\n",
    "        if rec.recv_publish > wres[wkey]['msgs_publish_max']:\n",
    "            wres[wkey]['msgs_publish_max'] = rec.recv_publish\n",
    "        if not wres[wkey]['msgs_publish_min'] or rec.recv_publish < wres[wkey]['msgs_publish_min']:\n",
    "            wres[wkey]['msgs_publish_min'] = rec.recv_publish\n",
    "\n",
    "        if rec.sent_event > wres[wkey]['msgs_event_max']:\n",
    "            wres[wkey]['msgs_event_max'] = rec.sent_event\n",
    "        if not wres[wkey]['msgs_event_min'] or rec.sent_event < wres[wkey]['msgs_event_min']:\n",
    "            wres[wkey]['msgs_event_min'] = rec.sent_event\n",
    "\n",
    "        if rec.recv_subscribe > wres[wkey]['msgs_subscribe_max']:\n",
    "            wres[wkey]['msgs_subscribe_max'] = rec.recv_subscribe\n",
    "        if not wres[wkey]['msgs_subscribe_min'] or rec.recv_subscribe < wres[wkey]['msgs_subscribe_min']:\n",
    "            wres[wkey]['msgs_subscribe_min'] = rec.recv_subscribe\n",
    "\n",
    "# fold per-worker results into the overall aggregate\n",
    "for wkey in wres:\n",
    "    res['sessions'] += wres[wkey]['sessions']\n",
    "    res['msgs_publish'] += wres[wkey]['msgs_publish_max'] - wres[wkey]['msgs_publish_min']\n",
    "    res['msgs_event'] += wres[wkey]['msgs_event_max'] - wres[wkey]['msgs_event_min']\n",
    "    res['msgs_subscribe'] += wres[wkey]['msgs_subscribe_max'] - wres[wkey]['msgs_subscribe_min']\n",
    "\n",
    "pprint(wres)\n",
    "pprint(res)\n",
    "\n",
    "heartbeat_secs = 10\n",
    "print('=' * 100)\n",
    "print('Sessions: {} seconds'.format(sessions * heartbeat_secs))\n",
    "print('Routers: {} seconds'.format(res['routers'] * heartbeat_secs))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "5 * 60 / 10 == 30, 30 * 7 == 210, 30 * 3 == 90, 5 * 60 / 2 * 4 == 600"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# NOTE(review): duplicate import - GlobalSchema is already imported in\n",
    "# the first cell\n",
    "from crossbar.master.database.globalschema import GlobalSchema\n",
    "\n",
    "# NOTE(review): hardcoded absolute path - presumably the same controller\n",
    "# database as the relative DBFILE_GLOBAL above; verify before running on\n",
    "# another machine\n",
    "DBFILE_GLOBAL = '/home/oberstet/scm/crossbario/crossbar/test/cfc/.crossbar/.db-controller'\n",
    "\n",
    "gdb = zlmdb.Database(DBFILE_GLOBAL, maxsize=2**30, readonly=False)\n",
    "\n",
    "gschema = GlobalSchema.attach(gdb)\n",
    "\n",
    "with gdb.begin() as txn:\n",
    "    cnt = gschema.mrealms.count(txn)\n",
    "    print('{} mrealms records'.format(cnt))\n",
    "\n",
    "    cnt = gschema.usage.count(txn)\n",
    "    print('{} usage records'.format(cnt))\n",
    "    \n",
    "    # dump all management realms, then print the oid of the first one\n",
    "    for mrealm in gschema.mrealms.select(txn, return_keys=False):\n",
    "        pprint(mrealm.marshal())\n",
    "        \n",
    "    for mrealm in gschema.mrealms.select(txn, return_keys=False, limit=1):\n",
    "        print(mrealm.oid)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from pprint import pprint\n",
    "\n",
    "with gdb.begin() as txn:  \n",
    "    for rec in gschema.usage.select(txn, limit=1, return_keys=False, reverse=True):\n",
    "        pprint(rec.marshal())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "with db.begin() as txn:  \n",
    "    for rec in schema.mnode_logs.select(txn, limit=20, return_keys=False, reverse=True):\n",
    "        #pprint(rec.marshal())\n",
    "        print(rec.timestamp, rec.node_id, rec.routers, rec.cpu_freq, rec.cpu_system, rec.cpu_user)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "with db.begin() as txn:  \n",
    "    for rec in schema.mworker_logs.select(txn, return_keys=False, reverse=True):\n",
    "        if rec.worker_id == 'worker002':\n",
    "            pprint(rec.marshal())\n",
    "            break"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import uuid\n",
    "import binascii\n",
    "\n",
    "# look up the most recent usage record for one specific node via the\n",
    "# last-usage-by-pubkey index\n",
    "node_id = uuid.UUID('9e604eff-029b-4ce6-bbd7-962bf541fb63')\n",
    "\n",
    "with gdb.begin() as txn:\n",
    "    node = gschema.nodes[txn, node_id]\n",
    "    if node:\n",
    "        # node.pubkey is hex-encoded; the index is keyed by the raw bytes\n",
    "        pubkey = binascii.a2b_hex(node.pubkey)\n",
    "        rec = gschema.idx_last_usage_by_pubkey[txn, pubkey]\n",
    "        if rec:\n",
    "            pprint(rec.marshal())\n",
    "        else:\n",
    "            print('node found, but no usage found!')\n",
    "    else:\n",
    "        print('node not found!')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from uuid import UUID\n",
    "import numpy as np\n",
    "\n",
    "# one-day key range [key1, key2) over (timestamp, node_id) keys.\n",
    "# timestamps normalized to 9 fractional digits (nanoseconds) for\n",
    "# consistency with the other cells - the originals had 8 digits, which\n",
    "# parsed to the same instant since all digits are zero\n",
    "key1 = (np.datetime64('2019-06-23T00:00:00.000000000'), UUID(bytes=b'\\x00' * 16))\n",
    "key2 = (np.datetime64('2019-06-24T00:00:00.000000000'), UUID(bytes=b'\\xff' * 16))\n",
    "\n",
    "with db.begin() as txn:\n",
    "    cnt = schema.mnode_logs.count_range(txn, from_key=key1, to_key=key2)\n",
    "    print('cnt=', cnt)\n",
    "    # show the first few keys in the range\n",
    "    for key in schema.mnode_logs.select(txn, limit=5, return_values=False, from_key=key1, to_key=key2, reverse=False):\n",
    "        print(key)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "with gdb.begin() as txn:  \n",
    "    for rec in gschema.usage.select(txn, limit=1, return_keys=False, reverse=True):\n",
    "        #print(rec.timestamp, rec.timestamp_from, rec.processed, rec.routers)\n",
    "        pprint(rec.marshal())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import uuid\n",
    "from pprint import pprint\n",
    "\n",
    "# aggregation interval [from_ts, until_ts)\n",
    "from_ts = np.datetime64('2019-06-25T09:57:00.000000000')\n",
    "until_ts = np.datetime64('2019-06-25T10:02:00.000000000')\n",
    "\n",
    "# compute aggregate sum of node/worker 'seconds' over the interval\n",
    "res = {\n",
    "    'count': 0,\n",
    "    'nodes': 0,\n",
    "    'routers': 0,\n",
    "    'containers': 0,\n",
    "    'guests': 0,\n",
    "    'proxies': 0,\n",
    "    'marketmakers': 0,\n",
    "    'hostmonitors': 0,\n",
    "    'controllers': 0,\n",
    "}\n",
    "nodes = set()\n",
    "with db.begin() as txn:\n",
    "    # fetch keys and values in one pass (the previous version selected\n",
    "    # keys only and then re-read every record with a second lookup)\n",
    "    for (ts, node_id), rec in schema.mnode_logs.select(\n",
    "            txn,\n",
    "            from_key=(from_ts, uuid.UUID(bytes=b'\\x00' * 16)),\n",
    "            to_key=(until_ts, uuid.UUID(bytes=b'\\xff' * 16)),\n",
    "            reverse=False):\n",
    "\n",
    "        # set.add is idempotent - no membership check needed\n",
    "        nodes.add(node_id)\n",
    "\n",
    "        # rec.period weights each counter - presumably the heartbeat\n",
    "        # period in seconds (heartbeat_secs = 10 elsewhere); confirm\n",
    "        res['count'] += 1\n",
    "        res['nodes'] += rec.period\n",
    "        res['routers'] += rec.routers * rec.period\n",
    "        res['containers'] += rec.containers * rec.period\n",
    "        res['guests'] += rec.guests * rec.period\n",
    "        res['proxies'] += rec.proxies * rec.period\n",
    "        res['marketmakers'] += rec.marketmakers * rec.period\n",
    "        res['hostmonitors'] += rec.hostmonitors * rec.period\n",
    "        res['controllers'] += rec.controllers * rec.period\n",
    "\n",
    "pprint(res)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "with db.begin() as txn:\n",
    "    for rec in schema.mnode_logs.select(\n",
    "            txn,\n",
    "            return_keys=False,\n",
    "            reverse=True,\n",
    "            limit=20):\n",
    "        print(rec.timestamp, rec.mrealm_id, rec.node_id, rec.controllers, rec.routers, rec.hostmonitors)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "with gdb.begin() as txn:\n",
    "    print('timestamp|mrealm_id|status|node seconds|controller seconds|hostmonitor seconds|router seconds|session seconds|calls')\n",
    "    print('-'*120)\n",
    "    for rec in gschema.usage.select(\n",
    "            txn,\n",
    "            return_keys=False,\n",
    "            reverse=True,\n",
    "            limit=10):\n",
    "        print(rec.timestamp, rec.mrealm_id, rec.status, rec.nodes, rec.controllers,\n",
    "              rec.hostmonitors, rec.routers, rec.sessions, rec.msgs_call)\n",
    "\n",
    "    print()\n",
    "    print('timestamp|mrealm_id|aggregation period|processing lag')\n",
    "    print('-'*120)\n",
    "    for rec in gschema.usage.select(\n",
    "            txn,\n",
    "            return_keys=False,\n",
    "            reverse=True,\n",
    "            limit=10):\n",
    "        print(rec.timestamp, rec.mrealm_id, np.timedelta64(rec.timestamp - rec.timestamp_from, 's'),\n",
    "              np.timedelta64(rec.processed - rec.timestamp, 'ms'))\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
