{"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/alembic\/env.py","language":"python","identifier":"run_migrations_offline","parameters":"()","argument_list":"","return_statement":"","docstring":"Run migrations in 'offline' mode.\n\n This configures the context with just a URL\n and not an Engine, though an Engine is acceptable\n here as well. By skipping the Engine creation\n we don't even need a DBAPI to be available.\n\n Calls to context.execute() here emit the given string to the\n script output.","docstring_summary":"Run migrations in 'offline' mode.","docstring_tokens":["Run","migrations","in","offline","mode","."],"function":"def run_migrations_offline():\n \"\"\"Run migrations in 'offline' mode.\n\n This configures the context with just a URL\n and not an Engine, though an Engine is acceptable\n here as well. By skipping the Engine creation\n we don't even need a DBAPI to be available.\n\n Calls to context.execute() here emit the given string to the\n script output.\n\n \"\"\"\n url = config.get_main_option(\"sqlalchemy.url\")\n\n audius_db_url = os.getenv(\"audius_db_url\")\n if audius_db_url:\n url = audius_db_url\n\n context.configure(url=url, target_metadata=target_metadata, literal_binds=True)\n\n with context.begin_transaction():\n context.run_migrations()","function_tokens":["def","run_migrations_offline","(",")",":","url","=","config",".","get_main_option","(","\"sqlalchemy.url\"",")","audius_db_url","=","os",".","getenv","(","\"audius_db_url\"",")","if","audius_db_url",":","url","=","audius_db_url","context",".","configure","(","url","=","url",",","target_metadata","=","target_metadata",",","literal_binds","=","True",")","with","context",".","begin_transaction","(",")",":","context",".","run_migrations","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/alembic\/env.py#L26-L47"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/alembic\/env.py","language":"python","identifier":"run_migrations_online","parameters":"()","argument_list":"","return_statement":"","docstring":"Run migrations in 'online' mode.\n\n In this scenario we need to create an Engine\n and associate a connection with the context.","docstring_summary":"Run migrations in 'online' mode.","docstring_tokens":["Run","migrations","in","online","mode","."],"function":"def run_migrations_online():\n \"\"\"Run migrations in 'online' mode.\n\n In this scenario we need to create an Engine\n and associate a connection with the context.\n\n \"\"\"\n audius_db_url = os.getenv(\"audius_db_url\")\n\n if audius_db_url:\n config.set_main_option(\"sqlalchemy.url\", audius_db_url)\n\n connectable = engine_from_config(\n config.get_section(config.config_ini_section),\n prefix=\"sqlalchemy.\",\n poolclass=pool.NullPool,\n )\n\n with connectable.connect() as connection:\n context.configure(connection=connection, target_metadata=target_metadata)\n\n with context.begin_transaction():\n 
context.run_migrations()","function_tokens":["def","run_migrations_online","(",")",":","audius_db_url","=","os",".","getenv","(","\"audius_db_url\"",")","if","audius_db_url",":","config",".","set_main_option","(","\"sqlalchemy.url\"",",","audius_db_url",")","connectable","=","engine_from_config","(","config",".","get_section","(","config",".","config_ini_section",")",",","prefix","=","\"sqlalchemy.\"",",","poolclass","=","pool",".","NullPool",",",")","with","connectable",".","connect","(",")","as","connection",":","context",".","configure","(","connection","=","connection",",","target_metadata","=","target_metadata",")","with","context",".","begin_transaction","(",")",":","context",".","run_migrations","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/alembic\/env.py#L50-L72"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/alembic\/versions\/c8d2be7dcccc_repair_poorly_sorted_tracks.py","language":"python","identifier":"fix_segments","parameters":"(segments)","argument_list":"","return_statement":"return fixed_segments","docstring":"Fixes segments from a string sorted %03d order to a proper integer based order.\n\n Currently, we observe incorrect segment orders of\n 099.ts\n 100.ts\n 1001.ts\n 1002.ts\n ...\n 101.ts\n\n This method takes in a list of segments and re orders them, returning the proper order.\n It does this by replicating a sort on the %03d naming schema and captures the indexes that move\n when that happens and then uses that relationship to back out what the original order should be.","docstring_summary":"Fixes segments from a string sorted %03d order to a proper integer based order.","docstring_tokens":["Fixes","segments","from","a","string","sorted","%03d","order","to","a","proper","integer","based","order","."],"function":"def fix_segments(segments):\n \"\"\"\n Fixes segments from a string sorted %03d order to a proper integer based order.\n\n Currently, we observe incorrect segment orders of\n 099.ts\n 100.ts\n 1001.ts\n 1002.ts\n ...\n 101.ts\n\n This method takes in a list of segments and re orders them, returning the proper order.\n It does this by replicating a sort on the %03d naming schema and captures the indexes that move\n when that happens and then uses that relationship to back out what the original order should be.\n \"\"\"\n fixed_segments = [None] * len(segments)\n # Produce tuples for the total length of segments (number, actual segment name), e.g. (0, segment000.ts)\n tuples = [(i, FMT % i) for i in range(0, len(segments))]\n # Sort segments by their stored file name fmt. This mirrors the error.\n sorted_tuples = sorted(tuples, key=lambda x: x[1])\n\n # Re-map out the tuples, capturing the actual index they should be at. This gives us\n # {0: 0, 1: 1, ..., 101: 1000, ..., 111: 101, ...}\n segment_map = {}\n for i in range(len(sorted_tuples)):\n proper_index = sorted_tuples[i][0]\n segment_map[i] = proper_index\n\n # Produce our final order and return\n for mapping in segment_map.items():\n fixed_segments[mapping[1]] = segments[mapping[0]]\n\n return fixed_segments","function_tokens":["def","fix_segments","(","segments",")",":","fixed_segments","=","[","None","]","*","len","(","segments",")","# Produce tuples for the total length of segments (number, actual segment name), e.g. 
(0, segment000.ts)","tuples","=","[","(","i",",","FMT","%","i",")","for","i","in","range","(","0",",","len","(","segments",")",")","]","# Sort segments by their stored file name fmt. This mirrors the error.","sorted_tuples","=","sorted","(","tuples",",","key","=","lambda","x",":","x","[","1","]",")","# Re-map out the tuples, capturing the actual index they should be at. This gives us","# {0: 0, 1: 1, ..., 101: 1000, ..., 111: 101, ...}","segment_map","=","{","}","for","i","in","range","(","len","(","sorted_tuples",")",")",":","proper_index","=","sorted_tuples","[","i","]","[","0","]","segment_map","[","i","]","=","proper_index","# Produce our final order and return","for","mapping","in","segment_map",".","items","(",")",":","fixed_segments","[","mapping","[","1","]","]","=","segments","[","mapping","[","0","]","]","return","fixed_segments"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/alembic\/versions\/c8d2be7dcccc_repair_poorly_sorted_tracks.py#L214-L247"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/alembic\/versions\/c8d2be7dcccc_repair_poorly_sorted_tracks.py","language":"python","identifier":"unfix_segments","parameters":"(segments)","argument_list":"","return_statement":"return unfixed_segments","docstring":"Un-fixes segments (for down migration).\n Identical to fix_segments, except proper_index and i are swapped to revert the change.","docstring_summary":"Un-fixes segments (for down migration).\n Identical to fix_segments, except proper_index and i are swapped to revert the change.","docstring_tokens":["Un","-","fixes","segments","(","for","down","migration",")",".","Identical","to","fix_segments","except","proper_index","and","i","are","swapped","to","revert","the","change","."],"function":"def unfix_segments(segments):\n \"\"\"\n Un-fixes segments (for down migration).\n Identical to fix_segments, except proper_index and i are swapped to revert the change.\n \"\"\"\n unfixed_segments = [None] * len(segments)\n tuples = [(i, FMT % i) for i in range(0, len(segments))]\n sorted_tuples = sorted(tuples, key=lambda x: x[1])\n segment_map = {}\n for i in range(len(sorted_tuples)):\n proper_index = sorted_tuples[i][0]\n # This line is the only difference from fix_segments.\n segment_map[proper_index] = i\n\n for mapping in segment_map.items():\n unfixed_segments[mapping[1]] = segments[mapping[0]]\n return unfixed_segments","function_tokens":["def","unfix_segments","(","segments",")",":","unfixed_segments","=","[","None","]","*","len","(","segments",")","tuples","=","[","(","i",",","FMT","%","i",")","for","i","in","range","(","0",",","len","(","segments",")",")","]","sorted_tuples","=","sorted","(","tuples",",","key","=","lambda","x",":","x","[","1","]",")","segment_map","=","{","}","for","i","in","range","(","len","(","sorted_tuples",")",")",":","proper_index","=","sorted_tuples","[","i","]","[","0","]","# This line is the only difference from fix_segments.","segment_map","[","proper_index","]","=","i","for","mapping","in","segment_map",".","items","(",")",":","unfixed_segments","[","mapping","[","1","]","]","=","segments","[","mapping","[","0","]","]","return","unfixed_segments"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/alembic\/versions\/c8d2be7dcccc_repair_poorly_sorted_tracks.py#L250-L266"} 
{"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_tracks.py","language":"python","identifier":"get_tracks","parameters":"(args: GetTrackArgs)","argument_list":"","return_statement":"return tracks","docstring":"Gets tracks.\n A note on caching strategy:\n - This method is cached at two layers: at the API via the @cache decorator,\n and within this method using the shared get_unpopulated_tracks cache.\n\n The shared cache only works when fetching via ID, so calls to fetch tracks\n via handle, asc\/desc sort, or filtering by block_number won't hit the shared cache.\n These will hit the API cache unless they have a current_user_id included.","docstring_summary":"Gets tracks.\n A note on caching strategy:\n - This method is cached at two layers: at the API via the @cache decorator,\n and within this method using the shared get_unpopulated_tracks cache.","docstring_tokens":["Gets","tracks",".","A","note","on","caching","strategy",":","-","This","method","is","cached","at","two","layers",":","at","the","API","via","the","@cache","decorator","and","within","this","method","using","the","shared","get_unpopulated_tracks","cache","."],"function":"def get_tracks(args: GetTrackArgs):\n \"\"\"\n Gets tracks.\n A note on caching strategy:\n - This method is cached at two layers: at the API via the @cache decorator,\n and within this method using the shared get_unpopulated_tracks cache.\n\n The shared cache only works when fetching via ID, so calls to fetch tracks\n via handle, asc\/desc sort, or filtering by block_number won't hit the shared cache.\n These will hit the API cache unless they have a current_user_id included.\n\n \"\"\"\n tracks = []\n\n db = get_db_read_replica()\n with db.scoped_session() as session:\n\n def get_tracks_and_ids():\n if \"handle\" in args:\n handle = args.get(\"handle\")\n user = (\n session.query(User.user_id)\n .filter(User.handle_lc == handle.lower())\n .first()\n )\n args[\"user_id\"] = user.user_id\n\n if \"routes\" in args:\n # Convert the handles to user_ids\n routes = args.get(\"routes\")\n handles = [route[\"handle\"].lower() for route in routes]\n user_id_tuples = (\n session.query(User.user_id, User.handle_lc)\n .filter(User.handle_lc.in_(handles), User.is_current == True)\n .all()\n )\n user_id_map = {handle: user_id for (user_id, handle) in user_id_tuples}\n args[\"routes\"] = []\n for route in routes:\n if route[\"handle\"].lower() in user_id_map:\n args[\"routes\"].append(\n {\n \"slug\": route[\"slug\"],\n \"owner_id\": user_id_map[route[\"handle\"].lower()],\n }\n )\n # If none of the handles were found, return empty lists\n if not args[\"routes\"]:\n return ([], [])\n\n can_use_shared_cache = (\n \"id\" in args\n and \"min_block_number\" not in args\n and \"sort\" not in args\n and \"user_id\" not in args\n )\n\n if can_use_shared_cache:\n should_filter_deleted = args.get(\"filter_deleted\", False)\n tracks = get_unpopulated_tracks(\n session, args[\"id\"], should_filter_deleted\n )\n track_ids = list(map(lambda track: track[\"track_id\"], tracks))\n return (tracks, track_ids)\n\n (limit, offset) = get_pagination_vars()\n args[\"limit\"] = limit\n args[\"offset\"] = offset\n\n tracks = _get_tracks(session, args)\n\n track_ids = list(map(lambda track: track[\"track_id\"], tracks))\n\n return (tracks, track_ids)\n\n (tracks, track_ids) = get_tracks_and_ids()\n\n # bundle peripheral info into track results\n current_user_id = args.get(\"current_user_id\")\n\n # remove track 
segments and download cids from deactivated user tracks and deleted tracks\n for track in tracks:\n if track[\"user\"][0][\"is_deactivated\"] or track[\"is_delete\"]:\n track[\"track_segments\"] = []\n track[\"download\"][\"cid\"] = None\n\n tracks = populate_track_metadata(session, track_ids, tracks, current_user_id)\n\n if args.get(\"with_users\", False):\n add_users_to_tracks(session, tracks, current_user_id)\n else:\n # Remove the user from the tracks\n tracks = [\n {key: val for key, val in dict.items() if key != \"user\"}\n for dict in tracks\n ]\n return tracks","function_tokens":["def","get_tracks","(","args",":","GetTrackArgs",")",":","tracks","=","[","]","db","=","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","def","get_tracks_and_ids","(",")",":","if","\"handle\"","in","args",":","handle","=","args",".","get","(","\"handle\"",")","user","=","(","session",".","query","(","User",".","user_id",")",".","filter","(","User",".","handle_lc","==","handle",".","lower","(",")",")",".","first","(",")",")","args","[","\"user_id\"","]","=","user",".","user_id","if","\"routes\"","in","args",":","# Convert the handles to user_ids","routes","=","args",".","get","(","\"routes\"",")","handles","=","[","route","[","\"handle\"","]",".","lower","(",")","for","route","in","routes","]","user_id_tuples","=","(","session",".","query","(","User",".","user_id",",","User",".","handle_lc",")",".","filter","(","User",".","handle_lc",".","in_","(","handles",")",",","User",".","is_current","==","True",")",".","all","(",")",")","user_id_map","=","{","handle",":","user_id","for","(","user_id",",","handle",")","in","user_id_tuples","}","args","[","\"routes\"","]","=","[","]","for","route","in","routes",":","if","route","[","\"handle\"","]",".","lower","(",")","in","user_id_map",":","args","[","\"routes\"","]",".","append","(","{","\"slug\"",":","route","[","\"slug\"","]",",","\"owner_id\"",":","user_id_map","[","route","[","\"handle\"","]",".","lower","(",")","]",",","}",")","# If none of the handles were found, return empty lists","if","not","args","[","\"routes\"","]",":","return","(","[","]",",","[","]",")","can_use_shared_cache","=","(","\"id\"","in","args","and","\"min_block_number\"","not","in","args","and","\"sort\"","not","in","args","and","\"user_id\"","not","in","args",")","if","can_use_shared_cache",":","should_filter_deleted","=","args",".","get","(","\"filter_deleted\"",",","False",")","tracks","=","get_unpopulated_tracks","(","session",",","args","[","\"id\"","]",",","should_filter_deleted",")","track_ids","=","list","(","map","(","lambda","track",":","track","[","\"track_id\"","]",",","tracks",")",")","return","(","tracks",",","track_ids",")","(","limit",",","offset",")","=","get_pagination_vars","(",")","args","[","\"limit\"","]","=","limit","args","[","\"offset\"","]","=","offset","tracks","=","_get_tracks","(","session",",","args",")","track_ids","=","list","(","map","(","lambda","track",":","track","[","\"track_id\"","]",",","tracks",")",")","return","(","tracks",",","track_ids",")","(","tracks",",","track_ids",")","=","get_tracks_and_ids","(",")","# bundle peripheral info into track results","current_user_id","=","args",".","get","(","\"current_user_id\"",")","# remove track segments and download cids from deactivated user tracks and deleted 
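Two hypothetical call shapes (argument values invented) illustrating which requests qualify for the shared cache described in the docstring. Note that limit/offset for the non-ID path are filled in from get_pagination_vars() rather than passed by the caller.

# Eligible for the shared get_unpopulated_tracks cache: pure ID fetch with
# no sort, min_block_number, handle, or user_id constraints.
by_id = get_tracks({"id": [42, 43], "filter_deleted": True})

# Not eligible: the handle resolves to a user_id, which disqualifies the
# shared cache; this request can still be served by the API-level cache.
by_handle = get_tracks({"handle": "someartist", "sort": "desc"})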
tracks","for","track","in","tracks",":","if","track","[","\"user\"","]","[","0","]","[","\"is_deactivated\"","]","or","track","[","\"is_delete\"","]",":","track","[","\"track_segments\"","]","=","[","]","track","[","\"download\"","]","[","\"cid\"","]","=","None","tracks","=","populate_track_metadata","(","session",",","track_ids",",","tracks",",","current_user_id",")","if","args",".","get","(","\"with_users\"",",","False",")",":","add_users_to_tracks","(","session",",","tracks",",","current_user_id",")","else",":","# Remove the user from the tracks","tracks","=","[","{","key",":","val","for","key",",","val","in","dict",".","items","(",")","if","key","!=","\"user\"","}","for","dict","in","tracks","]","return","tracks"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_tracks.py#L128-L224"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/search_user_tags.py","language":"python","identifier":"search_user_tags","parameters":"(session, args)","argument_list":"","return_statement":"return followee_sorted_users","docstring":"Gets the users with tracks with a given tag\n\n Args:\n session: sqlalchemy db session instance\n args: dict of arguments\n args.search_str: string the tag search string\n args.current_user_id: id | null The user id making the query\n args.limit: number the query limit of number of returns tracks\n args.offset: number the query offset for results\n args.user_tag_count: number The number of tracks with the query tag\n\n Returns:\n list of users sorted by followee count","docstring_summary":"Gets the users with tracks with a given tag","docstring_tokens":["Gets","the","users","with","tracks","with","a","given","tag"],"function":"def search_user_tags(session, args):\n \"\"\"\n Gets the users with tracks with a given tag\n\n Args:\n session: sqlalchemy db session instance\n args: dict of arguments\n args.search_str: string the tag search string\n args.current_user_id: id | null The user id making the query\n args.limit: number the query limit of number of returns tracks\n args.offset: number the query offset for results\n args.user_tag_count: number The number of tracks with the query tag\n\n Returns:\n list of users sorted by followee count\n \"\"\"\n user_ids = (\n session.query(TagTrackUserMatview.owner_id)\n .filter(TagTrackUserMatview.tag == args[\"search_str\"].lower())\n .group_by(TagTrackUserMatview.owner_id)\n .having(func.count(TagTrackUserMatview.owner_id) >= args[\"user_tag_count\"])\n .all()\n )\n\n # user_ids is list of tuples - simplify to 1-D list\n user_ids = [i[0] for i in user_ids]\n\n users = (\n session.query(User)\n .filter(User.is_current == True, User.user_id.in_(user_ids))\n .all()\n )\n users = helpers.query_result_to_list(users)\n\n users = populate_user_metadata(session, user_ids, users, args[\"current_user_id\"])\n\n followee_sorted_users = sorted(\n users, key=lambda i: i[response_name_constants.follower_count], reverse=True\n )\n\n followee_sorted_users = followee_sorted_users[\n slice(args[\"offset\"], args[\"offset\"] + args[\"limit\"], 1)\n ]\n\n return 
followee_sorted_users","function_tokens":["def","search_user_tags","(","session",",","args",")",":","user_ids","=","(","session",".","query","(","TagTrackUserMatview",".","owner_id",")",".","filter","(","TagTrackUserMatview",".","tag","==","args","[","\"search_str\"","]",".","lower","(",")",")",".","group_by","(","TagTrackUserMatview",".","owner_id",")",".","having","(","func",".","count","(","TagTrackUserMatview",".","owner_id",")",">=","args","[","\"user_tag_count\"","]",")",".","all","(",")",")","# user_ids is list of tuples - simplify to 1-D list","user_ids","=","[","i","[","0","]","for","i","in","user_ids","]","users","=","(","session",".","query","(","User",")",".","filter","(","User",".","is_current","==","True",",","User",".","user_id",".","in_","(","user_ids",")",")",".","all","(",")",")","users","=","helpers",".","query_result_to_list","(","users",")","users","=","populate_user_metadata","(","session",",","user_ids",",","users",",","args","[","\"current_user_id\"","]",")","followee_sorted_users","=","sorted","(","users",",","key","=","lambda","i",":","i","[","response_name_constants",".","follower_count","]",",","reverse","=","True",")","followee_sorted_users","=","followee_sorted_users","[","slice","(","args","[","\"offset\"","]",",","args","[","\"offset\"","]","+","args","[","\"limit\"","]",",","1",")","]","return","followee_sorted_users"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/search_user_tags.py#L12-L56"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_cid_source.py","language":"python","identifier":"get_cid_source","parameters":"(cid)","argument_list":"","return_statement":"","docstring":"Returns the CID source (e.g. CID is a metadata hash, a cover photo, a track segment, etc.)\n\n Args: the observed CID","docstring_summary":"Returns the CID source (e.g. CID is a metadata hash, a cover photo, a track segment, etc.)","docstring_tokens":["Returns","the","CID","source","(","e",".","g",".","CID","is","a","metadata","hash","a","cover","photo","a","track","segment","etc",".",")"],"function":"def get_cid_source(cid):\n \"\"\"\n Returns the CID source (e.g. 
CID is a metadata hash, a cover photo, a track segment, etc.)\n\n Args: the observed CID\n \"\"\"\n if cid is None:\n raise exceptions.ArgumentError(\"Input CID is invalid\")\n\n have_lock = False\n update_lock = redis.lock(\"get_cid_source_lock\", blocking_timeout=25)\n\n try:\n # Attempt to acquire lock - do not block if unable to acquire\n have_lock = update_lock.acquire(blocking=False)\n response = []\n if have_lock:\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n # Check to see if CID is of any type but a segment\n cid_source_res = sqlalchemy.text(\n \"\"\"\n WITH cid_const AS (VALUES (:cid))\n SELECT * FROM\n (\n (\n SELECT\n \"user_id\" as \"id\",\n 'users' as \"table_name\",\n 'metadata_multihash' as \"type\",\n \"is_current\"\n FROM \"users\" WHERE (table cid_const) = \"metadata_multihash\"\n )\n UNION ALL\n (\n SELECT\n \"user_id\" as \"id\",\n 'users' as \"table_name\",\n 'profile_cover_images' as \"type\",\n \"is_current\"\n FROM\n \"users\"\n WHERE\n (table cid_const) in (\n \"profile_picture\",\n \"cover_photo\",\n \"profile_picture_sizes\",\n \"cover_photo_sizes\"\n )\n )\n UNION ALL\n (\n SELECT\n \"playlist_id\" as \"id\",\n 'playlists' as \"table_name\",\n 'playlist_image_multihash' as \"type\",\n \"is_current\"\n FROM\n \"playlists\"\n WHERE\n (table cid_const) in (\n \"playlist_image_sizes_multihash\",\n \"playlist_image_multihash\"\n )\n )\n UNION ALL\n (\n SELECT\n \"track_id\" as \"id\",\n 'tracks' as \"table_name\",\n 'track_metadata' as \"type\",\n \"is_current\"\n FROM\n \"tracks\"\n WHERE\n (table cid_const) = \"metadata_multihash\"\n )\n UNION ALL\n (\n SELECT\n \"track_id\" as \"id\",\n 'tracks' as \"table_name\",\n 'cover_art_size' as \"type\",\n \"is_current\"\n FROM\n \"tracks\"\n WHERE\n (table cid_const) = \"cover_art_sizes\"\n )\n ) as \"outer\"\n \"\"\"\n )\n cid_source = session.execute(cid_source_res, {\"cid\": cid}).fetchall()\n\n # If something is found, set response\n if len(cid_source) != 0:\n response = [dict(row) for row in cid_source]\n\n # If CID was not found, check to see if it is a type segment\n if len(response) == 0:\n cid_source_res = sqlalchemy.text(\n \"\"\"\n WITH cid_const AS (VALUES (:cid))\n SELECT\n \"track_id\" as \"id\",\n 'tracks' as \"table_name\",\n 'segment' as \"type\",\n \"is_current\"\n FROM\n (\n SELECT\n jb -> 'duration' as \"d\",\n jb -> 'multihash' :: varchar as \"cid\",\n \"track_id\",\n \"is_current\"\n FROM\n (\n SELECT\n jsonb_array_elements(\"track_segments\") as \"jb\",\n \"track_id\",\n \"is_current\"\n FROM\n \"tracks\"\n ) as a\n ) as a2\n WHERE\n \"cid\" ? 
(table cid_const)\n \"\"\"\n )\n\n cid_source = session.execute(\n cid_source_res, {\"cid\": cid}\n ).fetchall()\n\n # If something is found, set response\n if len(cid_source) != 0:\n response = [dict(row) for row in cid_source]\n else:\n logger.warning(\"get_cid_source | Failed to acquire get_cid_source_lock\")\n\n return response\n except Exception as e:\n logger.error(\"get_cid_source | Error with query: %s\", exc_info=True)\n raise e\n finally:\n if have_lock:\n update_lock.release()","function_tokens":["def","get_cid_source","(","cid",")",":","if","cid","is","None",":","raise","exceptions",".","ArgumentError","(","\"Input CID is invalid\"",")","have_lock","=","False","update_lock","=","redis",".","lock","(","\"get_cid_source_lock\"",",","blocking_timeout","=","25",")","try",":","# Attempt to acquire lock - do not block if unable to acquire","have_lock","=","update_lock",".","acquire","(","blocking","=","False",")","response","=","[","]","if","have_lock",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","# Check to see if CID is of any type but a segment","cid_source_res","=","sqlalchemy",".","text","(","\"\"\"\n WITH cid_const AS (VALUES (:cid))\n SELECT * FROM\n (\n (\n SELECT\n \"user_id\" as \"id\",\n 'users' as \"table_name\",\n 'metadata_multihash' as \"type\",\n \"is_current\"\n FROM \"users\" WHERE (table cid_const) = \"metadata_multihash\"\n )\n UNION ALL\n (\n SELECT\n \"user_id\" as \"id\",\n 'users' as \"table_name\",\n 'profile_cover_images' as \"type\",\n \"is_current\"\n FROM\n \"users\"\n WHERE\n (table cid_const) in (\n \"profile_picture\",\n \"cover_photo\",\n \"profile_picture_sizes\",\n \"cover_photo_sizes\"\n )\n )\n UNION ALL\n (\n SELECT\n \"playlist_id\" as \"id\",\n 'playlists' as \"table_name\",\n 'playlist_image_multihash' as \"type\",\n \"is_current\"\n FROM\n \"playlists\"\n WHERE\n (table cid_const) in (\n \"playlist_image_sizes_multihash\",\n \"playlist_image_multihash\"\n )\n )\n UNION ALL\n (\n SELECT\n \"track_id\" as \"id\",\n 'tracks' as \"table_name\",\n 'track_metadata' as \"type\",\n \"is_current\"\n FROM\n \"tracks\"\n WHERE\n (table cid_const) = \"metadata_multihash\"\n )\n UNION ALL\n (\n SELECT\n \"track_id\" as \"id\",\n 'tracks' as \"table_name\",\n 'cover_art_size' as \"type\",\n \"is_current\"\n FROM\n \"tracks\"\n WHERE\n (table cid_const) = \"cover_art_sizes\"\n )\n ) as \"outer\"\n \"\"\"",")","cid_source","=","session",".","execute","(","cid_source_res",",","{","\"cid\"",":","cid","}",")",".","fetchall","(",")","# If something is found, set response","if","len","(","cid_source",")","!=","0",":","response","=","[","dict","(","row",")","for","row","in","cid_source","]","# If CID was not found, check to see if it is a type segment","if","len","(","response",")","==","0",":","cid_source_res","=","sqlalchemy",".","text","(","\"\"\"\n WITH cid_const AS (VALUES (:cid))\n SELECT\n \"track_id\" as \"id\",\n 'tracks' as \"table_name\",\n 'segment' as \"type\",\n \"is_current\"\n FROM\n (\n SELECT\n jb -> 'duration' as \"d\",\n jb -> 'multihash' :: varchar as \"cid\",\n \"track_id\",\n \"is_current\"\n FROM\n (\n SELECT\n jsonb_array_elements(\"track_segments\") as \"jb\",\n \"track_id\",\n \"is_current\"\n FROM\n \"tracks\"\n ) as a\n ) as a2\n WHERE\n \"cid\" ? 
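The locking here follows redis-py's non-blocking lock idiom; the isolated sketch below (with an assumed local Redis client) shows the shape. acquire(blocking=False) returns False immediately when another worker holds the lock, so concurrent callers skip the expensive scan instead of queueing behind it.

import redis as redis_lib

redis = redis_lib.Redis(host="localhost", port=6379)  # assumed client setup
lock = redis.lock("get_cid_source_lock", blocking_timeout=25)
have_lock = lock.acquire(blocking=False)
try:
    if have_lock:
        pass  # run the guarded queries here
finally:
    if have_lock:
        lock.release()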
# discovery-provider/src/queries/search_config.py
# https://github.com/AudiusProject/audius-protocol/blob/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19/discovery-provider/src/queries/search_config.py#L39-L50

def set_search_similarity(cursor):
    """
    Sets the search similarity threshold to be used by the % operator in queries.
    https://www.postgresql.org/docs/9.6/pgtrgm.html

    Note: set_limit was replaced by pg_trgm.similarity_threshold in PG 9.6.
    https://stackoverflow.com/a/11250001/11435157
    """
    try:
        cursor.execute(f"SET pg_trgm.similarity_threshold = {min_search_similarity}")
    except Exception as e:
        logger.error(f"Unable to set similarity_threshold: {e}")
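What the setting changes, sketched against an invented table with a psycopg2-style cursor: once the threshold is set, the % operator matches rows whose trigram similarity to the search term exceeds it. The doubled %% is needed because a literal % operator appears in a parameterized query.

# Hedged sketch, not from the Audius source; table/column names invented.
cursor.execute("SET pg_trgm.similarity_threshold = 0.4")
cursor.execute(
    "SELECT handle FROM users WHERE handle %% %s",  # %% escapes the operator
    ("audius",),
)
rows = cursor.fetchall()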
# discovery-provider/src/queries/get_recommended_tracks.py
# https://github.com/AudiusProject/audius-protocol/blob/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19/discovery-provider/src/queries/get_recommended_tracks.py#L14-L37

def get_recommended_tracks(args, strategy):
    """Gets recommended tracks from trending by getting the currently cached
    tracks and then populating them."""
    exclusion_list = args.get("exclusion_list") or []
    time = args.get("time") if args.get("time") is not None else "week"
    current_user_id = args.get("user_id")
    args = {
        "time": time,
        "genre": args.get("genre", None),
        "with_users": True,
        "limit": args.get("limit"),
        "offset": 0,
    }

    # decode and add user_id if necessary
    if current_user_id:
        args["current_user_id"] = decode_string_id(current_user_id)

    tracks = get_trending_tracks(args, strategy)
    filtered_tracks = list(
        filter(lambda track: track["track_id"] not in exclusion_list, tracks)
    )

    random.shuffle(filtered_tracks)
    return list(map(extend_track, filtered_tracks))


# discovery-provider/src/queries/notifications.py
# https://github.com/AudiusProject/audius-protocol/blob/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19/discovery-provider/src/queries/notifications.py#L52-L111

def get_owner_id(session, entity_type, entity_id):
    """
    Fetches the owner user id of the requested entity_type/entity_id

    Args:
        session: (obj) sqlalchemy db session instance
        entity_type: (string) Must be either 'track' | 'album' | 'playlist'
        entity_id: (int) The id of the 'entity_type'

    Returns:
        owner_id: (int | None) The user id of the owner of the entity_type/entity_id
    """
    if entity_type == "track":
        owner_id_query = (
            session.query(Track.owner_id)
            .filter(
                Track.track_id == entity_id,
                Track.is_delete == False,
                Track.is_current == True,
            )
            .all()
        )
        if not owner_id_query:
            return None
        owner_id = owner_id_query[0][0]
        return owner_id

    if entity_type == "album":
        owner_id_query = (
            session.query(Playlist.playlist_owner_id)
            .filter(
                Playlist.playlist_id == entity_id,
                Playlist.is_delete == False,
                Playlist.is_current == True,
                Playlist.is_album == True,
            )
            .all()
        )
        if not owner_id_query:
            return None
        owner_id = owner_id_query[0][0]
        return owner_id

    if entity_type == "playlist":
        owner_id_query = (
            session.query(Playlist.playlist_owner_id)
            .filter(
                Playlist.playlist_id == entity_id,
                Playlist.is_delete == False,
                Playlist.is_current == True,
                Playlist.is_album == False,
            )
            .all()
        )
        if not owner_id_query:
            return None
        owner_id = owner_id_query[0][0]
        return owner_id

    return None
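A quick illustration of the dispatch above (ids invented, db handle obtained as elsewhere in this module). Albums and playlists share the Playlist table, with is_album as the discriminator, which is why those two branches differ only in that one filter.

with db.scoped_session() as session:
    track_owner = get_owner_id(session, "track", 42)
    album_owner = get_owner_id(session, "album", 7)
    unknown = get_owner_id(session, "user", 1)  # unrecognized type -> None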
# discovery-provider/src/queries/notifications.py
# https://github.com/AudiusProject/audius-protocol/blob/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19/discovery-provider/src/queries/notifications.py#L114-L198

def get_cosign_remix_notifications(session, max_block_number, remix_tracks):
    """
    Get the notifications for remix tracks that are reposted/favorited by the parent remix author

    Args:
        session: (DB)
        max_block_number: (int) currently unused here
        remix_tracks: (Array<{ }>)
            'user_id'
            'item_id'
            const.notification_blocknumber
            const.notification_timestamp
            'item_owner_id'

    Returns:
        Array of cosign notifications

    """
    if not remix_tracks:
        return []

    remix_notifications = []
    remix_track_ids = [r["item_id"] for r in remix_tracks]

    # Query for all the parent tracks of the remix tracks
    tracks_subquery = (
        session.query(Track)
        .filter(
            Track.is_unlisted == False,
            Track.is_delete == False,
            Track.is_current == True,
        )
        .subquery()
    )

    parent_tracks = (
        session.query(
            Remix.child_track_id, Remix.parent_track_id, tracks_subquery.c.owner_id
        )
        .join(tracks_subquery, Remix.parent_track_id == tracks_subquery.c.track_id)
        .filter(Remix.child_track_id.in_(remix_track_ids))
        .all()
    )
    # Mapping of parent track users to child track to parent track
    parent_track_users_to_remixes = {}
    for track_parent in parent_tracks:
        [remix_track_id, remix_parent_id, remix_parent_user_id] = track_parent
        if remix_parent_user_id not in parent_track_users_to_remixes:
            parent_track_users_to_remixes[remix_parent_user_id] = {
                remix_track_id: remix_parent_id
            }
        else:
            parent_track_users_to_remixes[remix_parent_user_id][
                remix_track_id
            ] = remix_parent_id

    for remix_track in remix_tracks:
        user_id = remix_track["user_id"]
        track_id = remix_track["item_id"]

        # A cosign occurred if the user who favorited/reposted the remix
        # owns one of its parent tracks.
        if (
            user_id in parent_track_users_to_remixes
            and track_id in parent_track_users_to_remixes[user_id]
        ):
            remix_notifications.append(
                {
                    const.notification_type: const.notification_type_remix_cosign,
                    const.notification_blocknumber: remix_track[
                        const.notification_blocknumber
                    ],
                    const.notification_timestamp: remix_track[
                        const.notification_timestamp
                    ],
                    const.notification_initiator: user_id,
                    const.notification_metadata: {
                        const.notification_entity_id: track_id,
                        const.notification_entity_type: "track",
                        const.notification_entity_owner_id: remix_track[
                            "item_owner_id"
                        ],
                    },
                }
            )

    return remix_notifications
playlist id of a playlist that is reposted)\n entity_type?: (string) the type of the target entity\n entity_owner_id?: (int) the id of the target entity's owner (if applicable)\n playlist_update_timestamp?: (string) timestamp of last update of a given playlist\n playlist_update_users?: (array) user ids which favorited a given playlist\n\n info: Dictionary of metadata w\/ min_block_number & max_block_number fields\n\n milestones: Dictionary mapping of follows\/reposts\/favorites (processed within the blocks params)\n Root fields:\n follower_counts: Contains a dictionary of user id => follower count (up to the max_block_number)\n repost_counts: Contains a dictionary tracks\/albums\/playlists of id to repost count\n favorite_counts: Contains a dictionary tracks\/albums\/playlists of id to favorite count\n\n owners: Dictionary containing the mapping for track id \/ playlist id \/ album -> owner user id\n The root keys are 'tracks', 'playlists', 'albums' and each contains the id to owner id mapping","docstring_summary":"Fetches the notifications events that occurred between the given block numbers","docstring_tokens":["Fetches","the","notifications","events","that","occurred","between","the","given","block","numbers"],"function":"def notifications():\n \"\"\"\n Fetches the notifications events that occurred between the given block numbers\n\n URL Params:\n min_block_number: (int) The start block number for querying for notifications\n max_block_number?: (int) The end block number for querying for notifications\n track_id?: (Array) Array of track id for fetching the track's owner id\n and adding the track id to owner user id mapping to the `owners` response field\n NOTE: this is added for notification for listen counts\n\n Response - Json object w\/ the following fields\n notifications: Array of notifications of shape:\n type: 'Follow' | 'Favorite' | 'Repost' | 'Create' | 'RemixCreate' | 'RemixCosign' | 'PlaylistUpdate'\n blocknumber: (int) blocknumber of notification\n timestamp: (string) timestamp of notification\n initiator: (int) the user id that caused this notification\n metadata?: (any) additional information about the notification\n entity_id?: (int) the id of the target entity (ie. 
playlist id of a playlist that is reposted)\n entity_type?: (string) the type of the target entity\n entity_owner_id?: (int) the id of the target entity's owner (if applicable)\n playlist_update_timestamp?: (string) timestamp of last update of a given playlist\n playlist_update_users?: (array) user ids which favorited a given playlist\n\n info: Dictionary of metadata w\/ min_block_number & max_block_number fields\n\n milestones: Dictionary mapping of follows\/reposts\/favorites (processed within the blocks params)\n Root fields:\n follower_counts: Contains a dictionary of user id => follower count (up to the max_block_number)\n repost_counts: Contains a dictionary tracks\/albums\/playlists of id to repost count\n favorite_counts: Contains a dictionary tracks\/albums\/playlists of id to favorite count\n\n owners: Dictionary containing the mapping for track id \/ playlist id \/ album -> owner user id\n The root keys are 'tracks', 'playlists', 'albums' and each contains the id to owner id mapping\n \"\"\"\n\n db = get_db_read_replica()\n min_block_number = request.args.get(\"min_block_number\", type=int)\n max_block_number = request.args.get(\"max_block_number\", type=int)\n\n track_ids_to_owner = []\n try:\n track_ids_str_list = request.args.getlist(\"track_id\")\n track_ids_to_owner = [int(y) for y in track_ids_str_list]\n except Exception as e:\n logger.error(f\"Failed to retrieve track list {e}\")\n\n # Max block number is not explicitly required (yet)\n if not min_block_number and min_block_number != 0:\n return api_helpers.error_response({\"msg\": \"Missing min block number\"}, 400)\n\n if not max_block_number:\n max_block_number = min_block_number + max_block_diff\n elif (max_block_number - min_block_number) > max_block_diff:\n max_block_number = min_block_number + max_block_diff\n\n with db.scoped_session() as session:\n current_block_query = session.query(Block).filter_by(is_current=True)\n current_block_query_results = current_block_query.all()\n current_block = current_block_query_results[0]\n current_max_block_num = current_block.number\n if current_max_block_num < max_block_number:\n max_block_number = current_max_block_num\n\n notification_metadata = {\n \"min_block_number\": min_block_number,\n \"max_block_number\": max_block_number,\n }\n\n # Retrieve milestones statistics\n milestone_info = {}\n\n # Cache owner info for network entities and pass in w\/results\n owner_info = {const.tracks: {}, const.albums: {}, const.playlists: {}}\n\n start_time = datetime.now()\n logger.info(f\"notifications.py | start_time ${start_time}\")\n\n # List of notifications generated from current protocol state\n notifications_unsorted = []\n with db.scoped_session() as session:\n #\n # Query relevant follow information\n #\n follow_query = session.query(Follow)\n\n # Impose min block number restriction\n follow_query = follow_query.filter(\n Follow.is_current == True,\n Follow.is_delete == False,\n Follow.blocknumber > min_block_number,\n Follow.blocknumber <= max_block_number,\n )\n\n follow_results = follow_query.all()\n # Used to retrieve follower counts for this window\n followed_users = []\n # Represents all follow notifications\n follow_notifications = []\n for entry in follow_results:\n follow_notif = {\n const.notification_type: const.notification_type_follow,\n const.notification_blocknumber: entry.blocknumber,\n const.notification_timestamp: entry.created_at,\n const.notification_initiator: entry.follower_user_id,\n const.notification_metadata: {\n const.notification_follower_id: 
entry.follower_user_id,\n const.notification_followee_id: entry.followee_user_id,\n },\n }\n follow_notifications.append(follow_notif)\n # Add every user who gained a new follower\n followed_users.append(entry.followee_user_id)\n\n # Query count for any user w\/new followers\n follower_counts = get_follower_count_dict(\n session, followed_users, max_block_number\n )\n milestone_info[\"follower_counts\"] = follower_counts\n\n notifications_unsorted.extend(follow_notifications)\n\n logger.info(f\"notifications.py | followers at {datetime.now() - start_time}\")\n\n #\n # Query relevant favorite information\n #\n favorites_query = session.query(Save)\n favorites_query = favorites_query.filter(\n Save.is_current == True,\n Save.is_delete == False,\n Save.blocknumber > min_block_number,\n Save.blocknumber <= max_block_number,\n )\n favorite_results = favorites_query.all()\n\n # ID lists to query count aggregates\n favorited_track_ids = []\n favorited_album_ids = []\n favorited_playlist_ids = []\n\n # List of favorite notifications\n favorite_notifications = []\n favorite_remix_tracks = []\n\n for entry in favorite_results:\n favorite_notif = {\n const.notification_type: const.notification_type_favorite,\n const.notification_blocknumber: entry.blocknumber,\n const.notification_timestamp: entry.created_at,\n const.notification_initiator: entry.user_id,\n }\n save_type = entry.save_type\n save_item_id = entry.save_item_id\n metadata = {\n const.notification_entity_type: save_type,\n const.notification_entity_id: save_item_id,\n }\n\n # NOTE if deleted, the favorite can still exist\n # TODO: Can we aggregate all owner queries and perform at once...?\n if save_type == SaveType.track:\n owner_id = get_owner_id(session, \"track\", save_item_id)\n if not owner_id:\n continue\n metadata[const.notification_entity_owner_id] = owner_id\n favorited_track_ids.append(save_item_id)\n owner_info[const.tracks][save_item_id] = owner_id\n\n favorite_remix_tracks.append(\n {\n const.notification_blocknumber: entry.blocknumber,\n const.notification_timestamp: entry.created_at,\n \"user_id\": entry.user_id,\n \"item_owner_id\": owner_id,\n \"item_id\": save_item_id,\n }\n )\n\n elif save_type == SaveType.album:\n owner_id = get_owner_id(session, \"album\", save_item_id)\n if not owner_id:\n continue\n metadata[const.notification_entity_owner_id] = owner_id\n favorited_album_ids.append(save_item_id)\n owner_info[const.albums][save_item_id] = owner_id\n\n elif save_type == SaveType.playlist:\n owner_id = get_owner_id(session, \"playlist\", save_item_id)\n if not owner_id:\n continue\n metadata[const.notification_entity_owner_id] = owner_id\n favorited_playlist_ids.append(save_item_id)\n owner_info[const.playlists][save_item_id] = owner_id\n\n favorite_notif[const.notification_metadata] = metadata\n favorite_notifications.append(favorite_notif)\n notifications_unsorted.extend(favorite_notifications)\n\n track_favorite_dict = {}\n album_favorite_dict = {}\n playlist_favorite_dict = {}\n\n if favorited_track_ids:\n track_favorite_counts = get_save_counts(\n session,\n False,\n False,\n favorited_track_ids,\n [SaveType.track],\n max_block_number,\n )\n track_favorite_dict = dict(track_favorite_counts)\n\n favorite_remix_notifications = get_cosign_remix_notifications(\n session, max_block_number, favorite_remix_tracks\n )\n notifications_unsorted.extend(favorite_remix_notifications)\n\n if favorited_album_ids:\n album_favorite_counts = get_save_counts(\n session,\n False,\n False,\n favorited_album_ids,\n [SaveType.album],\n 
max_block_number,\n )\n album_favorite_dict = dict(album_favorite_counts)\n\n if favorited_playlist_ids:\n playlist_favorite_counts = get_save_counts(\n session,\n False,\n False,\n favorited_playlist_ids,\n [SaveType.playlist],\n max_block_number,\n )\n playlist_favorite_dict = dict(playlist_favorite_counts)\n\n milestone_info[const.notification_favorite_counts] = {}\n milestone_info[const.notification_favorite_counts][\n const.tracks\n ] = track_favorite_dict\n milestone_info[const.notification_favorite_counts][\n const.albums\n ] = album_favorite_dict\n milestone_info[const.notification_favorite_counts][\n const.playlists\n ] = playlist_favorite_dict\n\n logger.info(f\"notifications.py | favorites at {datetime.now() - start_time}\")\n\n #\n # Query relevant tier change information\n #\n balance_change_query = session.query(UserBalanceChange)\n\n # Impose min block number restriction\n balance_change_query = balance_change_query.filter(\n UserBalanceChange.blocknumber > min_block_number,\n UserBalanceChange.blocknumber <= max_block_number,\n )\n\n balance_change_results = balance_change_query.all()\n tier_change_notifications = []\n\n for entry in balance_change_results:\n prev = int(entry.previous_balance)\n current = int(entry.current_balance)\n # Check for a tier change and add to tier_change_notification\n tier = None\n if prev < 100000 <= current:\n tier = \"platinum\"\n elif prev < 10000 <= current:\n tier = \"gold\"\n elif prev < 100 <= current:\n tier = \"silver\"\n elif prev < 10 <= current:\n tier = \"bronze\"\n\n if tier is not None:\n tier_change_notif = {\n const.notification_type: const.notification_type_tier_change,\n const.notification_blocknumber: entry.blocknumber,\n const.notification_timestamp: datetime.now(),\n const.notification_initiator: entry.user_id,\n const.notification_metadata: {\n const.notification_tier: tier,\n },\n }\n tier_change_notifications.append(tier_change_notif)\n\n notifications_unsorted.extend(tier_change_notifications)\n\n logger.info(\n f\"notifications.py | balance change at {datetime.now() - start_time}\"\n )\n\n #\n # Query relevant repost information\n #\n repost_query = session.query(Repost)\n repost_query = repost_query.filter(\n Repost.is_current == True,\n Repost.is_delete == False,\n Repost.blocknumber > min_block_number,\n Repost.blocknumber <= max_block_number,\n )\n repost_results = repost_query.all()\n\n # ID lists to query counts\n reposted_track_ids = []\n reposted_album_ids = []\n reposted_playlist_ids = []\n\n # List of repost notifications\n repost_notifications = []\n\n # List of repost notifications\n repost_remix_notifications = []\n repost_remix_tracks = []\n\n for entry in repost_results:\n repost_notif = {\n const.notification_type: const.notification_type_repost,\n const.notification_blocknumber: entry.blocknumber,\n const.notification_timestamp: entry.created_at,\n const.notification_initiator: entry.user_id,\n }\n repost_type = entry.repost_type\n repost_item_id = entry.repost_item_id\n metadata = {\n const.notification_entity_type: repost_type,\n const.notification_entity_id: repost_item_id,\n }\n if repost_type == RepostType.track:\n owner_id = get_owner_id(session, \"track\", repost_item_id)\n if not owner_id:\n continue\n metadata[const.notification_entity_owner_id] = owner_id\n reposted_track_ids.append(repost_item_id)\n owner_info[const.tracks][repost_item_id] = owner_id\n repost_remix_tracks.append(\n {\n const.notification_blocknumber: entry.blocknumber,\n const.notification_timestamp: entry.created_at,\n 
\"user_id\": entry.user_id,\n \"item_owner_id\": owner_id,\n \"item_id\": repost_item_id,\n }\n )\n\n elif repost_type == RepostType.album:\n owner_id = get_owner_id(session, \"album\", repost_item_id)\n if not owner_id:\n continue\n metadata[const.notification_entity_owner_id] = owner_id\n reposted_album_ids.append(repost_item_id)\n owner_info[const.albums][repost_item_id] = owner_id\n\n elif repost_type == RepostType.playlist:\n owner_id = get_owner_id(session, \"playlist\", repost_item_id)\n if not owner_id:\n continue\n metadata[const.notification_entity_owner_id] = owner_id\n reposted_playlist_ids.append(repost_item_id)\n owner_info[const.playlists][repost_item_id] = owner_id\n\n repost_notif[const.notification_metadata] = metadata\n repost_notifications.append(repost_notif)\n\n # Append repost notifications\n notifications_unsorted.extend(repost_notifications)\n\n track_repost_count_dict = {}\n album_repost_count_dict = {}\n playlist_repost_count_dict = {}\n\n # Aggregate repost counts for relevant fields\n # Used to notify users of entity-specific milestones\n if reposted_track_ids:\n track_repost_counts = get_repost_counts(\n session,\n False,\n False,\n reposted_track_ids,\n [RepostType.track],\n max_block_number,\n )\n track_repost_count_dict = dict(track_repost_counts)\n\n repost_remix_notifications = get_cosign_remix_notifications(\n session, max_block_number, repost_remix_tracks\n )\n notifications_unsorted.extend(repost_remix_notifications)\n\n if reposted_album_ids:\n album_repost_counts = get_repost_counts(\n session,\n False,\n False,\n reposted_album_ids,\n [RepostType.album],\n max_block_number,\n )\n album_repost_count_dict = dict(album_repost_counts)\n\n if reposted_playlist_ids:\n playlist_repost_counts = get_repost_counts(\n session,\n False,\n False,\n reposted_playlist_ids,\n [RepostType.playlist],\n max_block_number,\n )\n playlist_repost_count_dict = dict(playlist_repost_counts)\n\n milestone_info[const.notification_repost_counts] = {}\n milestone_info[const.notification_repost_counts][\n const.tracks\n ] = track_repost_count_dict\n milestone_info[const.notification_repost_counts][\n const.albums\n ] = album_repost_count_dict\n milestone_info[const.notification_repost_counts][\n const.playlists\n ] = playlist_repost_count_dict\n\n # Query relevant created entity notification - tracks\/albums\/playlists\n created_notifications = []\n\n logger.info(f\"notifications.py | reposts at {datetime.now() - start_time}\")\n\n #\n # Query relevant created tracks for remix information\n #\n remix_created_notifications = []\n\n # Aggregate track notifs\n tracks_query = session.query(Track)\n # TODO: Is it valid to use Track.is_current here? 
Might not be the right info...\n tracks_query = tracks_query.filter(\n Track.is_unlisted == False,\n Track.is_delete == False,\n Track.stem_of == None,\n Track.blocknumber > min_block_number,\n Track.blocknumber <= max_block_number,\n )\n tracks_query = tracks_query.filter(Track.created_at == Track.updated_at)\n track_results = tracks_query.all()\n for entry in track_results:\n track_notif = {\n const.notification_type: const.notification_type_create,\n const.notification_blocknumber: entry.blocknumber,\n const.notification_timestamp: entry.created_at,\n const.notification_initiator: entry.owner_id,\n # TODO: is entity owner id necessary for tracks?\n const.notification_metadata: {\n const.notification_entity_type: \"track\",\n const.notification_entity_id: entry.track_id,\n const.notification_entity_owner_id: entry.owner_id,\n },\n }\n created_notifications.append(track_notif)\n\n if entry.remix_of:\n # Add notification to remix track owner\n parent_remix_tracks = [\n t[\"parent_track_id\"] for t in entry.remix_of[\"tracks\"]\n ]\n remix_track_parents = (\n session.query(Track.owner_id, Track.track_id)\n .filter(\n Track.track_id.in_(parent_remix_tracks),\n Track.is_unlisted == False,\n Track.is_delete == False,\n Track.is_current == True,\n )\n .all()\n )\n for remix_track_parent in remix_track_parents:\n [\n remix_track_parent_owner,\n remix_track_parent_id,\n ] = remix_track_parent\n remix_notif = {\n const.notification_type: const.notification_type_remix_create,\n const.notification_blocknumber: entry.blocknumber,\n const.notification_timestamp: entry.created_at,\n const.notification_initiator: entry.owner_id,\n # TODO: is entity owner id necessary for tracks?\n const.notification_metadata: {\n const.notification_entity_type: \"track\",\n const.notification_entity_id: entry.track_id,\n const.notification_entity_owner_id: entry.owner_id,\n const.notification_remix_parent_track_user_id: remix_track_parent_owner,\n const.notification_remix_parent_track_id: remix_track_parent_id,\n },\n }\n remix_created_notifications.append(remix_notif)\n\n logger.info(f\"notifications.py | remixes at {datetime.now() - start_time}\")\n\n # Handle track update notifications\n # TODO: Consider switching blocknumber for updated at?\n updated_tracks_query = session.query(Track)\n updated_tracks_query = updated_tracks_query.filter(\n Track.is_unlisted == False,\n Track.stem_of == None,\n Track.created_at != Track.updated_at,\n Track.blocknumber > min_block_number,\n Track.blocknumber <= max_block_number,\n )\n updated_tracks = updated_tracks_query.all()\n\n prev_tracks = get_prev_track_entries(updated_tracks)\n\n for prev_entry in prev_tracks:\n entry = next(t for t in updated_tracks if t.track_id == prev_entry.track_id)\n logger.info(\n f\"notifications.py | single track update {entry.track_id} {entry.blocknumber} {datetime.now() - start_time}\"\n )\n\n # Tracks that were unlisted and turned to public\n if prev_entry.is_unlisted == True:\n logger.info(\n f\"notifications.py | single track update to public {datetime.now() - start_time}\"\n )\n track_notif = {\n const.notification_type: const.notification_type_create,\n const.notification_blocknumber: entry.blocknumber,\n const.notification_timestamp: entry.created_at,\n const.notification_initiator: entry.owner_id,\n # TODO: is entity owner id necessary for tracks?\n const.notification_metadata: {\n const.notification_entity_type: \"track\",\n const.notification_entity_id: entry.track_id,\n const.notification_entity_owner_id: entry.owner_id,\n },\n }\n 
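One readability note on the update loop above: pairing each `prev_entry` with its current row via `next(...)` rescans `updated_tracks` for every previous entry, which is quadratic in the number of updated tracks. An equivalent constant-time pairing, assuming `track_id` is unique within `updated_tracks`:

```python
# Index the current rows once, then pair each previous entry in O(1).
updated_by_id = {t.track_id: t for t in updated_tracks}
for prev_entry in prev_tracks:
    entry = updated_by_id[prev_entry.track_id]
    # ... same unlisted -> public and remix checks as above ...
```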
created_notifications.append(track_notif)\n\n # Tracks that were not remixes and turned into remixes\n if not prev_entry.remix_of and entry.remix_of:\n # Add notification to remix track owner\n parent_remix_tracks = [\n t[\"parent_track_id\"] for t in entry.remix_of[\"tracks\"]\n ]\n remix_track_parents = (\n session.query(Track.owner_id, Track.track_id)\n .filter(\n Track.track_id.in_(parent_remix_tracks),\n Track.is_unlisted == False,\n Track.is_delete == False,\n Track.is_current == True,\n )\n .all()\n )\n logger.info(\n f\"notifications.py | single track update parents {remix_track_parents} {datetime.now() - start_time}\"\n )\n for remix_track_parent in remix_track_parents:\n [\n remix_track_parent_owner,\n remix_track_parent_id,\n ] = remix_track_parent\n remix_notif = {\n const.notification_type: const.notification_type_remix_create,\n const.notification_blocknumber: entry.blocknumber,\n const.notification_timestamp: entry.created_at,\n const.notification_initiator: entry.owner_id,\n # TODO: is entity owner id necessary for tracks?\n const.notification_metadata: {\n const.notification_entity_type: \"track\",\n const.notification_entity_id: entry.track_id,\n const.notification_entity_owner_id: entry.owner_id,\n const.notification_remix_parent_track_user_id: remix_track_parent_owner,\n const.notification_remix_parent_track_id: remix_track_parent_id,\n },\n }\n remix_created_notifications.append(remix_notif)\n\n notifications_unsorted.extend(remix_created_notifications)\n\n logger.info(\n f\"notifications.py | track updates at {datetime.now() - start_time}\"\n )\n\n # Aggregate playlist\/album notifs\n collection_query = session.query(Playlist)\n # TODO: Is it valid to use is_current here? Might not be the right info...\n collection_query = collection_query.filter(\n Playlist.is_delete == False,\n Playlist.is_private == False,\n Playlist.blocknumber > min_block_number,\n Playlist.blocknumber <= max_block_number,\n )\n collection_query = collection_query.filter(\n Playlist.created_at == Playlist.updated_at\n )\n collection_results = collection_query.all()\n\n for entry in collection_results:\n collection_notif = {\n const.notification_type: const.notification_type_create,\n const.notification_blocknumber: entry.blocknumber,\n const.notification_timestamp: entry.created_at,\n const.notification_initiator: entry.playlist_owner_id,\n }\n metadata = {\n const.notification_entity_id: entry.playlist_id,\n const.notification_entity_owner_id: entry.playlist_owner_id,\n const.notification_collection_content: entry.playlist_contents,\n }\n\n if entry.is_album:\n metadata[const.notification_entity_type] = \"album\"\n else:\n metadata[const.notification_entity_type] = \"playlist\"\n collection_notif[const.notification_metadata] = metadata\n created_notifications.append(collection_notif)\n\n # Playlists that were private and turned to public aka 'published'\n # TODO: Consider switching blocknumber for updated at?\n publish_playlists_query = session.query(Playlist)\n publish_playlists_query = publish_playlists_query.filter(\n Playlist.is_private == False,\n Playlist.created_at != Playlist.updated_at,\n Playlist.blocknumber > min_block_number,\n Playlist.blocknumber <= max_block_number,\n )\n publish_playlist_results = publish_playlists_query.all()\n for entry in publish_playlist_results:\n prev_entry_query = (\n session.query(Playlist)\n .filter(\n Playlist.playlist_id == entry.playlist_id,\n Playlist.blocknumber < entry.blocknumber,\n )\n .order_by(desc(Playlist.blocknumber))\n )\n # Previous 
private entry indicates transition to public, triggering a notification\n prev_entry = prev_entry_query.first()\n if prev_entry.is_private == True:\n publish_playlist_notif = {\n const.notification_type: const.notification_type_create,\n const.notification_blocknumber: entry.blocknumber,\n const.notification_timestamp: entry.created_at,\n const.notification_initiator: entry.playlist_owner_id,\n }\n metadata = {\n const.notification_entity_id: entry.playlist_id,\n const.notification_entity_owner_id: entry.playlist_owner_id,\n const.notification_collection_content: entry.playlist_contents,\n const.notification_entity_type: \"playlist\",\n }\n publish_playlist_notif[const.notification_metadata] = metadata\n created_notifications.append(publish_playlist_notif)\n\n notifications_unsorted.extend(created_notifications)\n\n logger.info(f\"notifications.py | playlists at {datetime.now() - start_time}\")\n\n # Get additional owner info as requested for listen counts\n tracks_owner_query = session.query(Track).filter(\n Track.is_current == True, Track.track_id.in_(track_ids_to_owner)\n )\n track_owner_results = tracks_owner_query.all()\n for entry in track_owner_results:\n owner = entry.owner_id\n track_id = entry.track_id\n owner_info[const.tracks][track_id] = owner\n\n # Get playlist updates\n today = date.today()\n thirty_days_ago = today - timedelta(days=30)\n thirty_days_ago_time = datetime(\n thirty_days_ago.year, thirty_days_ago.month, thirty_days_ago.day, 0, 0, 0\n )\n playlist_update_query = session.query(Playlist)\n playlist_update_query = playlist_update_query.filter(\n Playlist.is_current == True,\n Playlist.is_delete == False,\n Playlist.last_added_to >= thirty_days_ago_time,\n Playlist.blocknumber > min_block_number,\n Playlist.blocknumber <= max_block_number,\n )\n\n playlist_update_results = playlist_update_query.all()\n\n # Represents all playlist update notifications\n playlist_update_notifications = []\n playlist_update_notifs_by_playlist_id = {}\n for entry in playlist_update_results:\n playlist_update_notifs_by_playlist_id[entry.playlist_id] = {\n const.notification_type: const.notification_type_playlist_update,\n const.notification_blocknumber: entry.blocknumber,\n const.notification_timestamp: entry.created_at,\n const.notification_initiator: entry.playlist_owner_id,\n const.notification_metadata: {\n const.notification_entity_id: entry.playlist_id,\n const.notification_entity_type: \"playlist\",\n const.notification_playlist_update_timestamp: entry.last_added_to,\n },\n }\n\n # get all favorited playlists\n # playlists may have been favorited outside the blocknumber bounds\n # e.g. before the min_block_number\n playlist_favorites_query = session.query(Save)\n playlist_favorites_query = playlist_favorites_query.filter(\n Save.is_current == True,\n Save.is_delete == False,\n Save.save_type == SaveType.playlist,\n Save.save_item_id.in_(playlist_update_notifs_by_playlist_id.keys()),\n )\n playlist_favorites_results = playlist_favorites_query.all()\n\n # dictionary of playlist id => users that favorited said playlist\n # e.g. { playlist1: [user1, user2, ...], ... 
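The `thirty_days_ago_time` construction above builds midnight thirty days back in three steps; `datetime.combine` yields the same value more directly:

```python
from datetime import date, datetime, time, timedelta

# Equivalent to datetime(thirty_days_ago.year, ..., 0, 0, 0) above:
thirty_days_ago_time = datetime.combine(
    date.today() - timedelta(days=30), time.min  # midnight, 30 days back
)
```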
}\n # we need this dictionary to know which users need to be notified of a playlist update\n users_that_favorited_playlists_dict = ft.reduce(\n lambda accumulator, current: accumulator.update(\n {\n current.save_item_id: accumulator[current.save_item_id]\n + [current.user_id]\n if current.save_item_id in accumulator\n else [current.user_id]\n }\n )\n or accumulator,\n playlist_favorites_results,\n {},\n )\n\n for playlist_id in users_that_favorited_playlists_dict:\n # TODO: We probably do not need this check because we are filtering\n # playlist_favorites_query to only matching ids\n if playlist_id not in playlist_update_notifs_by_playlist_id:\n continue\n playlist_update_notif = playlist_update_notifs_by_playlist_id[playlist_id]\n playlist_update_notif[const.notification_metadata].update(\n {\n const.notification_playlist_update_users: users_that_favorited_playlists_dict[\n playlist_id\n ]\n }\n )\n playlist_update_notifications.append(playlist_update_notif)\n\n notifications_unsorted.extend(playlist_update_notifications)\n\n logger.info(\n f\"notifications.py | playlist updates at {datetime.now() - start_time}\"\n )\n\n # Final sort - TODO: can we sort by timestamp?\n sorted_notifications = sorted(\n notifications_unsorted,\n key=lambda i: i[const.notification_blocknumber],\n reverse=False,\n )\n\n return api_helpers.success_response(\n {\n \"notifications\": sorted_notifications,\n \"info\": notification_metadata,\n \"milestones\": milestone_info,\n \"owners\": owner_info,\n }\n )","function_tokens":["def","notifications","(",")",":","db","=","get_db_read_replica","(",")","min_block_number","=","request",".","args",".","get","(","\"min_block_number\"",",","type","=","int",")","max_block_number","=","request",".","args",".","get","(","\"max_block_number\"",",","type","=","int",")","track_ids_to_owner","=","[","]","try",":","track_ids_str_list","=","request",".","args",".","getlist","(","\"track_id\"",")","track_ids_to_owner","=","[","int","(","y",")","for","y","in","track_ids_str_list","]","except","Exception","as","e",":","logger",".","error","(","f\"Failed to retrieve track list {e}\"",")","# Max block number is not explicitly required (yet)","if","not","min_block_number","and","min_block_number","!=","0",":","return","api_helpers",".","error_response","(","{","\"msg\"",":","\"Missing min block number\"","}",",","400",")","if","not","max_block_number",":","max_block_number","=","min_block_number","+","max_block_diff","elif","(","max_block_number","-","min_block_number",")",">","max_block_diff",":","max_block_number","=","min_block_number","+","max_block_diff","with","db",".","scoped_session","(",")","as","session",":","current_block_query","=","session",".","query","(","Block",")",".","filter_by","(","is_current","=","True",")","current_block_query_results","=","current_block_query",".","all","(",")","current_block","=","current_block_query_results","[","0","]","current_max_block_num","=","current_block",".","number","if","current_max_block_num","<","max_block_number",":","max_block_number","=","current_max_block_num","notification_metadata","=","{","\"min_block_number\"",":","min_block_number",",","\"max_block_number\"",":","max_block_number",",","}","# Retrieve milestones statistics","milestone_info","=","{","}","# Cache owner info for network entities and pass in 
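The `ft.reduce` above threads a dict through a lambda with `dict.update(...) or accumulator`, which is correct but hard to read. An equivalent, clearer accumulation with `defaultdict` (same resulting mapping, same per-playlist ordering of user ids):

```python
from collections import defaultdict

# playlist_id -> list of user_ids that favorited that playlist
users_that_favorited_playlists_dict = defaultdict(list)
for save in playlist_favorites_results:
    users_that_favorited_playlists_dict[save.save_item_id].append(save.user_id)
```

As the nearby TODO notes, the later membership check against `playlist_update_notifs_by_playlist_id` is already guaranteed by the query's `in_` filter on those same ids.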
w\/results","owner_info","=","{","const",".","tracks",":","{","}",",","const",".","albums",":","{","}",",","const",".","playlists",":","{","}","}","start_time","=","datetime",".","now","(",")","logger",".","info","(","f\"notifications.py | start_time ${start_time}\"",")","# List of notifications generated from current protocol state","notifications_unsorted","=","[","]","with","db",".","scoped_session","(",")","as","session",":","#","# Query relevant follow information","#","follow_query","=","session",".","query","(","Follow",")","# Impose min block number restriction","follow_query","=","follow_query",".","filter","(","Follow",".","is_current","==","True",",","Follow",".","is_delete","==","False",",","Follow",".","blocknumber",">","min_block_number",",","Follow",".","blocknumber","<=","max_block_number",",",")","follow_results","=","follow_query",".","all","(",")","# Used to retrieve follower counts for this window","followed_users","=","[","]","# Represents all follow notifications","follow_notifications","=","[","]","for","entry","in","follow_results",":","follow_notif","=","{","const",".","notification_type",":","const",".","notification_type_follow",",","const",".","notification_blocknumber",":","entry",".","blocknumber",",","const",".","notification_timestamp",":","entry",".","created_at",",","const",".","notification_initiator",":","entry",".","follower_user_id",",","const",".","notification_metadata",":","{","const",".","notification_follower_id",":","entry",".","follower_user_id",",","const",".","notification_followee_id",":","entry",".","followee_user_id",",","}",",","}","follow_notifications",".","append","(","follow_notif",")","# Add every user who gained a new follower","followed_users",".","append","(","entry",".","followee_user_id",")","# Query count for any user w\/new followers","follower_counts","=","get_follower_count_dict","(","session",",","followed_users",",","max_block_number",")","milestone_info","[","\"follower_counts\"","]","=","follower_counts","notifications_unsorted",".","extend","(","follow_notifications",")","logger",".","info","(","f\"notifications.py | followers at {datetime.now() - start_time}\"",")","#","# Query relevant favorite information","#","favorites_query","=","session",".","query","(","Save",")","favorites_query","=","favorites_query",".","filter","(","Save",".","is_current","==","True",",","Save",".","is_delete","==","False",",","Save",".","blocknumber",">","min_block_number",",","Save",".","blocknumber","<=","max_block_number",",",")","favorite_results","=","favorites_query",".","all","(",")","# ID lists to query count aggregates","favorited_track_ids","=","[","]","favorited_album_ids","=","[","]","favorited_playlist_ids","=","[","]","# List of favorite notifications","favorite_notifications","=","[","]","favorite_remix_tracks","=","[","]","for","entry","in","favorite_results",":","favorite_notif","=","{","const",".","notification_type",":","const",".","notification_type_favorite",",","const",".","notification_blocknumber",":","entry",".","blocknumber",",","const",".","notification_timestamp",":","entry",".","created_at",",","const",".","notification_initiator",":","entry",".","user_id",",","}","save_type","=","entry",".","save_type","save_item_id","=","entry",".","save_item_id","metadata","=","{","const",".","notification_entity_type",":","save_type",",","const",".","notification_entity_id",":","save_item_id",",","}","# NOTE if deleted, the favorite can still exist","# TODO: Can we aggregate all owner queries and perform at 
once...?","if","save_type","==","SaveType",".","track",":","owner_id","=","get_owner_id","(","session",",","\"track\"",",","save_item_id",")","if","not","owner_id",":","continue","metadata","[","const",".","notification_entity_owner_id","]","=","owner_id","favorited_track_ids",".","append","(","save_item_id",")","owner_info","[","const",".","tracks","]","[","save_item_id","]","=","owner_id","favorite_remix_tracks",".","append","(","{","const",".","notification_blocknumber",":","entry",".","blocknumber",",","const",".","notification_timestamp",":","entry",".","created_at",",","\"user_id\"",":","entry",".","user_id",",","\"item_owner_id\"",":","owner_id",",","\"item_id\"",":","save_item_id",",","}",")","elif","save_type","==","SaveType",".","album",":","owner_id","=","get_owner_id","(","session",",","\"album\"",",","save_item_id",")","if","not","owner_id",":","continue","metadata","[","const",".","notification_entity_owner_id","]","=","owner_id","favorited_album_ids",".","append","(","save_item_id",")","owner_info","[","const",".","albums","]","[","save_item_id","]","=","owner_id","elif","save_type","==","SaveType",".","playlist",":","owner_id","=","get_owner_id","(","session",",","\"playlist\"",",","save_item_id",")","if","not","owner_id",":","continue","metadata","[","const",".","notification_entity_owner_id","]","=","owner_id","favorited_playlist_ids",".","append","(","save_item_id",")","owner_info","[","const",".","playlists","]","[","save_item_id","]","=","owner_id","favorite_notif","[","const",".","notification_metadata","]","=","metadata","favorite_notifications",".","append","(","favorite_notif",")","notifications_unsorted",".","extend","(","favorite_notifications",")","track_favorite_dict","=","{","}","album_favorite_dict","=","{","}","playlist_favorite_dict","=","{","}","if","favorited_track_ids",":","track_favorite_counts","=","get_save_counts","(","session",",","False",",","False",",","favorited_track_ids",",","[","SaveType",".","track","]",",","max_block_number",",",")","track_favorite_dict","=","dict","(","track_favorite_counts",")","favorite_remix_notifications","=","get_cosign_remix_notifications","(","session",",","max_block_number",",","favorite_remix_tracks",")","notifications_unsorted",".","extend","(","favorite_remix_notifications",")","if","favorited_album_ids",":","album_favorite_counts","=","get_save_counts","(","session",",","False",",","False",",","favorited_album_ids",",","[","SaveType",".","album","]",",","max_block_number",",",")","album_favorite_dict","=","dict","(","album_favorite_counts",")","if","favorited_playlist_ids",":","playlist_favorite_counts","=","get_save_counts","(","session",",","False",",","False",",","favorited_playlist_ids",",","[","SaveType",".","playlist","]",",","max_block_number",",",")","playlist_favorite_dict","=","dict","(","playlist_favorite_counts",")","milestone_info","[","const",".","notification_favorite_counts","]","=","{","}","milestone_info","[","const",".","notification_favorite_counts","]","[","const",".","tracks","]","=","track_favorite_dict","milestone_info","[","const",".","notification_favorite_counts","]","[","const",".","albums","]","=","album_favorite_dict","milestone_info","[","const",".","notification_favorite_counts","]","[","const",".","playlists","]","=","playlist_favorite_dict","logger",".","info","(","f\"notifications.py | favorites at {datetime.now() - start_time}\"",")","#","# Query relevant tier change information","#","balance_change_query","=","session",".","query","(","UserBalanceChange",")","# Impose min 
block number restriction","balance_change_query","=","balance_change_query",".","filter","(","UserBalanceChange",".","blocknumber",">","min_block_number",",","UserBalanceChange",".","blocknumber","<=","max_block_number",",",")","balance_change_results","=","balance_change_query",".","all","(",")","tier_change_notifications","=","[","]","for","entry","in","balance_change_results",":","prev","=","int","(","entry",".","previous_balance",")","current","=","int","(","entry",".","current_balance",")","# Check for a tier change and add to tier_change_notification","tier","=","None","if","prev","<","100000","<=","current",":","tier","=","\"platinum\"","elif","prev","<","10000","<=","current",":","tier","=","\"gold\"","elif","prev","<","100","<=","current",":","tier","=","\"silver\"","elif","prev","<","10","<=","current",":","tier","=","\"bronze\"","if","tier","is","not","None",":","tier_change_notif","=","{","const",".","notification_type",":","const",".","notification_type_tier_change",",","const",".","notification_blocknumber",":","entry",".","blocknumber",",","const",".","notification_timestamp",":","datetime",".","now","(",")",",","const",".","notification_initiator",":","entry",".","user_id",",","const",".","notification_metadata",":","{","const",".","notification_tier",":","tier",",","}",",","}","tier_change_notifications",".","append","(","tier_change_notif",")","notifications_unsorted",".","extend","(","tier_change_notifications",")","logger",".","info","(","f\"notifications.py | balance change at {datetime.now() - start_time}\"",")","#","# Query relevant repost information","#","repost_query","=","session",".","query","(","Repost",")","repost_query","=","repost_query",".","filter","(","Repost",".","is_current","==","True",",","Repost",".","is_delete","==","False",",","Repost",".","blocknumber",">","min_block_number",",","Repost",".","blocknumber","<=","max_block_number",",",")","repost_results","=","repost_query",".","all","(",")","# ID lists to query counts","reposted_track_ids","=","[","]","reposted_album_ids","=","[","]","reposted_playlist_ids","=","[","]","# List of repost notifications","repost_notifications","=","[","]","# List of repost 
notifications","repost_remix_notifications","=","[","]","repost_remix_tracks","=","[","]","for","entry","in","repost_results",":","repost_notif","=","{","const",".","notification_type",":","const",".","notification_type_repost",",","const",".","notification_blocknumber",":","entry",".","blocknumber",",","const",".","notification_timestamp",":","entry",".","created_at",",","const",".","notification_initiator",":","entry",".","user_id",",","}","repost_type","=","entry",".","repost_type","repost_item_id","=","entry",".","repost_item_id","metadata","=","{","const",".","notification_entity_type",":","repost_type",",","const",".","notification_entity_id",":","repost_item_id",",","}","if","repost_type","==","RepostType",".","track",":","owner_id","=","get_owner_id","(","session",",","\"track\"",",","repost_item_id",")","if","not","owner_id",":","continue","metadata","[","const",".","notification_entity_owner_id","]","=","owner_id","reposted_track_ids",".","append","(","repost_item_id",")","owner_info","[","const",".","tracks","]","[","repost_item_id","]","=","owner_id","repost_remix_tracks",".","append","(","{","const",".","notification_blocknumber",":","entry",".","blocknumber",",","const",".","notification_timestamp",":","entry",".","created_at",",","\"user_id\"",":","entry",".","user_id",",","\"item_owner_id\"",":","owner_id",",","\"item_id\"",":","repost_item_id",",","}",")","elif","repost_type","==","RepostType",".","album",":","owner_id","=","get_owner_id","(","session",",","\"album\"",",","repost_item_id",")","if","not","owner_id",":","continue","metadata","[","const",".","notification_entity_owner_id","]","=","owner_id","reposted_album_ids",".","append","(","repost_item_id",")","owner_info","[","const",".","albums","]","[","repost_item_id","]","=","owner_id","elif","repost_type","==","RepostType",".","playlist",":","owner_id","=","get_owner_id","(","session",",","\"playlist\"",",","repost_item_id",")","if","not","owner_id",":","continue","metadata","[","const",".","notification_entity_owner_id","]","=","owner_id","reposted_playlist_ids",".","append","(","repost_item_id",")","owner_info","[","const",".","playlists","]","[","repost_item_id","]","=","owner_id","repost_notif","[","const",".","notification_metadata","]","=","metadata","repost_notifications",".","append","(","repost_notif",")","# Append repost notifications","notifications_unsorted",".","extend","(","repost_notifications",")","track_repost_count_dict","=","{","}","album_repost_count_dict","=","{","}","playlist_repost_count_dict","=","{","}","# Aggregate repost counts for relevant fields","# Used to notify users of entity-specific 
milestones","if","reposted_track_ids",":","track_repost_counts","=","get_repost_counts","(","session",",","False",",","False",",","reposted_track_ids",",","[","RepostType",".","track","]",",","max_block_number",",",")","track_repost_count_dict","=","dict","(","track_repost_counts",")","repost_remix_notifications","=","get_cosign_remix_notifications","(","session",",","max_block_number",",","repost_remix_tracks",")","notifications_unsorted",".","extend","(","repost_remix_notifications",")","if","reposted_album_ids",":","album_repost_counts","=","get_repost_counts","(","session",",","False",",","False",",","reposted_album_ids",",","[","RepostType",".","album","]",",","max_block_number",",",")","album_repost_count_dict","=","dict","(","album_repost_counts",")","if","reposted_playlist_ids",":","playlist_repost_counts","=","get_repost_counts","(","session",",","False",",","False",",","reposted_playlist_ids",",","[","RepostType",".","playlist","]",",","max_block_number",",",")","playlist_repost_count_dict","=","dict","(","playlist_repost_counts",")","milestone_info","[","const",".","notification_repost_counts","]","=","{","}","milestone_info","[","const",".","notification_repost_counts","]","[","const",".","tracks","]","=","track_repost_count_dict","milestone_info","[","const",".","notification_repost_counts","]","[","const",".","albums","]","=","album_repost_count_dict","milestone_info","[","const",".","notification_repost_counts","]","[","const",".","playlists","]","=","playlist_repost_count_dict","# Query relevant created entity notification - tracks\/albums\/playlists","created_notifications","=","[","]","logger",".","info","(","f\"notifications.py | reposts at {datetime.now() - start_time}\"",")","#","# Query relevant created tracks for remix information","#","remix_created_notifications","=","[","]","# Aggregate track notifs","tracks_query","=","session",".","query","(","Track",")","# TODO: Is it valid to use Track.is_current here? 
Might not be the right info...","tracks_query","=","tracks_query",".","filter","(","Track",".","is_unlisted","==","False",",","Track",".","is_delete","==","False",",","Track",".","stem_of","==","None",",","Track",".","blocknumber",">","min_block_number",",","Track",".","blocknumber","<=","max_block_number",",",")","tracks_query","=","tracks_query",".","filter","(","Track",".","created_at","==","Track",".","updated_at",")","track_results","=","tracks_query",".","all","(",")","for","entry","in","track_results",":","track_notif","=","{","const",".","notification_type",":","const",".","notification_type_create",",","const",".","notification_blocknumber",":","entry",".","blocknumber",",","const",".","notification_timestamp",":","entry",".","created_at",",","const",".","notification_initiator",":","entry",".","owner_id",",","# TODO: is entity owner id necessary for tracks?","const",".","notification_metadata",":","{","const",".","notification_entity_type",":","\"track\"",",","const",".","notification_entity_id",":","entry",".","track_id",",","const",".","notification_entity_owner_id",":","entry",".","owner_id",",","}",",","}","created_notifications",".","append","(","track_notif",")","if","entry",".","remix_of",":","# Add notification to remix track owner","parent_remix_tracks","=","[","t","[","\"parent_track_id\"","]","for","t","in","entry",".","remix_of","[","\"tracks\"","]","]","remix_track_parents","=","(","session",".","query","(","Track",".","owner_id",",","Track",".","track_id",")",".","filter","(","Track",".","track_id",".","in_","(","parent_remix_tracks",")",",","Track",".","is_unlisted","==","False",",","Track",".","is_delete","==","False",",","Track",".","is_current","==","True",",",")",".","all","(",")",")","for","remix_track_parent","in","remix_track_parents",":","[","remix_track_parent_owner",",","remix_track_parent_id",",","]","=","remix_track_parent","remix_notif","=","{","const",".","notification_type",":","const",".","notification_type_remix_create",",","const",".","notification_blocknumber",":","entry",".","blocknumber",",","const",".","notification_timestamp",":","entry",".","created_at",",","const",".","notification_initiator",":","entry",".","owner_id",",","# TODO: is entity owner id necessary for tracks?","const",".","notification_metadata",":","{","const",".","notification_entity_type",":","\"track\"",",","const",".","notification_entity_id",":","entry",".","track_id",",","const",".","notification_entity_owner_id",":","entry",".","owner_id",",","const",".","notification_remix_parent_track_user_id",":","remix_track_parent_owner",",","const",".","notification_remix_parent_track_id",":","remix_track_parent_id",",","}",",","}","remix_created_notifications",".","append","(","remix_notif",")","logger",".","info","(","f\"notifications.py | remixes at {datetime.now() - start_time}\"",")","# Handle track update notifications","# TODO: Consider switching blocknumber for updated 
at?","updated_tracks_query","=","session",".","query","(","Track",")","updated_tracks_query","=","updated_tracks_query",".","filter","(","Track",".","is_unlisted","==","False",",","Track",".","stem_of","==","None",",","Track",".","created_at","!=","Track",".","updated_at",",","Track",".","blocknumber",">","min_block_number",",","Track",".","blocknumber","<=","max_block_number",",",")","updated_tracks","=","updated_tracks_query",".","all","(",")","prev_tracks","=","get_prev_track_entries","(","updated_tracks",")","for","prev_entry","in","prev_tracks",":","entry","=","next","(","t","for","t","in","updated_tracks","if","t",".","track_id","==","prev_entry",".","track_id",")","logger",".","info","(","f\"notifications.py | single track update {entry.track_id} {entry.blocknumber} {datetime.now() - start_time}\"",")","# Tracks that were unlisted and turned to public","if","prev_entry",".","is_unlisted","==","True",":","logger",".","info","(","f\"notifications.py | single track update to public {datetime.now() - start_time}\"",")","track_notif","=","{","const",".","notification_type",":","const",".","notification_type_create",",","const",".","notification_blocknumber",":","entry",".","blocknumber",",","const",".","notification_timestamp",":","entry",".","created_at",",","const",".","notification_initiator",":","entry",".","owner_id",",","# TODO: is entity owner id necessary for tracks?","const",".","notification_metadata",":","{","const",".","notification_entity_type",":","\"track\"",",","const",".","notification_entity_id",":","entry",".","track_id",",","const",".","notification_entity_owner_id",":","entry",".","owner_id",",","}",",","}","created_notifications",".","append","(","track_notif",")","# Tracks that were not remixes and turned into remixes","if","not","prev_entry",".","remix_of","and","entry",".","remix_of",":","# Add notification to remix track owner","parent_remix_tracks","=","[","t","[","\"parent_track_id\"","]","for","t","in","entry",".","remix_of","[","\"tracks\"","]","]","remix_track_parents","=","(","session",".","query","(","Track",".","owner_id",",","Track",".","track_id",")",".","filter","(","Track",".","track_id",".","in_","(","parent_remix_tracks",")",",","Track",".","is_unlisted","==","False",",","Track",".","is_delete","==","False",",","Track",".","is_current","==","True",",",")",".","all","(",")",")","logger",".","info","(","f\"notifications.py | single track update parents {remix_track_parents} {datetime.now() - start_time}\"",")","for","remix_track_parent","in","remix_track_parents",":","[","remix_track_parent_owner",",","remix_track_parent_id",",","]","=","remix_track_parent","remix_notif","=","{","const",".","notification_type",":","const",".","notification_type_remix_create",",","const",".","notification_blocknumber",":","entry",".","blocknumber",",","const",".","notification_timestamp",":","entry",".","created_at",",","const",".","notification_initiator",":","entry",".","owner_id",",","# TODO: is entity owner id necessary for 
tracks?","const",".","notification_metadata",":","{","const",".","notification_entity_type",":","\"track\"",",","const",".","notification_entity_id",":","entry",".","track_id",",","const",".","notification_entity_owner_id",":","entry",".","owner_id",",","const",".","notification_remix_parent_track_user_id",":","remix_track_parent_owner",",","const",".","notification_remix_parent_track_id",":","remix_track_parent_id",",","}",",","}","remix_created_notifications",".","append","(","remix_notif",")","notifications_unsorted",".","extend","(","remix_created_notifications",")","logger",".","info","(","f\"notifications.py | track updates at {datetime.now() - start_time}\"",")","# Aggregate playlist\/album notifs","collection_query","=","session",".","query","(","Playlist",")","# TODO: Is it valid to use is_current here? Might not be the right info...","collection_query","=","collection_query",".","filter","(","Playlist",".","is_delete","==","False",",","Playlist",".","is_private","==","False",",","Playlist",".","blocknumber",">","min_block_number",",","Playlist",".","blocknumber","<=","max_block_number",",",")","collection_query","=","collection_query",".","filter","(","Playlist",".","created_at","==","Playlist",".","updated_at",")","collection_results","=","collection_query",".","all","(",")","for","entry","in","collection_results",":","collection_notif","=","{","const",".","notification_type",":","const",".","notification_type_create",",","const",".","notification_blocknumber",":","entry",".","blocknumber",",","const",".","notification_timestamp",":","entry",".","created_at",",","const",".","notification_initiator",":","entry",".","playlist_owner_id",",","}","metadata","=","{","const",".","notification_entity_id",":","entry",".","playlist_id",",","const",".","notification_entity_owner_id",":","entry",".","playlist_owner_id",",","const",".","notification_collection_content",":","entry",".","playlist_contents",",","}","if","entry",".","is_album",":","metadata","[","const",".","notification_entity_type","]","=","\"album\"","else",":","metadata","[","const",".","notification_entity_type","]","=","\"playlist\"","collection_notif","[","const",".","notification_metadata","]","=","metadata","created_notifications",".","append","(","collection_notif",")","# Playlists that were private and turned to public aka 'published'","# TODO: Consider switching blocknumber for updated at?","publish_playlists_query","=","session",".","query","(","Playlist",")","publish_playlists_query","=","publish_playlists_query",".","filter","(","Playlist",".","is_private","==","False",",","Playlist",".","created_at","!=","Playlist",".","updated_at",",","Playlist",".","blocknumber",">","min_block_number",",","Playlist",".","blocknumber","<=","max_block_number",",",")","publish_playlist_results","=","publish_playlists_query",".","all","(",")","for","entry","in","publish_playlist_results",":","prev_entry_query","=","(","session",".","query","(","Playlist",")",".","filter","(","Playlist",".","playlist_id","==","entry",".","playlist_id",",","Playlist",".","blocknumber","<","entry",".","blocknumber",",",")",".","order_by","(","desc","(","Playlist",".","blocknumber",")",")",")","# Previous private entry indicates transition to public, triggering a 
notification","prev_entry","=","prev_entry_query",".","first","(",")","if","prev_entry",".","is_private","==","True",":","publish_playlist_notif","=","{","const",".","notification_type",":","const",".","notification_type_create",",","const",".","notification_blocknumber",":","entry",".","blocknumber",",","const",".","notification_timestamp",":","entry",".","created_at",",","const",".","notification_initiator",":","entry",".","playlist_owner_id",",","}","metadata","=","{","const",".","notification_entity_id",":","entry",".","playlist_id",",","const",".","notification_entity_owner_id",":","entry",".","playlist_owner_id",",","const",".","notification_collection_content",":","entry",".","playlist_contents",",","const",".","notification_entity_type",":","\"playlist\"",",","}","publish_playlist_notif","[","const",".","notification_metadata","]","=","metadata","created_notifications",".","append","(","publish_playlist_notif",")","notifications_unsorted",".","extend","(","created_notifications",")","logger",".","info","(","f\"notifications.py | playlists at {datetime.now() - start_time}\"",")","# Get additional owner info as requested for listen counts","tracks_owner_query","=","session",".","query","(","Track",")",".","filter","(","Track",".","is_current","==","True",",","Track",".","track_id",".","in_","(","track_ids_to_owner",")",")","track_owner_results","=","tracks_owner_query",".","all","(",")","for","entry","in","track_owner_results",":","owner","=","entry",".","owner_id","track_id","=","entry",".","track_id","owner_info","[","const",".","tracks","]","[","track_id","]","=","owner","# Get playlist updates","today","=","date",".","today","(",")","thirty_days_ago","=","today","-","timedelta","(","days","=","30",")","thirty_days_ago_time","=","datetime","(","thirty_days_ago",".","year",",","thirty_days_ago",".","month",",","thirty_days_ago",".","day",",","0",",","0",",","0",")","playlist_update_query","=","session",".","query","(","Playlist",")","playlist_update_query","=","playlist_update_query",".","filter","(","Playlist",".","is_current","==","True",",","Playlist",".","is_delete","==","False",",","Playlist",".","last_added_to",">=","thirty_days_ago_time",",","Playlist",".","blocknumber",">","min_block_number",",","Playlist",".","blocknumber","<=","max_block_number",",",")","playlist_update_results","=","playlist_update_query",".","all","(",")","# Represents all playlist update notifications","playlist_update_notifications","=","[","]","playlist_update_notifs_by_playlist_id","=","{","}","for","entry","in","playlist_update_results",":","playlist_update_notifs_by_playlist_id","[","entry",".","playlist_id","]","=","{","const",".","notification_type",":","const",".","notification_type_playlist_update",",","const",".","notification_blocknumber",":","entry",".","blocknumber",",","const",".","notification_timestamp",":","entry",".","created_at",",","const",".","notification_initiator",":","entry",".","playlist_owner_id",",","const",".","notification_metadata",":","{","const",".","notification_entity_id",":","entry",".","playlist_id",",","const",".","notification_entity_type",":","\"playlist\"",",","const",".","notification_playlist_update_timestamp",":","entry",".","last_added_to",",","}",",","}","# get all favorited playlists","# playlists may have been favorited outside the blocknumber bounds","# e.g. 
before the min_block_number","playlist_favorites_query","=","session",".","query","(","Save",")","playlist_favorites_query","=","playlist_favorites_query",".","filter","(","Save",".","is_current","==","True",",","Save",".","is_delete","==","False",",","Save",".","save_type","==","SaveType",".","playlist",",","Save",".","save_item_id",".","in_","(","playlist_update_notifs_by_playlist_id",".","keys","(",")",")",",",")","playlist_favorites_results","=","playlist_favorites_query",".","all","(",")","# dictionary of playlist id => users that favorited said playlist","# e.g. { playlist1: [user1, user2, ...], ... }","# we need this dictionary to know which users need to be notified of a playlist update","users_that_favorited_playlists_dict","=","ft",".","reduce","(","lambda","accumulator",",","current",":","accumulator",".","update","(","{","current",".","save_item_id",":","accumulator","[","current",".","save_item_id","]","+","[","current",".","user_id","]","if","current",".","save_item_id","in","accumulator","else","[","current",".","user_id","]","}",")","or","accumulator",",","playlist_favorites_results",",","{","}",",",")","for","playlist_id","in","users_that_favorited_playlists_dict",":","# TODO: We probably do not need this check because we are filtering","# playlist_favorites_query to only matching ids","if","playlist_id","not","in","playlist_update_notifs_by_playlist_id",":","continue","playlist_update_notif","=","playlist_update_notifs_by_playlist_id","[","playlist_id","]","playlist_update_notif","[","const",".","notification_metadata","]",".","update","(","{","const",".","notification_playlist_update_users",":","users_that_favorited_playlists_dict","[","playlist_id","]","}",")","playlist_update_notifications",".","append","(","playlist_update_notif",")","notifications_unsorted",".","extend","(","playlist_update_notifications",")","logger",".","info","(","f\"notifications.py | playlist updates at {datetime.now() - start_time}\"",")","# Final sort - TODO: can we sort by timestamp?","sorted_notifications","=","sorted","(","notifications_unsorted",",","key","=","lambda","i",":","i","[","const",".","notification_blocknumber","]",",","reverse","=","False",",",")","return","api_helpers",".","success_response","(","{","\"notifications\"",":","sorted_notifications",",","\"info\"",":","notification_metadata",",","\"milestones\"",":","milestone_info",",","\"owners\"",":","owner_info",",","}",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/notifications.py#L203-L976"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/notifications.py","language":"python","identifier":"solana_notifications","parameters":"()","argument_list":"","return_statement":"return api_helpers.success_response(\n {\n \"notifications\": sorted_notifications,\n \"info\": notification_metadata,\n }\n )","docstring":"Fetches the notifications events that occurred between the given slot numbers\n\n URL Params:\n min_slot_number: (int) The start slot number for querying for notifications\n max_slot_number?: (int) The end slot number for querying for notifications\n\n Response - Json object w\/ the following fields\n notifications: Array of notifications of shape:\n type: 'ChallengeReward' | 'MilestoneListen'\n slot: (int) slot number of notification\n initiator: (int) the user id that caused this notification\n metadata?: (any) additional information about 
the notification\n                challenge_id?: (int) completed challenge id for challenge reward notifications\n\n        info: Dictionary of metadata w\/ min_slot_number & max_slot_number fields","docstring_summary":"Fetches the notifications events that occurred between the given slot numbers","docstring_tokens":["Fetches","the","notifications","events","that","occurred","between","the","given","slot","numbers"],"function":"def solana_notifications():\n    \"\"\"\n    Fetches the notifications events that occurred between the given slot numbers\n\n    URL Params:\n        min_slot_number: (int) The start slot number for querying for notifications\n        max_slot_number?: (int) The end slot number for querying for notifications\n\n    Response - Json object w\/ the following fields\n        notifications: Array of notifications of shape:\n            type: 'ChallengeReward' | 'MilestoneListen'\n            slot: (int) slot number of notification\n            initiator: (int) the user id that caused this notification\n            metadata?: (any) additional information about the notification\n                challenge_id?: (int) completed challenge id for challenge reward notifications\n\n        info: Dictionary of metadata w\/ min_slot_number & max_slot_number fields\n    \"\"\"\n    db = get_db_read_replica()\n    redis = get_redis()\n    min_slot_number = request.args.get(\"min_slot_number\", type=int)\n    max_slot_number = request.args.get(\"max_slot_number\", type=int)\n\n    # Max slot number is not explicitly required (yet)\n    if not min_slot_number and min_slot_number != 0:\n        return api_helpers.error_response({\"msg\": \"Missing min slot number\"}, 400)\n\n    if not max_slot_number or (max_slot_number - min_slot_number) > max_slot_diff:\n        max_slot_number = min_slot_number + max_slot_diff\n\n    max_valid_slot = get_max_slot(redis)\n    max_slot_number = min(max_slot_number, max_valid_slot)\n\n    notifications_unsorted = []\n    notification_metadata = {\n        \"min_slot_number\": min_slot_number,\n        \"max_slot_number\": max_slot_number,\n    }\n\n    with db.scoped_session() as session:\n        #\n        # Query relevant challenge disbursement information for challenge reward notifications\n        #\n        challenge_disbursement_results = (\n            session.query(ChallengeDisbursement)\n            .filter(\n                ChallengeDisbursement.slot >= min_slot_number,\n                ChallengeDisbursement.slot <= max_slot_number,\n            )\n            .all()\n        )\n\n        challenge_reward_notifications = []\n        for result in challenge_disbursement_results:\n            challenge_reward_notifications.append(\n                {\n                    const.solana_notification_type: const.solana_notification_type_challenge_reward,\n                    const.solana_notification_slot: result.slot,\n                    const.solana_notification_initiator: result.user_id,\n                    const.solana_notification_metadata: {\n                        const.solana_notification_challenge_id: result.challenge_id,\n                    },\n                }\n            )\n\n        track_listen_milestone: List[Tuple[Milestone, int]] = (\n            session.query(Milestone, Track.owner_id)\n            .filter(\n                Milestone.name == LISTEN_COUNT_MILESTONE,\n                Milestone.slot >= min_slot_number,\n                Milestone.slot <= max_slot_number,\n            )\n            # NOTE: use sqlalchemy's and_() here; a bare Python `and` between\n            # column expressions silently drops the is_current condition.\n            .join(Track, and_(Track.track_id == Milestone.id, Track.is_current == True))\n            .all()\n        )\n\n        track_listen_milestones = []\n        for result in track_listen_milestone:\n            track_milestone, track_owner_id = result\n            track_listen_milestones.append(\n                {\n                    const.solana_notification_type: const.solana_notification_type_listen_milestone,\n                    const.solana_notification_slot: track_milestone.slot,\n                    const.solana_notification_initiator: track_owner_id,  # owner_id\n                    const.solana_notification_metadata: {\n                        const.solana_notification_threshold: track_milestone.threshold,\n                        const.notification_entity_id: track_milestone.id,  # 
track_id\n const.notification_entity_type: \"track\",\n },\n }\n )\n\n notifications_unsorted.extend(challenge_reward_notifications)\n notifications_unsorted.extend(track_listen_milestones)\n\n # Final sort\n sorted_notifications = sorted(\n notifications_unsorted,\n key=lambda i: i[const.solana_notification_slot],\n reverse=False,\n )\n\n return api_helpers.success_response(\n {\n \"notifications\": sorted_notifications,\n \"info\": notification_metadata,\n }\n )","function_tokens":["def","solana_notifications","(",")",":","db","=","get_db_read_replica","(",")","redis","=","get_redis","(",")","min_slot_number","=","request",".","args",".","get","(","\"min_slot_number\"",",","type","=","int",")","max_slot_number","=","request",".","args",".","get","(","\"max_slot_number\"",",","type","=","int",")","# Max slot number is not explicitly required (yet)","if","not","min_slot_number","and","min_slot_number","!=","0",":","return","api_helpers",".","error_response","(","{","\"msg\"",":","\"Missing min slot number\"","}",",","400",")","if","not","max_slot_number","or","(","max_slot_number","-","min_slot_number",")",">","max_slot_diff",":","max_slot_number","=","min_slot_number","+","max_slot_diff","max_valid_slot","=","get_max_slot","(","redis",")","max_slot_number","=","min","(","max_slot_number",",","max_valid_slot",")","notifications_unsorted","=","[","]","notification_metadata","=","{","\"min_slot_number\"",":","min_slot_number",",","\"max_slot_number\"",":","max_slot_number",",","}","with","db",".","scoped_session","(",")","as","session",":","#","# Query relevant challenge disbursement information for challenge reward notifications","#","challenge_disbursement_results","=","(","session",".","query","(","ChallengeDisbursement",")",".","filter","(","ChallengeDisbursement",".","slot",">=","min_slot_number",",","ChallengeDisbursement",".","slot","<=","max_slot_number",",",")",".","all","(",")",")","challenge_reward_notifications","=","[","]","for","result","in","challenge_disbursement_results",":","challenge_reward_notifications",".","append","(","{","const",".","solana_notification_type",":","const",".","solana_notification_type_challenge_reward",",","const",".","solana_notification_slot",":","result",".","slot",",","const",".","solana_notification_initiator",":","result",".","user_id",",","const",".","solana_notification_metadata",":","{","const",".","solana_notification_challenge_id",":","result",".","challenge_id",",","}",",","}",")","track_listen_milestone",":","List","[","Tuple","(","Milestone",",","int",")","]","=","(","session",".","query","(","Milestone",",","Track",".","owner_id",")",".","filter","(","Milestone",".","name","==","LISTEN_COUNT_MILESTONE",",","Milestone",".","slot",">=","min_slot_number",",","Milestone",".","slot","<=","max_slot_number",",",")",".","join","(","Track",",","Track",".","track_id","==","Milestone",".","id","and","Track",".","is_current","==","True",")",".","all","(",")",")","track_listen_milestones","=","[","]","for","result","in","track_listen_milestone",":","track_milestone",",","track_owner_id","=","result","track_listen_milestones",".","append","(","{","const",".","solana_notification_type",":","const",".","solana_notification_type_listen_milestone",",","const",".","solana_notification_slot",":","track_milestone",".","slot",",","const",".","solana_notification_initiator",":","track_owner_id",",","# 
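To make the docstring's response shape concrete, here is a hypothetical payload; the values are invented and the exact serialized key names come from the `const` module rather than the literals shown here:

```python
# Hypothetical solana_notifications response body (values invented):
example_response = {
    "notifications": [
        {
            "type": "ChallengeReward",
            "slot": 105,
            "initiator": 42,  # user who was disbursed the reward
            "metadata": {"challenge_id": 3},
        },
        {
            "type": "MilestoneListen",
            "slot": 110,
            "initiator": 7,  # track owner
            "metadata": {"threshold": 1000, "entity_id": 314, "entity_type": "track"},
        },
    ],
    "info": {"min_slot_number": 100, "max_slot_number": 200},
}
```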
owner_id","const",".","solana_notification_metadata",":","{","const",".","solana_notification_threshold",":","track_milestone",".","threshold",",","const",".","notification_entity_id",":","track_milestone",".","id",",","# track_id","const",".","notification_entity_type",":","\"track\"",",","}",",","}",")","notifications_unsorted",".","extend","(","challenge_reward_notifications",")","notifications_unsorted",".","extend","(","track_listen_milestones",")","# Final sort","sorted_notifications","=","sorted","(","notifications_unsorted",",","key","=","lambda","i",":","i","[","const",".","solana_notification_slot","]",",","reverse","=","False",",",")","return","api_helpers",".","success_response","(","{","\"notifications\"",":","sorted_notifications",",","\"info\"",":","notification_metadata",",","}",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/notifications.py#L997-L1104"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_user_listening_history.py","language":"python","identifier":"get_user_listening_history","parameters":"(args: GetUserListeningHistoryArgs)","argument_list":"","return_statement":"","docstring":"Returns a user's listening history\n\n Args:\n args: GetUserListeningHistoryArgs The parsed args from the request\n\n Returns:\n Array of tracks the user listened to starting from most recently listened","docstring_summary":"Returns a user's listening history","docstring_tokens":["Returns","a","user","s","listening","history"],"function":"def get_user_listening_history(args: GetUserListeningHistoryArgs):\n \"\"\"\n Returns a user's listening history\n\n Args:\n args: GetUserListeningHistoryArgs The parsed args from the request\n\n Returns:\n Array of tracks the user listened to starting from most recently listened\n \"\"\"\n\n db = get_db_read_replica()\n with db.scoped_session() as session:\n return _get_user_listening_history(session, args)","function_tokens":["def","get_user_listening_history","(","args",":","GetUserListeningHistoryArgs",")",":","db","=","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_user_listening_history","(","session",",","args",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_user_listening_history.py#L26-L39"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_repost_feed_for_user.py","language":"python","identifier":"get_repost_feed_for_user","parameters":"(user_id: int, args: GetRepostFeedForUserArgs)","argument_list":"","return_statement":"","docstring":"Gets the repost feed for a user (e.g. stalking a user)\n\n Args:\n user_id: number The user id to request the repost feed for\n args: GetRepostFeedForUserArgs The parsed args from the request\n\n Returns:\n Array of tracks and playlists (albums) interspersed ordered by\n most recent repost","docstring_summary":"Gets the repost feed for a user (e.g. stalking a user)","docstring_tokens":["Gets","the","repost","feed","for","a","user","(","e",".","g",".","stalking","a","user",")"],"function":"def get_repost_feed_for_user(user_id: int, args: GetRepostFeedForUserArgs):\n \"\"\"\n Gets the repost feed for a user (e.g. 
stalking a user)\n\n Args:\n user_id: number The user id to request the repost feed for\n args: GetRepostFeedForUserArgs The parsed args from the request\n\n Returns:\n Array of tracks and playlists (albums) interspersed ordered by\n most recent repost\n \"\"\"\n db = get_db_read_replica()\n with db.scoped_session() as session:\n return _get_repost_feed_for_user(session, user_id, args)","function_tokens":["def","get_repost_feed_for_user","(","user_id",":","int",",","args",":","GetRepostFeedForUserArgs",")",":","db","=","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_repost_feed_for_user","(","session",",","user_id",",","args",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_repost_feed_for_user.py#L27-L41"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_underground_trending.py","language":"python","identifier":"get_scorable_track_data","parameters":"(session, redis_instance, strategy)","argument_list":"","return_statement":"return list(tracks_map.values())","docstring":"Returns a map: {\n \"track_id\": string\n \"created_at\": string\n \"owner_id\": number\n \"windowed_save_count\": number\n \"save_count\": number\n \"repost_count\": number\n \"windowed_repost_count\": number\n \"owner_follower_count\": number\n \"karma\": number\n \"listens\": number\n \"owner_verified\": boolean\n }","docstring_summary":"Returns a map: {\n \"track_id\": string\n \"created_at\": string\n \"owner_id\": number\n \"windowed_save_count\": number\n \"save_count\": number\n \"repost_count\": number\n \"windowed_repost_count\": number\n \"owner_follower_count\": number\n \"karma\": number\n \"listens\": number\n \"owner_verified\": boolean\n }","docstring_tokens":["Returns","a","map",":","{","track_id",":","string","created_at",":","string","owner_id",":","number","windowed_save_count",":","number","save_count",":","number","repost_count",":","number","windowed_repost_count",":","number","owner_follower_count",":","number","karma",":","number","listens",":","number","owner_verified",":","boolean","}"],"function":"def get_scorable_track_data(session, redis_instance, strategy):\n \"\"\"\n Returns a map: {\n \"track_id\": string\n \"created_at\": string\n \"owner_id\": number\n \"windowed_save_count\": number\n \"save_count\": number\n \"repost_count\": number\n \"windowed_repost_count\": number\n \"owner_follower_count\": number\n \"karma\": number\n \"listens\": number\n \"owner_verified\": boolean\n }\n \"\"\"\n\n score_params = strategy.get_score_params()\n S = score_params[\"S\"]\n r = score_params[\"r\"]\n q = score_params[\"q\"]\n o = score_params[\"o\"]\n f = score_params[\"f\"]\n qr = score_params[\"qr\"]\n xf = score_params[\"xf\"]\n pt = score_params[\"pt\"]\n trending_key = make_trending_cache_key(\"week\", None, strategy.version)\n track_ids = []\n old_trending = get_pickled_key(redis_instance, trending_key)\n if old_trending:\n track_ids = old_trending[1]\n exclude_track_ids = track_ids[:qr]\n\n # Get followers\n follower_query = (\n session.query(\n Follow.followee_user_id.label(\"user_id\"),\n User.is_verified.label(\"is_verified\"),\n func.count(Follow.followee_user_id).label(\"follower_count\"),\n )\n .join(User, User.user_id == Follow.followee_user_id)\n .filter(\n Follow.is_current == True,\n Follow.is_delete == False,\n User.is_current == True,\n 
Follow.created_at < (datetime.now() - timedelta(days=f)),\n )\n .group_by(Follow.followee_user_id, User.is_verified)\n ).subquery()\n\n base_query = (\n session.query(\n AggregatePlays.play_item_id.label(\"track_id\"),\n follower_query.c.user_id,\n follower_query.c.follower_count,\n AggregatePlays.count,\n Track.created_at,\n follower_query.c.is_verified,\n )\n .join(Track, Track.track_id == AggregatePlays.play_item_id)\n .join(follower_query, follower_query.c.user_id == Track.owner_id)\n .join(AggregateUser, AggregateUser.user_id == Track.owner_id)\n .filter(\n Track.is_current == True,\n Track.is_delete == False,\n Track.is_unlisted == False,\n Track.stem_of == None,\n Track.track_id.notin_(exclude_track_ids),\n Track.created_at >= (datetime.now() - timedelta(days=o)),\n follower_query.c.follower_count < S,\n follower_query.c.follower_count >= pt,\n AggregateUser.following_count < r,\n AggregatePlays.count >= q,\n )\n ).all()\n\n tracks_map = {\n record[0]: {\n \"track_id\": record[0],\n \"created_at\": record[4].isoformat(timespec=\"seconds\"),\n \"owner_id\": record[1],\n \"windowed_save_count\": 0,\n \"save_count\": 0,\n \"repost_count\": 0,\n \"windowed_repost_count\": 0,\n \"owner_follower_count\": record[2],\n \"karma\": 1,\n \"listens\": record[3],\n \"owner_verified\": record[5],\n }\n for record in base_query\n }\n\n track_ids = [record[0] for record in base_query]\n\n # Get all the extra values\n repost_counts = get_repost_counts(\n session, False, False, track_ids, [RepostType.track]\n )\n\n windowed_repost_counts = get_repost_counts(\n session, False, False, track_ids, [RepostType.track], None, \"week\"\n )\n\n save_counts = get_save_counts(session, False, False, track_ids, [SaveType.track])\n\n windowed_save_counts = get_save_counts(\n session, False, False, track_ids, [SaveType.track], None, \"week\"\n )\n\n karma_scores = get_karma(session, tuple(track_ids), strategy, None, False, xf)\n\n # Associate all the extra data\n for (track_id, repost_count) in repost_counts:\n tracks_map[track_id][\"repost_count\"] = repost_count\n for (track_id, repost_count) in windowed_repost_counts:\n tracks_map[track_id][\"windowed_repost_count\"] = repost_count\n for (track_id, save_count) in save_counts:\n tracks_map[track_id][\"save_count\"] = save_count\n for (track_id, save_count) in windowed_save_counts:\n tracks_map[track_id][\"windowed_save_count\"] = save_count\n for (track_id, karma) in karma_scores:\n tracks_map[track_id][\"karma\"] = karma\n\n return list(tracks_map.values())","function_tokens":["def","get_scorable_track_data","(","session",",","redis_instance",",","strategy",")",":","score_params","=","strategy",".","get_score_params","(",")","S","=","score_params","[","\"S\"","]","r","=","score_params","[","\"r\"","]","q","=","score_params","[","\"q\"","]","o","=","score_params","[","\"o\"","]","f","=","score_params","[","\"f\"","]","qr","=","score_params","[","\"qr\"","]","xf","=","score_params","[","\"xf\"","]","pt","=","score_params","[","\"pt\"","]","trending_key","=","make_trending_cache_key","(","\"week\"",",","None",",","strategy",".","version",")","track_ids","=","[","]","old_trending","=","get_pickled_key","(","redis_instance",",","trending_key",")","if","old_trending",":","track_ids","=","old_trending","[","1","]","exclude_track_ids","=","track_ids","[",":","qr","]","# Get 
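The filters in get_scorable_track_data's base query gate which tracks are even scorable for underground trending: the owner's follower count must sit in [pt, S), their following count must be under r, the track needs at least q plays, must be newer than o days, and must not be among the top qr previously trending tracks. A pure-Python restatement of those SQL predicates, with illustrative parameter values that are assumptions rather than the production constants:

```python
from datetime import datetime, timedelta

def passes_underground_thresholds(track, params, exclude_track_ids):
    """Restates the WHERE clauses of the base query: S/pt bound the owner's
    follower count, r bounds their following count, q is the minimum play
    count, and o is the maximum track age in days."""
    newer_than = datetime.now() - timedelta(days=params["o"])
    return (
        track["track_id"] not in exclude_track_ids
        and track["created_at"] >= newer_than
        and params["pt"] <= track["owner_follower_count"] < params["S"]
        and track["owner_following_count"] < params["r"]
        and track["listens"] >= params["q"]
    )

params = {"S": 1500, "pt": 10, "r": 1500, "q": 50, "o": 30}  # hypothetical values
track = {
    "track_id": 42,
    "created_at": datetime.now() - timedelta(days=3),
    "owner_follower_count": 120,
    "owner_following_count": 300,
    "listens": 75,
}
assert passes_underground_thresholds(track, params, exclude_track_ids=set())
```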
followers","follower_query","=","(","session",".","query","(","Follow",".","followee_user_id",".","label","(","\"user_id\"",")",",","User",".","is_verified",".","label","(","\"is_verified\"",")",",","func",".","count","(","Follow",".","followee_user_id",")",".","label","(","\"follower_count\"",")",",",")",".","join","(","User",",","User",".","user_id","==","Follow",".","followee_user_id",")",".","filter","(","Follow",".","is_current","==","True",",","Follow",".","is_delete","==","False",",","User",".","is_current","==","True",",","Follow",".","created_at","<","(","datetime",".","now","(",")","-","timedelta","(","days","=","f",")",")",",",")",".","group_by","(","Follow",".","followee_user_id",",","User",".","is_verified",")",")",".","subquery","(",")","base_query","=","(","session",".","query","(","AggregatePlays",".","play_item_id",".","label","(","\"track_id\"",")",",","follower_query",".","c",".","user_id",",","follower_query",".","c",".","follower_count",",","AggregatePlays",".","count",",","Track",".","created_at",",","follower_query",".","c",".","is_verified",",",")",".","join","(","Track",",","Track",".","track_id","==","AggregatePlays",".","play_item_id",")",".","join","(","follower_query",",","follower_query",".","c",".","user_id","==","Track",".","owner_id",")",".","join","(","AggregateUser",",","AggregateUser",".","user_id","==","Track",".","owner_id",")",".","filter","(","Track",".","is_current","==","True",",","Track",".","is_delete","==","False",",","Track",".","is_unlisted","==","False",",","Track",".","stem_of","==","None",",","Track",".","track_id",".","notin_","(","exclude_track_ids",")",",","Track",".","created_at",">=","(","datetime",".","now","(",")","-","timedelta","(","days","=","o",")",")",",","follower_query",".","c",".","follower_count","<","S",",","follower_query",".","c",".","follower_count",">=","pt",",","AggregateUser",".","following_count","<","r",",","AggregatePlays",".","count",">=","q",",",")",")",".","all","(",")","tracks_map","=","{","record","[","0","]",":","{","\"track_id\"",":","record","[","0","]",",","\"created_at\"",":","record","[","4","]",".","isoformat","(","timespec","=","\"seconds\"",")",",","\"owner_id\"",":","record","[","1","]",",","\"windowed_save_count\"",":","0",",","\"save_count\"",":","0",",","\"repost_count\"",":","0",",","\"windowed_repost_count\"",":","0",",","\"owner_follower_count\"",":","record","[","2","]",",","\"karma\"",":","1",",","\"listens\"",":","record","[","3","]",",","\"owner_verified\"",":","record","[","5","]",",","}","for","record","in","base_query","}","track_ids","=","[","record","[","0","]","for","record","in","base_query","]","# Get all the extra values","repost_counts","=","get_repost_counts","(","session",",","False",",","False",",","track_ids",",","[","RepostType",".","track","]",")","windowed_repost_counts","=","get_repost_counts","(","session",",","False",",","False",",","track_ids",",","[","RepostType",".","track","]",",","None",",","\"week\"",")","save_counts","=","get_save_counts","(","session",",","False",",","False",",","track_ids",",","[","SaveType",".","track","]",")","windowed_save_counts","=","get_save_counts","(","session",",","False",",","False",",","track_ids",",","[","SaveType",".","track","]",",","None",",","\"week\"",")","karma_scores","=","get_karma","(","session",",","tuple","(","track_ids",")",",","strategy",",","None",",","False",",","xf",")","# Associate all the extra 
data","for","(","track_id",",","repost_count",")","in","repost_counts",":","tracks_map","[","track_id","]","[","\"repost_count\"","]","=","repost_count","for","(","track_id",",","repost_count",")","in","windowed_repost_counts",":","tracks_map","[","track_id","]","[","\"windowed_repost_count\"","]","=","repost_count","for","(","track_id",",","save_count",")","in","save_counts",":","tracks_map","[","track_id","]","[","\"save_count\"","]","=","save_count","for","(","track_id",",","save_count",")","in","windowed_save_counts",":","tracks_map","[","track_id","]","[","\"windowed_save_count\"","]","=","save_count","for","(","track_id",",","karma",")","in","karma_scores",":","tracks_map","[","track_id","]","[","\"karma\"","]","=","karma","return","list","(","tracks_map",".","values","(",")",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_underground_trending.py#L52-L176"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_associated_user_id.py","language":"python","identifier":"get_associated_user_id","parameters":"(args)","argument_list":"","return_statement":"","docstring":"Returns a user_id the associated wallet\n\n Args:\n args: dict The parsed args from the request\n args.wallet: string The wallet to find associated with an user id\n\n Returns:\n number representing the user id","docstring_summary":"Returns a user_id the associated wallet","docstring_tokens":["Returns","a","user_id","the","associated","wallet"],"function":"def get_associated_user_id(args):\n \"\"\"\n Returns a user_id the associated wallet\n\n Args:\n args: dict The parsed args from the request\n args.wallet: string The wallet to find associated with an user id\n\n Returns:\n number representing the user id\n \"\"\"\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n user_id = (\n session.query(AssociatedWallet.user_id)\n .filter(AssociatedWallet.is_current == True)\n .filter(AssociatedWallet.is_delete == False)\n .filter(AssociatedWallet.wallet == args.get(\"wallet\"))\n .first()\n )\n return user_id[0] if user_id else None","function_tokens":["def","get_associated_user_id","(","args",")",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","user_id","=","(","session",".","query","(","AssociatedWallet",".","user_id",")",".","filter","(","AssociatedWallet",".","is_current","==","True",")",".","filter","(","AssociatedWallet",".","is_delete","==","False",")",".","filter","(","AssociatedWallet",".","wallet","==","args",".","get","(","\"wallet\"",")",")",".","first","(",")",")","return","user_id","[","0","]","if","user_id","else","None"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_associated_user_id.py#L9-L29"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_unpopulated_tracks.py","language":"python","identifier":"get_unpopulated_tracks","parameters":"(\n session, track_ids, filter_deleted=False, filter_unlisted=True\n)","argument_list":"","return_statement":"return tracks_response","docstring":"Fetches tracks by checking the redis cache first then\n going to DB and writes to cache if not present\n\n Args:\n session: DB session\n track_ids: array A list of track 
ids\n\n Returns:\n Array of tracks","docstring_summary":"Fetches tracks by checking the redis cache first then\n going to DB and writes to cache if not present","docstring_tokens":["Fetches","tracks","by","checking","the","redis","cache","first","then","going","to","DB","and","writes","to","cache","if","not","present"],"function":"def get_unpopulated_tracks(\n session, track_ids, filter_deleted=False, filter_unlisted=True\n):\n \"\"\"\n Fetches tracks by checking the redis cache first then\n going to DB and writes to cache if not present\n\n Args:\n session: DB session\n track_ids: array A list of track ids\n\n Returns:\n Array of tracks\n \"\"\"\n # Check the cached tracks\n cached_tracks_results = get_cached_tracks(track_ids)\n has_all_tracks_cached = cached_tracks_results.count(None) == 0\n if has_all_tracks_cached:\n res = cached_tracks_results\n if filter_deleted:\n res = list(filter(lambda track: not track[\"is_delete\"], res))\n if filter_unlisted:\n res = list(filter(lambda track: not track[\"is_unlisted\"], res))\n return res\n\n # Create a dict of cached tracks\n cached_tracks = {}\n for cached_track in cached_tracks_results:\n if cached_track:\n cached_tracks[cached_track[\"track_id\"]] = cached_track\n\n track_ids_to_fetch = filter(\n lambda track_id: track_id not in cached_tracks, track_ids\n )\n\n tracks_query = (\n session.query(Track)\n .filter(Track.is_current == True, Track.stem_of == None)\n .filter(Track.track_id.in_(track_ids_to_fetch))\n )\n\n if filter_unlisted:\n tracks_query = tracks_query.filter(Track.is_unlisted == False)\n\n if filter_deleted:\n tracks_query = tracks_query.filter(Track.is_delete == False)\n\n tracks = tracks_query.all()\n tracks = helpers.query_result_to_list(tracks)\n queried_tracks = {track[\"track_id\"]: track for track in tracks}\n\n # cache tracks for future use\n set_tracks_in_cache(tracks)\n\n tracks_response = []\n for track_id in track_ids:\n if track_id in cached_tracks:\n if filter_unlisted and cached_tracks[track_id][\"is_unlisted\"]:\n continue\n if filter_deleted and cached_tracks[track_id][\"is_delete\"]:\n continue\n tracks_response.append(cached_tracks[track_id])\n elif track_id in queried_tracks:\n tracks_response.append(queried_tracks[track_id])\n\n return tracks_response","function_tokens":["def","get_unpopulated_tracks","(","session",",","track_ids",",","filter_deleted","=","False",",","filter_unlisted","=","True",")",":","# Check the cached tracks","cached_tracks_results","=","get_cached_tracks","(","track_ids",")","has_all_tracks_cached","=","cached_tracks_results",".","count","(","None",")","==","0","if","has_all_tracks_cached",":","res","=","cached_tracks_results","if","filter_deleted",":","res","=","list","(","filter","(","lambda","track",":","not","track","[","\"is_delete\"","]",",","res",")",")","if","filter_unlisted",":","res","=","list","(","filter","(","lambda","track",":","not","track","[","\"is_unlisted\"","]",",","res",")",")","return","res","# Create a dict of cached 
tracks","cached_tracks","=","{","}","for","cached_track","in","cached_tracks_results",":","if","cached_track",":","cached_tracks","[","cached_track","[","\"track_id\"","]","]","=","cached_track","track_ids_to_fetch","=","filter","(","lambda","track_id",":","track_id","not","in","cached_tracks",",","track_ids",")","tracks_query","=","(","session",".","query","(","Track",")",".","filter","(","Track",".","is_current","==","True",",","Track",".","stem_of","==","None",")",".","filter","(","Track",".","track_id",".","in_","(","track_ids_to_fetch",")",")",")","if","filter_unlisted",":","tracks_query","=","tracks_query",".","filter","(","Track",".","is_unlisted","==","False",")","if","filter_deleted",":","tracks_query","=","tracks_query",".","filter","(","Track",".","is_delete","==","False",")","tracks","=","tracks_query",".","all","(",")","tracks","=","helpers",".","query_result_to_list","(","tracks",")","queried_tracks","=","{","track","[","\"track_id\"","]",":","track","for","track","in","tracks","}","# cache tracks for future use","set_tracks_in_cache","(","tracks",")","tracks_response","=","[","]","for","track_id","in","track_ids",":","if","track_id","in","cached_tracks",":","if","filter_unlisted","and","cached_tracks","[","track_id","]","[","\"is_unlisted\"","]",":","continue","if","filter_deleted","and","cached_tracks","[","track_id","]","[","\"is_delete\"","]",":","continue","tracks_response",".","append","(","cached_tracks","[","track_id","]",")","elif","track_id","in","queried_tracks",":","tracks_response",".","append","(","queried_tracks","[","track_id","]",")","return","tracks_response"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_unpopulated_tracks.py#L41-L106"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/skipped_transactions.py","language":"python","identifier":"check_skipped_transactions","parameters":"()","argument_list":"","return_statement":"return success_response(skipped_transactions)","docstring":"Returns skipped transactions during indexing\n Takes query params 'blocknumber', 'blockhash', and 'transactionhash'\n Filters by query params if they are not null","docstring_summary":"Returns skipped transactions during indexing\n Takes query params 'blocknumber', 'blockhash', and 'transactionhash'\n Filters by query params if they are not null","docstring_tokens":["Returns","skipped","transactions","during","indexing","Takes","query","params","blocknumber","blockhash","and","transactionhash","Filters","by","query","params","if","they","are","not","null"],"function":"def check_skipped_transactions():\n \"\"\"\n Returns skipped transactions during indexing\n Takes query params 'blocknumber', 'blockhash', and 'transactionhash'\n Filters by query params if they are not null\n \"\"\"\n blocknumber = request.args.get(\"blocknumber\", type=int)\n blockhash = request.args.get(\"blockhash\")\n transactionhash = request.args.get(\"transactionhash\")\n skipped_transactions = get_skipped_transactions(\n blocknumber, blockhash, transactionhash\n )\n return 
success_response(skipped_transactions)","function_tokens":["def","check_skipped_transactions","(",")",":","blocknumber","=","request",".","args",".","get","(","\"blocknumber\"",",","type","=","int",")","blockhash","=","request",".","args",".","get","(","\"blockhash\"",")","transactionhash","=","request",".","args",".","get","(","\"transactionhash\"",")","skipped_transactions","=","get_skipped_transactions","(","blocknumber",",","blockhash",",","transactionhash",")","return","success_response","(","skipped_transactions",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/skipped_transactions.py#L20-L32"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/skipped_transactions.py","language":"python","identifier":"check_transaction_status","parameters":"()","argument_list":"","return_statement":"return success_response(transaction_status)","docstring":"Returns whether a transaction 'PASSED' | 'FAILED' | 'NOT_FOUND'\n based on all 3 query params 'blocknumber', 'blockhash', and 'transactionhash'","docstring_summary":"Returns whether a transaction 'PASSED' | 'FAILED' | 'NOT_FOUND'\n based on all 3 query params 'blocknumber', 'blockhash', and 'transactionhash'","docstring_tokens":["Returns","whether","a","transaction","PASSED","|","FAILED","|","NOT_FOUND","based","on","all","3","query","params","blocknumber","blockhash","and","transactionhash"],"function":"def check_transaction_status():\n \"\"\"\n Returns whether a transaction 'PASSED' | 'FAILED' | 'NOT_FOUND'\n based on all 3 query params 'blocknumber', 'blockhash', and 'transactionhash'\n \"\"\"\n blocknumber = request.args.get(\"blocknumber\", type=int)\n blockhash = request.args.get(\"blockhash\")\n transactionhash = request.args.get(\"transactionhash\")\n if blocknumber is None or blockhash is None or transactionhash is None:\n return error_response(\n \"Please pass in required query parameters 'blocknumber', 'blockhash', and 'transactionhash'\",\n 400,\n )\n try:\n transaction_status = get_transaction_status(\n blocknumber, blockhash, transactionhash\n )\n except Exception as e:\n return error_response(e)\n return success_response(transaction_status)","function_tokens":["def","check_transaction_status","(",")",":","blocknumber","=","request",".","args",".","get","(","\"blocknumber\"",",","type","=","int",")","blockhash","=","request",".","args",".","get","(","\"blockhash\"",")","transactionhash","=","request",".","args",".","get","(","\"transactionhash\"",")","if","blocknumber","is","None","or","blockhash","is","None","or","transactionhash","is","None",":","return","error_response","(","\"Please pass in required query parameters 'blocknumber', 'blockhash', and 'transactionhash'\"",",","400",",",")","try",":","transaction_status","=","get_transaction_status","(","blocknumber",",","blockhash",",","transactionhash",")","except","Exception","as","e",":","return","error_response","(","e",")","return","success_response","(","transaction_status",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/skipped_transactions.py#L36-L55"} 
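check_transaction_status rejects the request with a 400 unless all three query params are present, while check_skipped_transactions treats each as an optional filter. A framework-free sketch of the strict variant's validation, where `args` stands in for Flask's request.args and the explicit int() conversion replaces the `type=int` coercion Flask performs:

```python
def transaction_status_params(args):
    """Validate the three required query params the way check_transaction_status
    does; returns (params, error_tuple)."""
    blocknumber = args.get("blocknumber")
    blockhash = args.get("blockhash")
    transactionhash = args.get("transactionhash")
    if blocknumber is None or blockhash is None or transactionhash is None:
        return None, (
            "Please pass in required query parameters "
            "'blocknumber', 'blockhash', and 'transactionhash'",
            400,
        )
    return (int(blocknumber), blockhash, transactionhash), None

params, err = transaction_status_params({"blocknumber": "10"})
assert params is None and err[1] == 400
```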
{"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/health_check.py","language":"python","identifier":"play_check","parameters":"()","argument_list":"","return_statement":"return success_response(latest_play, 500 if error else 200, sign_response=False)","docstring":"max_drift: maximum duration in seconds between `now` and the\n latest recorded play record to be considered healthy","docstring_summary":"max_drift: maximum duration in seconds between `now` and the\n latest recorded play record to be considered healthy","docstring_tokens":["max_drift",":","maximum","duration","in","seconds","between","now","and","the","latest","recorded","play","record","to","be","considered","healthy"],"function":"def play_check():\n \"\"\"\n max_drift: maximum duration in seconds between `now` and the\n latest recorded play record to be considered healthy\n \"\"\"\n max_drift = request.args.get(\"max_drift\", type=int)\n\n latest_play = get_latest_play()\n drift = (datetime.now() - latest_play).total_seconds()\n\n # Error if max drift was provided and the drift is greater than max_drift\n error = max_drift and drift > max_drift\n\n return success_response(latest_play, 500 if error else 200, sign_response=False)","function_tokens":["def","play_check","(",")",":","max_drift","=","request",".","args",".","get","(","\"max_drift\"",",","type","=","int",")","latest_play","=","get_latest_play","(",")","drift","=","(","datetime",".","now","(",")","-","latest_play",")",".","total_seconds","(",")","# Error if max drift was provided and the drift is greater than max_drift","error","=","max_drift","and","drift",">","max_drift","return","success_response","(","latest_play",",","500","if","error","else","200",",","sign_response","=","False",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/health_check.py#L68-L81"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/health_check.py","language":"python","identifier":"sol_play_check","parameters":"()","argument_list":"","return_statement":"return success_response(response, 500 if error else 200, sign_response=False)","docstring":"limit: number of latest plays to return\n max_drift: maximum duration in seconds between `now` and the\n latest recorded play record to be considered healthy","docstring_summary":"limit: number of latest plays to return\n max_drift: maximum duration in seconds between `now` and the\n latest recorded play record to be considered healthy","docstring_tokens":["limit",":","number","of","latest","plays","to","return","max_drift",":","maximum","duration","in","seconds","between","now","and","the","latest","recorded","play","record","to","be","considered","healthy"],"function":"def sol_play_check():\n \"\"\"\n limit: number of latest plays to return\n max_drift: maximum duration in seconds between `now` and the\n latest recorded play record to be considered healthy\n \"\"\"\n limit = request.args.get(\"limit\", type=int, default=20)\n max_drift = request.args.get(\"max_drift\", type=int)\n error = None\n redis = redis_connection.get_redis()\n\n response = {}\n response = get_latest_sol_play_check_info(redis, limit)\n latest_db_sol_plays = response[\"latest_db_sol_plays\"]\n\n if latest_db_sol_plays:\n latest_db_play = latest_db_sol_plays[0]\n latest_created_at = latest_db_play[\"created_at\"]\n drift = 
(datetime.now() - latest_created_at).total_seconds()\n\n # Error if max drift was provided and the drift is greater than max_drift\n error = max_drift and drift > max_drift\n\n return success_response(response, 500 if error else 200, sign_response=False)","function_tokens":["def","sol_play_check","(",")",":","limit","=","request",".","args",".","get","(","\"limit\"",",","type","=","int",",","default","=","20",")","max_drift","=","request",".","args",".","get","(","\"max_drift\"",",","type","=","int",")","error","=","None","redis","=","redis_connection",".","get_redis","(",")","response","=","{","}","response","=","get_latest_sol_play_check_info","(","redis",",","limit",")","latest_db_sol_plays","=","response","[","\"latest_db_sol_plays\"","]","if","latest_db_sol_plays",":","latest_db_play","=","latest_db_sol_plays","[","0","]","latest_created_at","=","latest_db_play","[","\"created_at\"","]","drift","=","(","datetime",".","now","(",")","-","latest_created_at",")",".","total_seconds","(",")","# Error if max drift was provided and the drift is greater than max_drift","error","=","max_drift","and","drift",">","max_drift","return","success_response","(","response",",","500","if","error","else","200",",","sign_response","=","False",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/health_check.py#L86-L109"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_tracks_including_unlisted.py","language":"python","identifier":"get_tracks_including_unlisted","parameters":"(args)","argument_list":"","return_statement":"return tracks","docstring":"Fetch a track, allowing unlisted.\n\n Args:\n args: dict\n args.identifiers: array of { handle, id, url_title} dicts\n args.current_user_id: optional current user ID\n args.filter_deleted: filter deleted tracks\n args.with_users: include users in unlisted tracks","docstring_summary":"Fetch a track, allowing unlisted.","docstring_tokens":["Fetch","a","track","allowing","unlisted","."],"function":"def get_tracks_including_unlisted(args):\n \"\"\"Fetch a track, allowing unlisted.\n\n Args:\n args: dict\n args.identifiers: array of { handle, id, url_title} dicts\n args.current_user_id: optional current user ID\n args.filter_deleted: filter deleted tracks\n args.with_users: include users in unlisted tracks\n \"\"\"\n tracks = []\n identifiers = args[\"identifiers\"]\n for i in identifiers:\n helpers.validate_arguments(i, [\"handle\", \"id\", \"url_title\"])\n\n current_user_id = args.get(\"current_user_id\")\n db = get_db_read_replica()\n with db.scoped_session() as session:\n\n def get_unpopulated_track():\n base_query = session.query(Track)\n filter_cond = []\n\n # Create filter conditions as a list of `and` clauses\n for i in identifiers:\n filter_cond.append(\n and_(Track.is_current == True, Track.track_id == i[\"id\"])\n )\n\n # Pass array of `and` clauses into an `or` clause as destructured *args\n base_query = base_query.filter(or_(*filter_cond))\n\n # Allow filtering of deletes\n # Note: There is no standard for boolean url parameters, and any value (including 'false')\n # will be evaluated as true, so an explicit check is made for true\n if \"filter_deleted\" in args:\n filter_deleted = args.get(\"filter_deleted\")\n if filter_deleted:\n base_query = base_query.filter(Track.is_delete == False)\n\n # Perform the query\n # TODO: pagination is broken with unlisted tracks\n 
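Both play_check and sol_play_check compute health the same way: measure the drift between now and the newest recorded play, and report 500 only when the caller supplied a max_drift that the drift exceeds. A sketch of that predicate; note that the source's `max_drift and drift > max_drift` also treats `max_drift=0` as "no threshold", since 0 is falsy:

```python
from datetime import datetime, timedelta

def is_unhealthy(latest_play_at, max_drift=None, now=None):
    """Drift check used by the play health endpoints: unhealthy only when a
    max_drift (seconds) was supplied and the newest play is older than it."""
    now = now or datetime.now()
    drift = (now - latest_play_at).total_seconds()
    return bool(max_drift and drift > max_drift)

now = datetime.now()
assert is_unhealthy(now - timedelta(seconds=120), max_drift=60, now=now)
assert not is_unhealthy(now - timedelta(seconds=120), max_drift=None, now=now)
```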
query_results = paginate_query(base_query).all()\n tracks = helpers.query_result_to_list(query_results)\n\n # Mapping of track_id -> track object from request;\n # used to check route_id when iterating through identifiers\n identifiers_map = {track[\"id\"]: track for track in identifiers}\n\n # If the track is unlisted and the generated route_id does not match the route_id in db,\n # filter track out from response\n def filter_fn(track):\n input_track = identifiers_map[track[\"track_id\"]]\n route_id = helpers.create_track_route_id(\n input_track[\"url_title\"], input_track[\"handle\"]\n )\n\n return not track[\"is_unlisted\"] or track[\"route_id\"] == route_id\n\n tracks = list(filter(filter_fn, tracks))\n\n track_ids = list(map(lambda track: track[\"track_id\"], tracks))\n return (tracks, track_ids)\n\n key = make_cache_key(args)\n (tracks, track_ids) = use_redis_cache(\n key, UNPOPULATED_TRACK_CACHE_DURATION_SEC, get_unpopulated_track\n )\n\n # Add users\n if args.get(\"with_users\", False):\n user_id_list = get_users_ids(tracks)\n users = get_users_by_id(session, user_id_list, current_user_id)\n for track in tracks:\n user = users[track[\"owner_id\"]]\n if user:\n track[\"user\"] = user\n # Populate metadata\n tracks = populate_track_metadata(session, track_ids, tracks, current_user_id)\n\n return tracks","function_tokens":["def","get_tracks_including_unlisted","(","args",")",":","tracks","=","[","]","identifiers","=","args","[","\"identifiers\"","]","for","i","in","identifiers",":","helpers",".","validate_arguments","(","i",",","[","\"handle\"",",","\"id\"",",","\"url_title\"","]",")","current_user_id","=","args",".","get","(","\"current_user_id\"",")","db","=","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","def","get_unpopulated_track","(",")",":","base_query","=","session",".","query","(","Track",")","filter_cond","=","[","]","# Create filter conditions as a list of `and` clauses","for","i","in","identifiers",":","filter_cond",".","append","(","and_","(","Track",".","is_current","==","True",",","Track",".","track_id","==","i","[","\"id\"","]",")",")","# Pass array of `and` clauses into an `or` clause as destructured *args","base_query","=","base_query",".","filter","(","or_","(","*","filter_cond",")",")","# Allow filtering of deletes","# Note: There is no standard for boolean url parameters, and any value (including 'false')","# will be evaluated as true, so an explicit check is made for true","if","\"filter_deleted\"","in","args",":","filter_deleted","=","args",".","get","(","\"filter_deleted\"",")","if","filter_deleted",":","base_query","=","base_query",".","filter","(","Track",".","is_delete","==","False",")","# Perform the query","# TODO: pagination is broken with unlisted tracks","query_results","=","paginate_query","(","base_query",")",".","all","(",")","tracks","=","helpers",".","query_result_to_list","(","query_results",")","# Mapping of track_id -> track object from request;","# used to check route_id when iterating through identifiers","identifiers_map","=","{","track","[","\"id\"","]",":","track","for","track","in","identifiers","}","# If the track is unlisted and the generated route_id does not match the route_id in db,","# filter track out from 
response","def","filter_fn","(","track",")",":","input_track","=","identifiers_map","[","track","[","\"track_id\"","]","]","route_id","=","helpers",".","create_track_route_id","(","input_track","[","\"url_title\"","]",",","input_track","[","\"handle\"","]",")","return","not","track","[","\"is_unlisted\"","]","or","track","[","\"route_id\"","]","==","route_id","tracks","=","list","(","filter","(","filter_fn",",","tracks",")",")","track_ids","=","list","(","map","(","lambda","track",":","track","[","\"track_id\"","]",",","tracks",")",")","return","(","tracks",",","track_ids",")","key","=","make_cache_key","(","args",")","(","tracks",",","track_ids",")","=","use_redis_cache","(","key",",","UNPOPULATED_TRACK_CACHE_DURATION_SEC",",","get_unpopulated_track",")","# Add users","if","args",".","get","(","\"with_users\"",",","False",")",":","user_id_list","=","get_users_ids","(","tracks",")","users","=","get_users_by_id","(","session",",","user_id_list",",","current_user_id",")","for","track","in","tracks",":","user","=","users","[","track","[","\"owner_id\"","]","]","if","user",":","track","[","\"user\"","]","=","user","# Populate metadata","tracks","=","populate_track_metadata","(","session",",","track_ids",",","tracks",",","current_user_id",")","return","tracks"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_tracks_including_unlisted.py#L33-L113"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_balances.py","language":"python","identifier":"does_user_balance_need_refresh","parameters":"(user_balance: UserBalance)","argument_list":"","return_statement":"return needs_refresh","docstring":"Returns whether a given user_balance needs update.\n Very heuristic-y:\n - If we've never updated before (new balance entry), update now\n - If the balance has not been updated in BALANCE_REFRESH seconds","docstring_summary":"Returns whether a given user_balance needs update.\n Very heuristic-y:\n - If we've never updated before (new balance entry), update now\n - If the balance has not been updated in BALANCE_REFRESH seconds","docstring_tokens":["Returns","whether","a","given","user_balance","needs","update",".","Very","heuristic","-","y",":","-","If","we","ve","never","updated","before","(","new","balance","entry",")","update","now","-","If","the","balance","has","not","been","updated","in","BALANCE_REFRESH","seconds"],"function":"def does_user_balance_need_refresh(user_balance: UserBalance) -> bool:\n \"\"\"Returns whether a given user_balance needs update.\n Very heuristic-y:\n - If we've never updated before (new balance entry), update now\n - If the balance has not been updated in BALANCE_REFRESH seconds\n \"\"\"\n\n if user_balance.updated_at == user_balance.created_at:\n return True\n\n delta = timedelta(seconds=BALANCE_REFRESH)\n needs_refresh = user_balance.updated_at < (datetime.now() - delta)\n return 
needs_refresh","function_tokens":["def","does_user_balance_need_refresh","(","user_balance",":","UserBalance",")","->","bool",":","if","user_balance",".","updated_at","==","user_balance",".","created_at",":","return","True","delta","=","timedelta","(","seconds","=","BALANCE_REFRESH",")","needs_refresh","=","user_balance",".","updated_at","<","(","datetime",".","now","(",")","-","delta",")","return","needs_refresh"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_balances.py#L19-L31"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_balances.py","language":"python","identifier":"get_balances","parameters":"(session: Session, redis: Redis, user_ids: List[int])","argument_list":"","return_statement":"return result","docstring":"Gets user balances.\n Returns mapping { user_id: balance }\n Enqueues in Redis user balances requiring refresh.","docstring_summary":"Gets user balances.\n Returns mapping { user_id: balance }\n Enqueues in Redis user balances requiring refresh.","docstring_tokens":["Gets","user","balances",".","Returns","mapping","{","user_id",":","balance","}","Enqueues","in","Redis","user","balances","requiring","refresh","."],"function":"def get_balances(session: Session, redis: Redis, user_ids: List[int]):\n \"\"\"Gets user balances.\n Returns mapping { user_id: balance }\n Enqueues in Redis user balances requiring refresh.\n \"\"\"\n # Find user balances\n query: List[UserBalance] = (\n (session.query(UserBalance)).filter(UserBalance.user_id.in_(user_ids)).all()\n )\n\n # Construct result dict from query result\n result = {\n user_balance.user_id: {\n \"owner_wallet_balance\": user_balance.balance,\n \"associated_wallets_balance\": user_balance.associated_wallets_balance,\n \"associated_sol_wallets_balance\": user_balance.associated_sol_wallets_balance,\n \"waudio_balance\": user_balance.waudio,\n \"total_balance\": str(\n int(user_balance.balance)\n + int(user_balance.associated_wallets_balance)\n + int(user_balance.associated_sol_wallets_balance)\n * 10 ** WAUDIO_DECIMALS\n + int(user_balance.waudio) * 10 ** WAUDIO_DECIMALS\n ),\n }\n for user_balance in query\n }\n\n # Find user_ids that don't yet have a balance\n user_ids_set = set(user_ids)\n fetched_user_ids_set = {x.user_id for x in query}\n needs_balance_set = user_ids_set - fetched_user_ids_set\n\n # Add new balances to result set\n no_balance_dict = {\n user_id: {\n \"owner_wallet_balance\": \"0\",\n \"associated_wallets_balance\": \"0\",\n \"associated_sol_wallets_balance\": \"0\",\n \"total_balance\": \"0\",\n \"waudio_balance\": \"0\",\n }\n for user_id in needs_balance_set\n }\n result.update(no_balance_dict)\n\n # Get old balances that need refresh\n needs_refresh = [\n user_balance.user_id\n for user_balance in query\n if does_user_balance_need_refresh(user_balance)\n ]\n\n # Enqueue new balances to Redis refresh queue\n # 1. All users who need a new balance\n # 2. 
All users who need a balance refresh\n enqueue_lazy_balance_refresh(redis, list(needs_balance_set) + needs_refresh)\n\n return result","function_tokens":["def","get_balances","(","session",":","Session",",","redis",":","Redis",",","user_ids",":","List","[","int","]",")",":","# Find user balances","query",":","List","[","UserBalance","]","=","(","(","session",".","query","(","UserBalance",")",")",".","filter","(","UserBalance",".","user_id",".","in_","(","user_ids",")",")",".","all","(",")",")","# Construct result dict from query result","result","=","{","user_balance",".","user_id",":","{","\"owner_wallet_balance\"",":","user_balance",".","balance",",","\"associated_wallets_balance\"",":","user_balance",".","associated_wallets_balance",",","\"associated_sol_wallets_balance\"",":","user_balance",".","associated_sol_wallets_balance",",","\"waudio_balance\"",":","user_balance",".","waudio",",","\"total_balance\"",":","str","(","int","(","user_balance",".","balance",")","+","int","(","user_balance",".","associated_wallets_balance",")","+","int","(","user_balance",".","associated_sol_wallets_balance",")","*","10","**","WAUDIO_DECIMALS","+","int","(","user_balance",".","waudio",")","*","10","**","WAUDIO_DECIMALS",")",",","}","for","user_balance","in","query","}","# Find user_ids that don't yet have a balance","user_ids_set","=","set","(","user_ids",")","fetched_user_ids_set","=","{","x",".","user_id","for","x","in","query","}","needs_balance_set","=","user_ids_set","-","fetched_user_ids_set","# Add new balances to result set","no_balance_dict","=","{","user_id",":","{","\"owner_wallet_balance\"",":","\"0\"",",","\"associated_wallets_balance\"",":","\"0\"",",","\"associated_sol_wallets_balance\"",":","\"0\"",",","\"total_balance\"",":","\"0\"",",","\"waudio_balance\"",":","\"0\"",",","}","for","user_id","in","needs_balance_set","}","result",".","update","(","no_balance_dict",")","# Get old balances that need refresh","needs_refresh","=","[","user_balance",".","user_id","for","user_balance","in","query","if","does_user_balance_need_refresh","(","user_balance",")","]","# Enqueue new balances to Redis refresh queue","# 1. All users who need a new balance","# 2. 
All users who need a balance refresh","enqueue_lazy_balance_refresh","(","redis",",","list","(","needs_balance_set",")","+","needs_refresh",")","return","result"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_balances.py#L48-L106"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_users_cnode.py","language":"python","identifier":"get_users_cnode","parameters":"(cnode_endpoint_string, replica_type=ReplicaType.PRIMARY)","argument_list":"","return_statement":"return users_dict","docstring":"Query all users with `cnode_endpoint_string` in replica set\n If replica_type=ReplicaType.PRIMARY -> returns users with `cnode_endpoint_string` as primary\n Else if replica_type=ReplicaType.SECONDARY -> returns users with `cnode_endpoint_string` as secondary1 or secondary2\n Else (only other option is replica_type=ReplicaType.ALL)\n\n Only returns values where 1\/2 secondaries are non-null","docstring_summary":"Query all users with `cnode_endpoint_string` in replica set\n If replica_type=ReplicaType.PRIMARY -> returns users with `cnode_endpoint_string` as primary\n Else if replica_type=ReplicaType.SECONDARY -> returns users with `cnode_endpoint_string` as secondary1 or secondary2\n Else (only other option is replica_type=ReplicaType.ALL)","docstring_tokens":["Query","all","users","with","cnode_endpoint_string","in","replica","set","If","replica_type","=","ReplicaType",".","PRIMARY","-",">","returns","users","with","cnode_endpoint_string","as","primary","Else","if","replica_type","=","ReplicaType",".","SECONDARY","-",">","returns","users","with","cnode_endpoint_string","as","secondary1","or","secondary2","Else","(","only","other","option","is","replica_type","=","ReplicaType",".","ALL",")"],"function":"def get_users_cnode(cnode_endpoint_string, replica_type=ReplicaType.PRIMARY):\n \"\"\"\n Query all users with `cnode_endpoint_string` in replica set\n If replica_type=ReplicaType.PRIMARY -> returns users with `cnode_endpoint_string` as primary\n Else if replica_type=ReplicaType.SECONDARY -> returns users with `cnode_endpoint_string` as secondary1 or secondary2\n Else (only other option is replica_type=ReplicaType.ALL)\n\n Only returns values where 1\/2 secondaries are non-null\n \"\"\"\n users = []\n db = get_db_read_replica()\n with db.scoped_session() as session:\n users_res = sqlalchemy.text(\n f\"\"\"\n SELECT\n *\n FROM\n (\n SELECT\n \"user_id\",\n \"wallet\",\n (\"creator_node_endpoints\") [1] as \"primary\",\n (\"creator_node_endpoints\") [2] as \"secondary1\",\n (\"creator_node_endpoints\") [3] as \"secondary2\",\n \"primary_id\" as \"primarySpID\",\n (\"secondary_ids\") [1] as \"secondary1SpID\",\n (\"secondary_ids\") [2] as \"secondary2SpID\"\n FROM\n (\n SELECT\n \"user_id\",\n \"wallet\",\n string_to_array(\"creator_node_endpoint\", ',') as \"creator_node_endpoints\",\n \"primary_id\",\n \"secondary_ids\"\n FROM\n \"users\"\n WHERE\n \"creator_node_endpoint\" IS NOT NULL\n AND \"is_current\" IS TRUE\n ORDER BY\n \"user_id\" ASC\n ) as \"s\"\n ) as \"t\"\n WHERE\n {\n \"t.primary = :cnode_endpoint_string AND\"\n if replica_type == ReplicaType.PRIMARY\n else '(t.secondary1 = :cnode_endpoint_string OR t.secondary2 = :cnode_endpoint_string) AND'\n if replica_type == ReplicaType.SECONDARY\n else '(t.primary = :cnode_endpoint_string OR '\n 't.secondary1 = :cnode_endpoint_string OR '\n 't.secondary2 = 
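The total_balance string in get_balances sums four sources, scaling the Solana-side amounts (associated SOL wallets and wAUDIO) by 10**WAUDIO_DECIMALS so they line up with the ERC-20 base units before adding. A sketch of that arithmetic; the decimals value below is an assumption for the example, not the source's constant:

```python
WAUDIO_DECIMALS = 8  # assumption for the sketch; the source imports the real value

def total_balance(owner, assoc_eth, assoc_sol, waudio):
    """Fold the four per-user balances into one base-unit string, scaling the
    Solana amounts up by 10**WAUDIO_DECIMALS first."""
    return str(
        int(owner)
        + int(assoc_eth)
        + int(assoc_sol) * 10 ** WAUDIO_DECIMALS
        + int(waudio) * 10 ** WAUDIO_DECIMALS
    )

assert total_balance("100", "0", "2", "1") == str(100 + 3 * 10 ** 8)
```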
:cnode_endpoint_string) AND'\n }\n t.secondary1 is not NULL;\n \"\"\"\n )\n users = session.execute(\n users_res, {\"cnode_endpoint_string\": cnode_endpoint_string}\n ).fetchall()\n users_dict = [dict(row) for row in users]\n return users_dict","function_tokens":["def","get_users_cnode","(","cnode_endpoint_string",",","replica_type","=","ReplicaType",".","PRIMARY",")",":","users","=","[","]","db","=","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","users_res","=","sqlalchemy",".","text","(","f\"\"\"\n SELECT\n *\n FROM\n (\n SELECT\n \"user_id\",\n \"wallet\",\n (\"creator_node_endpoints\") [1] as \"primary\",\n (\"creator_node_endpoints\") [2] as \"secondary1\",\n (\"creator_node_endpoints\") [3] as \"secondary2\",\n \"primary_id\" as \"primarySpID\",\n (\"secondary_ids\") [1] as \"secondary1SpID\",\n (\"secondary_ids\") [2] as \"secondary2SpID\"\n FROM\n (\n SELECT\n \"user_id\",\n \"wallet\",\n string_to_array(\"creator_node_endpoint\", ',') as \"creator_node_endpoints\",\n \"primary_id\",\n \"secondary_ids\"\n FROM\n \"users\"\n WHERE\n \"creator_node_endpoint\" IS NOT NULL\n AND \"is_current\" IS TRUE\n ORDER BY\n \"user_id\" ASC\n ) as \"s\"\n ) as \"t\"\n WHERE\n {\n \"t.primary = :cnode_endpoint_string AND\"\n if replica_type == ReplicaType.PRIMARY\n else '(t.secondary1 = :cnode_endpoint_string OR t.secondary2 = :cnode_endpoint_string) AND'\n if replica_type == ReplicaType.SECONDARY\n else '(t.primary = :cnode_endpoint_string OR '\n 't.secondary1 = :cnode_endpoint_string OR '\n 't.secondary2 = :cnode_endpoint_string) AND'\n }\n t.secondary1 is not NULL;\n \"\"\"",")","users","=","session",".","execute","(","users_res",",","{","\"cnode_endpoint_string\"",":","cnode_endpoint_string","}",")",".","fetchall","(",")","users_dict","=","[","dict","(","row",")","for","row","in","users","]","return","users_dict"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_users_cnode.py#L16-L77"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_trending_ids.py","language":"python","identifier":"get_trending_ids","parameters":"(args, strategy)","argument_list":"","return_statement":"return {\n \"week\": week_trending_track_ids,\n \"month\": month_trending_track_ids,\n \"year\": year_trending_track_ids,\n }","docstring":"Fetches the ids of the trending tracks using the route's cache\n\n Args:\n args: (dict) The args of the request\n args.limit: (number) The number of track ids to return\n args.genre: (string?) The genre to fetch the trending tracks for\n strategy: (string?) The strategy to apply to compute trending\n\n Returns:\n trending_times_id: (dict) Dictionary containing the week\/month\/year trending track ids","docstring_summary":"Fetches the ids of the trending tracks using the route's cache","docstring_tokens":["Fetches","the","ids","of","the","trending","tracks","using","the","route","s","cache"],"function":"def get_trending_ids(args, strategy):\n \"\"\"\n Fetches the ids of the trending tracks using the route's cache\n\n Args:\n args: (dict) The args of the request\n args.limit: (number) The number of track ids to return\n args.genre: (string?) The genre to fetch the trending tracks for\n strategy: (string?) 
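get_users_cnode builds its WHERE clause by interpolating one of three static fragments depending on the ReplicaType, while the endpoint itself stays a bound parameter (`:cnode_endpoint_string`), which keeps the f-string interpolation injection-safe. A sketch of that selection; the enum values are assumptions, as the record only shows PRIMARY, SECONDARY, and ALL by name:

```python
from enum import Enum

class ReplicaType(Enum):
    PRIMARY = "primary"
    SECONDARY = "secondary"
    ALL = "all"

def replica_predicate(replica_type):
    """Static SQL fragment interpolated into get_users_cnode's WHERE clause."""
    if replica_type == ReplicaType.PRIMARY:
        return "t.primary = :cnode_endpoint_string AND"
    if replica_type == ReplicaType.SECONDARY:
        return ("(t.secondary1 = :cnode_endpoint_string "
                "OR t.secondary2 = :cnode_endpoint_string) AND")
    return ("(t.primary = :cnode_endpoint_string "
            "OR t.secondary1 = :cnode_endpoint_string "
            "OR t.secondary2 = :cnode_endpoint_string) AND")

assert replica_predicate(ReplicaType.PRIMARY).startswith("t.primary")
```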
The strategy to apply to compute trending\n\n Returns:\n trending_times_id: (dict) Dictionary containing the week\/month\/year trending track ids\n \"\"\"\n\n cache_args = {}\n limit = args[\"limit\"]\n if \"genre\" in args:\n cache_args[\"genre\"] = args[\"genre\"]\n\n week_trending_track_ids = get_time_trending(cache_args, \"week\", limit, strategy)\n month_trending_track_ids = get_time_trending(cache_args, \"month\", limit, strategy)\n year_trending_track_ids = get_time_trending(cache_args, \"year\", limit, strategy)\n\n return {\n \"week\": week_trending_track_ids,\n \"month\": month_trending_track_ids,\n \"year\": year_trending_track_ids,\n }","function_tokens":["def","get_trending_ids","(","args",",","strategy",")",":","cache_args","=","{","}","limit","=","args","[","\"limit\"","]","if","\"genre\"","in","args",":","cache_args","[","\"genre\"","]","=","args","[","\"genre\"","]","week_trending_track_ids","=","get_time_trending","(","cache_args",",","\"week\"",",","limit",",","strategy",")","month_trending_track_ids","=","get_time_trending","(","cache_args",",","\"month\"",",","limit",",","strategy",")","year_trending_track_ids","=","get_time_trending","(","cache_args",",","\"year\"",",","limit",",","strategy",")","return","{","\"week\"",":","week_trending_track_ids",",","\"month\"",":","month_trending_track_ids",",","\"year\"",":","year_trending_track_ids",",","}"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_trending_ids.py#L32-L59"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_related_artists.py","language":"python","identifier":"_calculate_related_artists_scores","parameters":"(\n session: Session, user_id, sample_size=None, limit=MAX_RELATED_ARTIST_COUNT\n)","argument_list":"","return_statement":"return related_artists","docstring":"Calculates the scores of related artists to the given user_id by\n querying who followers of the user_id also follow and using the scoring algorithm:\n\n `score = mutual_follower_count * percentage_of_suggested_artist_followers`","docstring_summary":"Calculates the scores of related artists to the given user_id by\n querying who followers of the user_id also follow and using the scoring algorithm:","docstring_tokens":["Calculates","the","scores","of","related","artists","to","the","given","user_id","by","querying","who","followers","of","the","user_id","also","follow","and","using","the","scoring","algorithm",":"],"function":"def _calculate_related_artists_scores(\n session: Session, user_id, sample_size=None, limit=MAX_RELATED_ARTIST_COUNT\n) -> List[RelatedArtist]:\n \"\"\"Calculates the scores of related artists to the given user_id by\n querying who followers of the user_id also follow and using the scoring algorithm:\n\n `score = mutual_follower_count * percentage_of_suggested_artist_followers`\n \"\"\"\n\n # Get all the followers of the artist\n followers_subquery = aliased(\n Follow,\n session.query(Follow.follower_user_id)\n .filter(\n Follow.followee_user_id == user_id,\n Follow.is_current,\n Follow.is_delete == False,\n )\n .subquery(name=\"followers\"),\n )\n if sample_size is None:\n followers_sampled = aliased(Follow)\n else:\n followers_sampled = aliased(\n Follow, tablesample(cast(FromClause, Follow), func.system_rows(sample_size))\n )\n\n # Find out who the followers are following\n mutual_followers_subquery = (\n session.query(\n 
followers_sampled.followee_user_id.label(\"suggested_artist_id\"),\n func.count(followers_subquery.follower_user_id).label(\n \"mutual_follower_count\"\n ),\n )\n .select_from(followers_subquery)\n .join(\n followers_sampled,\n followers_subquery.follower_user_id == followers_sampled.follower_user_id,\n )\n .filter(\n followers_sampled.is_current,\n followers_sampled.is_delete == False,\n followers_sampled.followee_user_id != user_id,\n )\n .group_by(followers_sampled.followee_user_id)\n .subquery(name=\"mutual_followers\")\n )\n\n # Score the artists gathered from the above queries by:\n #\n # score = mutual_follower_count * percentage_of_suggested_artist_followers\n #\n # Examples:\n #\n # If we're finding related artists to artist A and artist B shares 20 followers\n # with artist A, and 50% of artist B's following are followers of artist A, then\n # the score for artist B is 20 * 0.50 = 10.\n #\n # If artists A and C share 1000 followers but C has 100,000 followers total, then\n # that's only 1% of artist C's following. Artist C gets a score of 1,000 * 0.01 = 10\n #\n scoring_query = (\n session.query(\n User.user_id.label(\"related_artist_user_id\"),\n func.round(\n 1.0\n * column(\"mutual_follower_count\")\n * column(\"mutual_follower_count\")\n \/ AggregateUser.follower_count,\n 3,\n ).label(\"score\"),\n )\n .select_from(mutual_followers_subquery)\n .join(AggregateUser, AggregateUser.user_id == column(\"suggested_artist_id\"))\n .join(User, User.user_id == column(\"suggested_artist_id\"))\n .filter(\n User.is_current,\n AggregateUser.track_count > 0,\n # Should never be true, but occasionally this is the mutual artist's\n # first follow and aggregate user hasn't updated yet\n AggregateUser.follower_count > 0,\n )\n .order_by(desc(column(\"score\")), User.user_id)\n .limit(limit)\n )\n rows = scoring_query.all()\n related_artists = [\n RelatedArtist(\n user_id=user_id,\n related_artist_user_id=row.related_artist_user_id,\n score=row.score,\n )\n for row in rows\n ]\n return related_artists","function_tokens":["def","_calculate_related_artists_scores","(","session",":","Session",",","user_id",",","sample_size","=","None",",","limit","=","MAX_RELATED_ARTIST_COUNT",")","->","List","[","RelatedArtist","]",":","# Get all the followers of the artist","followers_subquery","=","aliased","(","Follow",",","session",".","query","(","Follow",".","follower_user_id",")",".","filter","(","Follow",".","followee_user_id","==","user_id",",","Follow",".","is_current",",","Follow",".","is_delete","==","False",",",")",".","subquery","(","name","=","\"followers\"",")",",",")","if","sample_size","is","None",":","followers_sampled","=","aliased","(","Follow",")","else",":","followers_sampled","=","aliased","(","Follow",",","tablesample","(","cast","(","FromClause",",","Follow",")",",","func",".","system_rows","(","sample_size",")",")",")","# Find out who the followers are 
following","mutual_followers_subquery","=","(","session",".","query","(","followers_sampled",".","followee_user_id",".","label","(","\"suggested_artist_id\"",")",",","func",".","count","(","followers_subquery",".","follower_user_id",")",".","label","(","\"mutual_follower_count\"",")",",",")",".","select_from","(","followers_subquery",")",".","join","(","followers_sampled",",","followers_subquery",".","follower_user_id","==","followers_sampled",".","follower_user_id",",",")",".","filter","(","followers_sampled",".","is_current",",","followers_sampled",".","is_delete","==","False",",","followers_sampled",".","followee_user_id","!=","user_id",",",")",".","group_by","(","followers_sampled",".","followee_user_id",")",".","subquery","(","name","=","\"mutual_followers\"",")",")","# Score the artists gathered from the above queries by:","#","# score = mutual_follower_count * percentage_of_suggested_artist_followers","#","# Examples:","#","# If we're finding related artists to artist A and artist B shares 20 followers","# with artist A, and 50% of artist B's following are followers of artist A, then","# the score for artist B is 20 * 0.50 = 10.","#","# If artists A and C share 1000 followers but C has 100,000 followers total, then","# that's only 1% of artist C's following. Artist C gets a score of 1,000 * 0.01 = 10","#","scoring_query","=","(","session",".","query","(","User",".","user_id",".","label","(","\"related_artist_user_id\"",")",",","func",".","round","(","1.0","*","column","(","\"mutual_follower_count\"",")","*","column","(","\"mutual_follower_count\"",")","\/","AggregateUser",".","follower_count",",","3",",",")",".","label","(","\"score\"",")",",",")",".","select_from","(","mutual_followers_subquery",")",".","join","(","AggregateUser",",","AggregateUser",".","user_id","==","column","(","\"suggested_artist_id\"",")",")",".","join","(","User",",","User",".","user_id","==","column","(","\"suggested_artist_id\"",")",")",".","filter","(","User",".","is_current",",","AggregateUser",".","track_count",">","0",",","# Should never be true, but occasionally this is the mutual artist's","# first follow and aggregate user hasn't updated yet","AggregateUser",".","follower_count",">","0",",",")",".","order_by","(","desc","(","column","(","\"score\"",")",")",",","User",".","user_id",")",".","limit","(","limit",")",")","rows","=","scoring_query",".","all","(",")","related_artists","=","[","RelatedArtist","(","user_id","=","user_id",",","related_artist_user_id","=","row",".","related_artist_user_id",",","score","=","row",".","score",",",")","for","row","in","rows","]","return","related_artists"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_related_artists.py#L27-L122"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_related_artists.py","language":"python","identifier":"update_related_artist_scores_if_needed","parameters":"(\n session: Session, user_id: int\n)","argument_list":"","return_statement":"return False, \"No results\"","docstring":"Checks to make sure the user specified has at least a minimum required number of followers,\n and that they don't already have fresh score calculation,\n and then if necessary calculates new related artist scores.\n\n Args:\n session (Session): the db sesssion to use for the connection\n user_id (int): the user_id of the user of which we're finding related artists\n\n Returns:\n 
bool: whether an update was needed\n str: the reason why an update was not needed, if applicable","docstring_summary":"Checks to make sure the user specified has at least a minimum required number of followers,\n and that they don't already have fresh score calculation,\n and then if necessary calculates new related artist scores.","docstring_tokens":["Checks","to","make","sure","the","user","specified","has","at","least","a","minimum","required","number","of","followers","and","that","they","don","t","already","have","fresh","score","calculation","and","then","if","necessary","calculates","new","related","artist","scores","."],"function":"def update_related_artist_scores_if_needed(\n session: Session, user_id: int\n) -> Tuple[bool, str]:\n \"\"\"Checks to make sure the user specified has at least a minimum required number of followers,\n and that they don't already have fresh score calculation,\n and then if necessary calculates new related artist scores.\n\n Args:\n session (Session): the db sesssion to use for the connection\n user_id (int): the user_id of the user of which we're finding related artists\n\n Returns:\n bool: whether an update was needed\n str: the reason why an update was not needed, if applicable\n \"\"\"\n\n # Filter by followers first, since that narrows down more users\n aggregate_user = (\n session.query(AggregateUser)\n .filter(AggregateUser.user_id == user_id)\n .one_or_none()\n )\n related_artists = []\n # Don't calculate if there's not enough followers\n if not aggregate_user or aggregate_user.follower_count < MIN_FOLLOWER_REQUIREMENT:\n return False, \"Not enough followers\"\n existing_score = (\n session.query(RelatedArtist).filter(RelatedArtist.user_id == user_id).first()\n )\n # Don't recalculate if we already have recently\n if (\n existing_score\n and existing_score.created_at > datetime.utcnow() - CALCULATION_TTL\n ):\n return (\n False,\n f\"Fresh calculation already exists. created_at={existing_score.created_at}\",\n )\n # Use table sampling if more than a certain number of followers\n if aggregate_user.follower_count >= MAX_FOLLOWERS_WITHOUT_SAMPLE:\n related_artists = _calculate_related_artists_scores(\n session, user_id, SAMPLE_SIZE_ROWS\n )\n else:\n related_artists = _calculate_related_artists_scores(session, user_id)\n if related_artists:\n session.query(RelatedArtist).filter(RelatedArtist.user_id == user_id).delete()\n session.bulk_save_objects(related_artists)\n return True, \"\"\n return False, \"No results\"","function_tokens":["def","update_related_artist_scores_if_needed","(","session",":","Session",",","user_id",":","int",")","->","Tuple","[","bool",",","str","]",":","# Filter by followers first, since that narrows down more users","aggregate_user","=","(","session",".","query","(","AggregateUser",")",".","filter","(","AggregateUser",".","user_id","==","user_id",")",".","one_or_none","(",")",")","related_artists","=","[","]","# Don't calculate if there's not enough followers","if","not","aggregate_user","or","aggregate_user",".","follower_count","<","MIN_FOLLOWER_REQUIREMENT",":","return","False",",","\"Not enough followers\"","existing_score","=","(","session",".","query","(","RelatedArtist",")",".","filter","(","RelatedArtist",".","user_id","==","user_id",")",".","first","(",")",")","# Don't recalculate if we already have recently","if","(","existing_score","and","existing_score",".","created_at",">","datetime",".","utcnow","(",")","-","CALCULATION_TTL",")",":","return","(","False",",","f\"Fresh calculation already exists. 
created_at={existing_score.created_at}\"",",",")","# Use table sampling if more than a certain number of followers","if","aggregate_user",".","follower_count",">=","MAX_FOLLOWERS_WITHOUT_SAMPLE",":","related_artists","=","_calculate_related_artists_scores","(","session",",","user_id",",","SAMPLE_SIZE_ROWS",")","else",":","related_artists","=","_calculate_related_artists_scores","(","session",",","user_id",")","if","related_artists",":","session",".","query","(","RelatedArtist",")",".","filter","(","RelatedArtist",".","user_id","==","user_id",")",".","delete","(",")","session",".","bulk_save_objects","(","related_artists",")","return","True",",","\"\"","return","False",",","\"No results\""],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_related_artists.py#L126-L175"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/confirm_indexing_transaction_error.py","language":"python","identifier":"confirm_indexing_transaction_error","parameters":"(\n redis, blocknumber, blockhash, transactionhash, message\n)","argument_list":"","return_statement":"","docstring":"Confirms that a transaction is causing an error indexing across the discovery nodes\n Gets all other discovery nodes and makes an api call to check the status of a transaction\n given a blocknumber, blockhash, and transactionhash","docstring_summary":"Confirms that a transaction is causing an error indexing across the discovery nodes\n Gets all other discovery nodes and makes an api call to check the status of a transaction\n given a blocknumber, blockhash, and transactionhash","docstring_tokens":["Confirms","that","a","transaction","is","causing","an","error","indexing","across","the","discovery","nodes","Gets","all","other","discovery","nodes","and","makes","an","api","call","to","check","the","status","of","a","transaction","given","a","blocknumber","blockhash","and","transactionhash"],"function":"def confirm_indexing_transaction_error(\n redis, blocknumber, blockhash, transactionhash, message\n):\n \"\"\"\n Confirms that a transaction is causing an error indexing across the discovery nodes\n Gets all other discovery nodes and makes an api call to check the status of a transaction\n given a blocknumber, blockhash, and transactionhash\n \"\"\"\n all_other_nodes = get_all_other_nodes()[0]\n num_other_nodes = len(all_other_nodes)\n num_transaction_failures = 0\n for node in all_other_nodes:\n try:\n endpoint = f\"{node}\/indexing\/transaction_status?blocknumber={blocknumber}&blockhash={blockhash}&transactionhash={transactionhash}\"\n response = requests.get(endpoint, timeout=10)\n if response.status_code != 200:\n raise Exception(\n f\"Query to indexing transaction status endpoint {endpoint} \\\n failed with status code {response.status_code}\"\n )\n if response.json()[\"data\"] == \"FAILED\":\n num_transaction_failures += 1\n except Exception as e:\n logger.error(e)\n\n # Mark the redis indexing error w\/ has_consensus = true so that it skips this transaction\n if (\n num_other_nodes >= 1\n and num_transaction_failures\n >= num_other_nodes * INDEXING_FAILURE_CONSENSUS_PERCENT\n ):\n set_indexing_error(\n redis, blocknumber, blockhash, transactionhash, message, True\n 
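The consensus arithmetic in confirm_indexing_transaction_error stands alone as a predicate. A sketch, with INDEXING_FAILURE_CONSENSUS_PERCENT set to an assumed placeholder value:

INDEXING_FAILURE_CONSENSUS_PERCENT = 0.5  # placeholder, assumed for illustration

def has_failure_consensus(num_other_nodes: int, num_transaction_failures: int) -> bool:
    # At least one peer must exist, and the share of peers reporting FAILED
    # must meet the consensus threshold.
    return (
        num_other_nodes >= 1
        and num_transaction_failures
        >= num_other_nodes * INDEXING_FAILURE_CONSENSUS_PERCENT
    )

assert has_failure_consensus(4, 2)      # 2 of 4 peers is exactly the 50% threshold
assert not has_failure_consensus(0, 0)  # no peers, no consensus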
)","function_tokens":["def","confirm_indexing_transaction_error","(","redis",",","blocknumber",",","blockhash",",","transactionhash",",","message",")",":","all_other_nodes","=","get_all_other_nodes","(",")","[","0","]","num_other_nodes","=","len","(","all_other_nodes",")","num_transaction_failures","=","0","for","node","in","all_other_nodes",":","try",":","endpoint","=","f\"{node}\/indexing\/transaction_status?blocknumber={blocknumber}&blockhash={blockhash}&transactionhash={transactionhash}\"","response","=","requests",".","get","(","endpoint",",","timeout","=","10",")","if","response",".","status_code","!=","200",":","raise","Exception","(","f\"Query to indexing transaction status endpoint {endpoint} \\\n failed with status code {response.status_code}\"",")","if","response",".","json","(",")","[","\"data\"","]","==","\"FAILED\"",":","num_transaction_failures","+=","1","except","Exception","as","e",":","logger",".","error","(","e",")","# Mark the redis indexing error w\/ has_consensus = true so that it skips this transaction","if","(","num_other_nodes",">=","1","and","num_transaction_failures",">=","num_other_nodes","*","INDEXING_FAILURE_CONSENSUS_PERCENT",")",":","set_indexing_error","(","redis",",","blocknumber",",","blockhash",",","transactionhash",",","message",",","True",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/confirm_indexing_transaction_error.py#L13-L46"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_top_users.py","language":"python","identifier":"get_top_users","parameters":"(current_user_id)","argument_list":"","return_statement":"return top_users","docstring":"Gets the top users by follows of all of Audius","docstring_summary":"Gets the top users by follows of all of Audius","docstring_tokens":["Gets","the","top","users","by","follows","of","all","of","Audius"],"function":"def get_top_users(current_user_id):\n \"\"\"Gets the top users by follows of all of Audius\"\"\"\n top_users = []\n db = get_db_read_replica()\n with db.scoped_session() as session:\n top_users = (\n session.query(User)\n .select_from(AggregateUser)\n .join(User, User.user_id == AggregateUser.user_id)\n .filter(AggregateUser.track_count > 0, User.is_current)\n .order_by(desc(AggregateUser.follower_count), User.user_id)\n )\n top_users = paginate_query(top_users).all()\n top_users = helpers.query_result_to_list(top_users)\n user_ids = list(map(lambda user: user[\"user_id\"], top_users))\n top_users = populate_user_metadata(\n session, user_ids, top_users, current_user_id\n )\n return 
top_users","function_tokens":["def","get_top_users","(","current_user_id",")",":","top_users","=","[","]","db","=","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","top_users","=","(","session",".","query","(","User",")",".","select_from","(","AggregateUser",")",".","join","(","User",",","User",".","user_id","==","AggregateUser",".","user_id",")",".","filter","(","AggregateUser",".","track_count",">","0",",","User",".","is_current",")",".","order_by","(","desc","(","AggregateUser",".","follower_count",")",",","User",".","user_id",")",")","top_users","=","paginate_query","(","top_users",")",".","all","(",")","top_users","=","helpers",".","query_result_to_list","(","top_users",")","user_ids","=","list","(","map","(","lambda","user",":","user","[","\"user_id\"","]",",","top_users",")",")","top_users","=","populate_user_metadata","(","session",",","user_ids",",","top_users",",","current_user_id",")","return","top_users"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_top_users.py#L7-L25"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_unpopulated_users.py","language":"python","identifier":"get_unpopulated_users","parameters":"(session, user_ids)","argument_list":"","return_statement":"return users_response","docstring":"Fetches users by checking the redis cache first then\n going to DB and writes to cache if not present\n\n Args:\n session: DB session\n user_ids: array A list of user ids\n\n Returns:\n Array of users","docstring_summary":"Fetches users by checking the redis cache first then\n going to DB and writes to cache if not present","docstring_tokens":["Fetches","users","by","checking","the","redis","cache","first","then","going","to","DB","and","writes","to","cache","if","not","present"],"function":"def get_unpopulated_users(session, user_ids):\n \"\"\"\n Fetches users by checking the redis cache first then\n going to DB and writes to cache if not present\n\n Args:\n session: DB session\n user_ids: array A list of user ids\n\n Returns:\n Array of users\n \"\"\"\n cached_users_results = get_cached_users(user_ids)\n has_all_users_cached = cached_users_results.count(None) == 0\n if has_all_users_cached:\n return cached_users_results\n\n cached_users = {}\n for cached_user in cached_users_results:\n if cached_user:\n cached_users[cached_user[\"user_id\"]] = cached_user\n\n user_ids_to_fetch = filter(lambda user_id: user_id not in cached_users, user_ids)\n\n users = (\n session.query(User)\n .filter(User.is_current == True, User.wallet != None, User.handle != None)\n .filter(User.user_id.in_(user_ids_to_fetch))\n .all()\n )\n users = helpers.query_result_to_list(users)\n queried_users = {user[\"user_id\"]: user for user in users}\n\n set_users_in_cache(users)\n\n users_response = []\n for user_id in user_ids:\n if user_id in cached_users:\n users_response.append(cached_users[user_id])\n elif user_id in queried_users:\n users_response.append(queried_users[user_id])\n\n return 
users_response","function_tokens":["def","get_unpopulated_users","(","session",",","user_ids",")",":","cached_users_results","=","get_cached_users","(","user_ids",")","has_all_users_cached","=","cached_users_results",".","count","(","None",")","==","0","if","has_all_users_cached",":","return","cached_users_results","cached_users","=","{","}","for","cached_user","in","cached_users_results",":","if","cached_user",":","cached_users","[","cached_user","[","\"user_id\"","]","]","=","cached_user","user_ids_to_fetch","=","filter","(","lambda","user_id",":","user_id","not","in","cached_users",",","user_ids",")","users","=","(","session",".","query","(","User",")",".","filter","(","User",".","is_current","==","True",",","User",".","wallet","!=","None",",","User",".","handle","!=","None",")",".","filter","(","User",".","user_id",".","in_","(","user_ids_to_fetch",")",")",".","all","(",")",")","users","=","helpers",".","query_result_to_list","(","users",")","queried_users","=","{","user","[","\"user_id\"","]",":","user","for","user","in","users","}","set_users_in_cache","(","users",")","users_response","=","[","]","for","user_id","in","user_ids",":","if","user_id","in","cached_users",":","users_response",".","append","(","cached_users","[","user_id","]",")","elif","user_id","in","queried_users",":","users_response",".","append","(","queried_users","[","user_id","]",")","return","users_response"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_unpopulated_users.py#L41-L83"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_app_names.py","language":"python","identifier":"get_app_names","parameters":"(args)","argument_list":"","return_statement":"","docstring":"Returns a list of app names\n\n Args:\n args: dict The parsed args from the request\n args.offset: number The offset to start querying from\n args.limit: number The max number of queries to return\n args.start_time: date The start of the query\n args.include_unknown: bool Whether or not to include a line item for unknown\n\n Returns:\n Array of dictionaries with name, count, and unique_count fields","docstring_summary":"Returns a list of app names","docstring_tokens":["Returns","a","list","of","app","names"],"function":"def get_app_names(args):\n \"\"\"\n Returns a list of app names\n\n Args:\n args: dict The parsed args from the request\n args.offset: number The offset to start querying from\n args.limit: number The max number of queries to return\n args.start_time: date The start of the query\n args.include_unknown: bool Whether or not to include a line item for unknown\n\n Returns:\n Array of dictionaries with name, count, and unique_count fields\n \"\"\"\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n app_names = (\n session.query(\n AppNameMetrics.application_name,\n func.sum(AppNameMetrics.count).label(\"count\"),\n func.count(AppNameMetrics.ip.distinct()),\n )\n .filter(AppNameMetrics.timestamp > args.get(\"start_time\"))\n .group_by(AppNameMetrics.application_name)\n .order_by(desc(\"count\"), asc(AppNameMetrics.application_name))\n .limit(args.get(\"limit\"))\n .offset(args.get(\"offset\"))\n .all()\n )\n\n names = [\n {\"name\": app_name[0], \"count\": app_name[1], \"unique_count\": app_name[2]}\n for app_name in app_names\n ]\n\n if args.get(\"include_unknown\", False):\n existing_count = reduce(lambda x, y: x + 
y[\"count\"], names, 0)\n existing_unique_count = reduce(lambda x, y: x + y[\"unique_count\"], names, 0)\n total_requests = (\n session.query(\n func.sum(RouteMetrics.count).label(\"count\"),\n func.count(RouteMetrics.ip.distinct()),\n )\n .filter(RouteMetrics.timestamp > args.get(\"start_time\"))\n .first()\n )\n unknown_count = total_requests[0] - existing_count\n unique_count = total_requests[1] - existing_unique_count\n # Insert unique counts \"in order\" (desc by count)\n for i, name in enumerate(names[:]):\n if unknown_count > name[\"count\"] or i == len(names):\n names.insert(\n i,\n {\n \"name\": \"unknown\",\n \"count\": unknown_count,\n \"unique_count\": unique_count,\n },\n )\n break\n\n return names","function_tokens":["def","get_app_names","(","args",")",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","app_names","=","(","session",".","query","(","AppNameMetrics",".","application_name",",","func",".","sum","(","AppNameMetrics",".","count",")",".","label","(","\"count\"",")",",","func",".","count","(","AppNameMetrics",".","ip",".","distinct","(",")",")",",",")",".","filter","(","AppNameMetrics",".","timestamp",">","args",".","get","(","\"start_time\"",")",")",".","group_by","(","AppNameMetrics",".","application_name",")",".","order_by","(","desc","(","\"count\"",")",",","asc","(","AppNameMetrics",".","application_name",")",")",".","limit","(","args",".","get","(","\"limit\"",")",")",".","offset","(","args",".","get","(","\"offset\"",")",")",".","all","(",")",")","names","=","[","{","\"name\"",":","app_name","[","0","]",",","\"count\"",":","app_name","[","1","]",",","\"unique_count\"",":","app_name","[","2","]","}","for","app_name","in","app_names","]","if","args",".","get","(","\"include_unknown\"",",","False",")",":","existing_count","=","reduce","(","lambda","x",",","y",":","x","+","y","[","\"count\"","]",",","names",",","0",")","existing_unique_count","=","reduce","(","lambda","x",",","y",":","x","+","y","[","\"unique_count\"","]",",","names",",","0",")","total_requests","=","(","session",".","query","(","func",".","sum","(","RouteMetrics",".","count",")",".","label","(","\"count\"",")",",","func",".","count","(","RouteMetrics",".","ip",".","distinct","(",")",")",",",")",".","filter","(","RouteMetrics",".","timestamp",">","args",".","get","(","\"start_time\"",")",")",".","first","(",")",")","unknown_count","=","total_requests","[","0","]","-","existing_count","unique_count","=","total_requests","[","1","]","-","existing_unique_count","# Insert unique counts \"in order\" (desc by count)","for","i",",","name","in","enumerate","(","names","[",":","]",")",":","if","unknown_count",">","name","[","\"count\"","]","or","i","==","len","(","names",")",":","names",".","insert","(","i",",","{","\"name\"",":","\"unknown\"",",","\"count\"",":","unknown_count",",","\"unique_count\"",":","unique_count",",","}",",",")","break","return","names"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_app_names.py#L11-L72"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_remixable_tracks.py","language":"python","identifier":"get_remixable_tracks","parameters":"(args)","argument_list":"","return_statement":"return tracks","docstring":"Gets a list of remixable tracks","docstring_summary":"Gets a list of remixable 
tracks","docstring_tokens":["Gets","a","list","of","remixable","tracks"],"function":"def get_remixable_tracks(args):\n \"\"\"Gets a list of remixable tracks\"\"\"\n db = get_db_read_replica()\n limit = args.get(\"limit\", 25)\n current_user_id = args.get(\"current_user_id\", None)\n\n StemTrack = aliased(Track)\n\n with db.scoped_session() as session:\n # Subquery to get current tracks that have stems\n remixable_tracks_subquery = (\n session.query(Track)\n .join(Stem, Stem.parent_track_id == Track.track_id)\n .join(StemTrack, Stem.child_track_id == StemTrack.track_id)\n .filter(\n Track.is_current == True,\n Track.is_unlisted == False,\n Track.is_delete == False,\n StemTrack.is_current == True,\n StemTrack.is_unlisted == False,\n StemTrack.is_delete == False,\n )\n .distinct(Track.track_id)\n .subquery()\n )\n track_alias = aliased(Track, remixable_tracks_subquery)\n\n count_subquery = session.query(\n AggregateTrack.track_id.label(\"id\"),\n (AggregateTrack.repost_count + AggregateTrack.save_count).label(\"count\"),\n ).subquery()\n\n query = (\n session.query(\n track_alias,\n count_subquery.c[\"count\"],\n decayed_score(count_subquery.c[\"count\"], track_alias.created_at).label(\n \"score\"\n ),\n )\n .join(\n count_subquery,\n count_subquery.c[\"id\"] == track_alias.track_id,\n )\n .order_by(desc(\"score\"), desc(track_alias.track_id))\n .limit(limit)\n )\n\n results = query.all()\n\n tracks = []\n for result in results:\n track = result[0]\n score = result[-1]\n track = helpers.model_to_dictionary(track)\n track[\"score\"] = score\n tracks.append(track)\n\n track_ids = list(map(lambda track: track[\"track_id\"], tracks))\n\n # Get user specific data for tracks\n tracks = populate_track_metadata(session, track_ids, tracks, current_user_id)\n\n if args.get(\"with_users\", False):\n add_users_to_tracks(session, tracks, current_user_id)\n else:\n # Remove the user from the tracks\n tracks = [\n {key: val for key, val in dict.items() if key != \"user\"}\n for dict in tracks\n ]\n\n return tracks","function_tokens":["def","get_remixable_tracks","(","args",")",":","db","=","get_db_read_replica","(",")","limit","=","args",".","get","(","\"limit\"",",","25",")","current_user_id","=","args",".","get","(","\"current_user_id\"",",","None",")","StemTrack","=","aliased","(","Track",")","with","db",".","scoped_session","(",")","as","session",":","# Subquery to get current tracks that have 
stems","remixable_tracks_subquery","=","(","session",".","query","(","Track",")",".","join","(","Stem",",","Stem",".","parent_track_id","==","Track",".","track_id",")",".","join","(","StemTrack",",","Stem",".","child_track_id","==","StemTrack",".","track_id",")",".","filter","(","Track",".","is_current","==","True",",","Track",".","is_unlisted","==","False",",","Track",".","is_delete","==","False",",","StemTrack",".","is_current","==","True",",","StemTrack",".","is_unlisted","==","False",",","StemTrack",".","is_delete","==","False",",",")",".","distinct","(","Track",".","track_id",")",".","subquery","(",")",")","track_alias","=","aliased","(","Track",",","remixable_tracks_subquery",")","count_subquery","=","session",".","query","(","AggregateTrack",".","track_id",".","label","(","\"id\"",")",",","(","AggregateTrack",".","repost_count","+","AggregateTrack",".","save_count",")",".","label","(","\"count\"",")",",",")",".","subquery","(",")","query","=","(","session",".","query","(","track_alias",",","count_subquery",".","c","[","\"count\"","]",",","decayed_score","(","count_subquery",".","c","[","\"count\"","]",",","track_alias",".","created_at",")",".","label","(","\"score\"",")",",",")",".","join","(","count_subquery",",","count_subquery",".","c","[","\"id\"","]","==","track_alias",".","track_id",",",")",".","order_by","(","desc","(","\"score\"",")",",","desc","(","track_alias",".","track_id",")",")",".","limit","(","limit",")",")","results","=","query",".","all","(",")","tracks","=","[","]","for","result","in","results",":","track","=","result","[","0","]","score","=","result","[","-","1","]","track","=","helpers",".","model_to_dictionary","(","track",")","track","[","\"score\"","]","=","score","tracks",".","append","(","track",")","track_ids","=","list","(","map","(","lambda","track",":","track","[","\"track_id\"","]",",","tracks",")",")","# Get user specific data for tracks","tracks","=","populate_track_metadata","(","session",",","track_ids",",","tracks",",","current_user_id",")","if","args",".","get","(","\"with_users\"",",","False",")",":","add_users_to_tracks","(","session",",","tracks",",","current_user_id",")","else",":","# Remove the user from the tracks","tracks","=","[","{","key",":","val","for","key",",","val","in","dict",".","items","(",")","if","key","!=","\"user\"","}","for","dict","in","tracks","]","return","tracks"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_remixable_tracks.py#L13-L85"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_skipped_transactions.py","language":"python","identifier":"get_transaction_status","parameters":"(blocknumber, blockhash, txhash)","argument_list":"","return_statement":"","docstring":"Gets the indexing transaction status: 'PASSED', 'FAILED', or 'NOT_FOUND'\n given a blocknumber, blockhash, and transaction\n first checks whether there is an indexing error in reduis\n and whether the entry matches the given params\n otherwise checks the skipped_transactions in the database","docstring_summary":"Gets the indexing transaction status: 'PASSED', 'FAILED', or 'NOT_FOUND'\n given a blocknumber, blockhash, and transaction\n first checks whether there is an indexing error in reduis\n and whether the entry matches the given params\n otherwise checks the skipped_transactions in the 
database","docstring_tokens":["Gets","the","indexing","transaction","status",":","PASSED","FAILED","or","NOT_FOUND","given","a","blocknumber","blockhash","and","transaction","first","checks","whether","there","is","an","indexing","error","in","reduis","and","whether","the","entry","matches","the","given","params","otherwise","checks","the","skipped_transactions","in","the","database"],"function":"def get_transaction_status(blocknumber, blockhash, txhash):\n \"\"\"Gets the indexing transaction status: 'PASSED', 'FAILED', or 'NOT_FOUND'\n given a blocknumber, blockhash, and transaction\n first checks whether there is an indexing error in reduis\n and whether the entry matches the given params\n otherwise checks the skipped_transactions in the database\n \"\"\"\n indexing_error = get_indexing_error(REDIS)\n\n if indexing_error:\n blocknumber_match = (\n \"blocknumber\" in indexing_error\n and indexing_error[\"blocknumber\"] == blocknumber\n )\n blockhash_match = (\n \"blockhash\" in indexing_error and indexing_error[\"blockhash\"] == blockhash\n )\n txhash_match = \"txhash\" in indexing_error and indexing_error[\"txhash\"] == txhash\n if blocknumber_match and blockhash_match and txhash_match:\n return \"FAILED\"\n\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n skipped_transactions_results = (\n session.query(SkippedTransaction)\n .filter(\n SkippedTransaction.blocknumber == blocknumber,\n SkippedTransaction.blockhash == blockhash,\n SkippedTransaction.txhash == txhash,\n )\n .all()\n )\n if len(skipped_transactions_results) > 1:\n raise Exception(\n f\"Expected no more than 1 row for skipped indexing transaction with \\\n blocknumber={blocknumber}, blockhash={blockhash}, txhash={txhash}\"\n )\n if len(skipped_transactions_results) == 1:\n return \"FAILED\"\n\n block_transaction_results = (\n session.query(Block)\n .filter(Block.number == blocknumber, Block.blockhash == blockhash)\n .all()\n )\n if len(block_transaction_results) > 1:\n raise Exception(\n f\"Expected no more than 1 row for blocknumber={blocknumber}, blockhash={blockhash}\"\n )\n if len(block_transaction_results) == 1:\n return \"PASSED\"\n\n return \"NOT_FOUND\"","function_tokens":["def","get_transaction_status","(","blocknumber",",","blockhash",",","txhash",")",":","indexing_error","=","get_indexing_error","(","REDIS",")","if","indexing_error",":","blocknumber_match","=","(","\"blocknumber\"","in","indexing_error","and","indexing_error","[","\"blocknumber\"","]","==","blocknumber",")","blockhash_match","=","(","\"blockhash\"","in","indexing_error","and","indexing_error","[","\"blockhash\"","]","==","blockhash",")","txhash_match","=","\"txhash\"","in","indexing_error","and","indexing_error","[","\"txhash\"","]","==","txhash","if","blocknumber_match","and","blockhash_match","and","txhash_match",":","return","\"FAILED\"","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","skipped_transactions_results","=","(","session",".","query","(","SkippedTransaction",")",".","filter","(","SkippedTransaction",".","blocknumber","==","blocknumber",",","SkippedTransaction",".","blockhash","==","blockhash",",","SkippedTransaction",".","txhash","==","txhash",",",")",".","all","(",")",")","if","len","(","skipped_transactions_results",")",">","1",":","raise","Exception","(","f\"Expected no more than 1 row for skipped indexing transaction with \\\n blocknumber={blocknumber}, blockhash={blockhash}, 
txhash={txhash}\"",")","if","len","(","skipped_transactions_results",")","==","1",":","return","\"FAILED\"","block_transaction_results","=","(","session",".","query","(","Block",")",".","filter","(","Block",".","number","==","blocknumber",",","Block",".","blockhash","==","blockhash",")",".","all","(",")",")","if","len","(","block_transaction_results",")",">","1",":","raise","Exception","(","f\"Expected no more than 1 row for blocknumber={blocknumber}, blockhash={blockhash}\"",")","if","len","(","block_transaction_results",")","==","1",":","return","\"PASSED\"","return","\"NOT_FOUND\""],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_skipped_transactions.py#L41-L93"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/search_track_tags.py","language":"python","identifier":"search_track_tags","parameters":"(session, args)","argument_list":"","return_statement":"return play_count_sorted_tracks","docstring":"Gets the tracks with a given tag\n\n Args:\n session: sqlalchemy db session instance\n args: dict of arguments\n args.search_str: string the tag search string\n args.current_user_id: id | null The user id making the query\n args.limit: number the query limit of number of returns tracks\n args.offset: number the query offset for results\n\n Returns:\n list of tracks sorted by play count","docstring_summary":"Gets the tracks with a given tag","docstring_tokens":["Gets","the","tracks","with","a","given","tag"],"function":"def search_track_tags(session, args):\n \"\"\"\n Gets the tracks with a given tag\n\n Args:\n session: sqlalchemy db session instance\n args: dict of arguments\n args.search_str: string the tag search string\n args.current_user_id: id | null The user id making the query\n args.limit: number the query limit of number of returns tracks\n args.offset: number the query offset for results\n\n Returns:\n list of tracks sorted by play count\n \"\"\"\n\n track_ids = (\n session.query(TagTrackUserMatview.track_id)\n .filter(TagTrackUserMatview.tag == args[\"search_str\"].lower())\n .all()\n )\n\n # track_ids is list of tuples - simplify to 1-D list\n track_ids = [i[0] for i in track_ids]\n\n tracks = (\n session.query(Track)\n .filter(\n Track.is_current == True,\n Track.is_delete == False,\n Track.is_unlisted == False,\n Track.stem_of == None,\n Track.track_id.in_(track_ids),\n )\n .all()\n )\n\n tracks = helpers.query_result_to_list(tracks)\n track_play_counts = get_track_play_counts(session, track_ids)\n\n tracks = populate_track_metadata(\n session, track_ids, tracks, args[\"current_user_id\"]\n )\n\n for track in tracks:\n track_id = track[\"track_id\"]\n track[response_name_constants.play_count] = track_play_counts.get(track_id, 0)\n\n play_count_sorted_tracks = sorted(\n tracks, key=lambda i: i[response_name_constants.play_count], reverse=True\n )\n\n # Add pagination parameters to track and user results\n play_count_sorted_tracks = play_count_sorted_tracks[\n slice(args[\"offset\"], args[\"offset\"] + args[\"limit\"], 1)\n ]\n\n return play_count_sorted_tracks","function_tokens":["def","search_track_tags","(","session",",","args",")",":","track_ids","=","(","session",".","query","(","TagTrackUserMatview",".","track_id",")",".","filter","(","TagTrackUserMatview",".","tag","==","args","[","\"search_str\"","]",".","lower","(",")",")",".","all","(",")",")","# track_ids is list of tuples - simplify to 1-D 
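The tail of search_track_tags is ordinary sort-then-slice pagination. A sketch using a plain "play_count" key in place of response_name_constants.play_count:

def paginate_by_play_count(tracks, offset, limit):
    # Highest play count first, then the requested window.
    ranked = sorted(tracks, key=lambda t: t["play_count"], reverse=True)
    return ranked[offset : offset + limit]

tracks = [{"track_id": 1, "play_count": 3}, {"track_id": 2, "play_count": 9}]
assert [t["track_id"] for t in paginate_by_play_count(tracks, 0, 1)] == [2]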
list","track_ids","=","[","i","[","0","]","for","i","in","track_ids","]","tracks","=","(","session",".","query","(","Track",")",".","filter","(","Track",".","is_current","==","True",",","Track",".","is_delete","==","False",",","Track",".","is_unlisted","==","False",",","Track",".","stem_of","==","None",",","Track",".","track_id",".","in_","(","track_ids",")",",",")",".","all","(",")",")","tracks","=","helpers",".","query_result_to_list","(","tracks",")","track_play_counts","=","get_track_play_counts","(","session",",","track_ids",")","tracks","=","populate_track_metadata","(","session",",","track_ids",",","tracks",",","args","[","\"current_user_id\"","]",")","for","track","in","tracks",":","track_id","=","track","[","\"track_id\"","]","track","[","response_name_constants",".","play_count","]","=","track_play_counts",".","get","(","track_id",",","0",")","play_count_sorted_tracks","=","sorted","(","tracks",",","key","=","lambda","i",":","i","[","response_name_constants",".","play_count","]",",","reverse","=","True",")","# Add pagination parameters to track and user results","play_count_sorted_tracks","=","play_count_sorted_tracks","[","slice","(","args","[","\"offset\"","]",",","args","[","\"offset\"","]","+","args","[","\"limit\"","]",",","1",")","]","return","play_count_sorted_tracks"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/search_track_tags.py#L11-L68"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_trending_tracks.py","language":"python","identifier":"make_trending_cache_key","parameters":"(\n time_range, genre, version=DEFAULT_TRENDING_VERSIONS[TrendingType.TRACKS]\n)","argument_list":"","return_statement":"return f\"generated-trending{version_name}:{time_range}:{(genre.lower() if genre else '')}\"","docstring":"Makes a cache key resembling `generated-trending:week:electronic`","docstring_summary":"Makes a cache key resembling `generated-trending:week:electronic`","docstring_tokens":["Makes","a","cache","key","resembling","generated","-","trending",":","week",":","electronic"],"function":"def make_trending_cache_key(\n time_range, genre, version=DEFAULT_TRENDING_VERSIONS[TrendingType.TRACKS]\n):\n \"\"\"Makes a cache key resembling `generated-trending:week:electronic`\"\"\"\n version_name = (\n f\":{version.name}\"\n if version != DEFAULT_TRENDING_VERSIONS[TrendingType.TRACKS]\n else \"\"\n )\n return f\"generated-trending{version_name}:{time_range}:{(genre.lower() if genre else '')}\"","function_tokens":["def","make_trending_cache_key","(","time_range",",","genre",",","version","=","DEFAULT_TRENDING_VERSIONS","[","TrendingType",".","TRACKS","]",")",":","version_name","=","(","f\":{version.name}\"","if","version","!=","DEFAULT_TRENDING_VERSIONS","[","TrendingType",".","TRACKS","]","else","\"\"",")","return","f\"generated-trending{version_name}:{time_range}:{(genre.lower() if genre else '')}\""],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_trending_tracks.py#L26-L35"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_trending_tracks.py","language":"python","identifier":"make_generate_unpopulated_trending","parameters":"(session, genre, time_range, strategy)","argument_list":"","return_statement":"return 
wrapped","docstring":"Wraps a call to `generate_unpopulated_trending` for use in `use_redis_cache`, which\n expects to be passed a function with no arguments.","docstring_summary":"Wraps a call to `generate_unpopulated_trending` for use in `use_redis_cache`, which\n expects to be passed a function with no arguments.","docstring_tokens":["Wraps","a","call","to","generate_unpopulated_trending","for","use","in","use_redis_cache","which","expects","to","be","passed","a","function","with","no","arguments","."],"function":"def make_generate_unpopulated_trending(session, genre, time_range, strategy):\n \"\"\"Wraps a call to `generate_unpopulated_trending` for use in `use_redis_cache`, which\n expects to be passed a function with no arguments.\"\"\"\n\n def wrapped():\n if strategy.use_mat_view:\n return generate_unpopulated_trending_from_mat_views(\n session, genre, time_range, strategy\n )\n return generate_unpopulated_trending(session, genre, time_range, strategy)\n\n return wrapped","function_tokens":["def","make_generate_unpopulated_trending","(","session",",","genre",",","time_range",",","strategy",")",":","def","wrapped","(",")",":","if","strategy",".","use_mat_view",":","return","generate_unpopulated_trending_from_mat_views","(","session",",","genre",",","time_range",",","strategy",")","return","generate_unpopulated_trending","(","session",",","genre",",","time_range",",","strategy",")","return","wrapped"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_trending_tracks.py#L93-L104"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_trending_tracks.py","language":"python","identifier":"get_trending_tracks","parameters":"(args: GetTrendingTracksArgs, strategy: BaseTrendingStrategy)","argument_list":"","return_statement":"","docstring":"Gets trending by getting the currently cached tracks and then populating them.","docstring_summary":"Gets trending by getting the currently cached tracks and then populating them.","docstring_tokens":["Gets","trending","by","getting","the","currently","cached","tracks","and","then","populating","them","."],"function":"def get_trending_tracks(args: GetTrendingTracksArgs, strategy: BaseTrendingStrategy):\n \"\"\"Gets trending by getting the currently cached tracks and then populating them.\"\"\"\n db = get_db_read_replica()\n with db.scoped_session() as session:\n return _get_trending_tracks_with_session(session, args, strategy)","function_tokens":["def","get_trending_tracks","(","args",":","GetTrendingTracksArgs",",","strategy",":","BaseTrendingStrategy",")",":","db","=","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_trending_tracks_with_session","(","session",",","args",",","strategy",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_trending_tracks.py#L113-L117"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_trending.py","language":"python","identifier":"get_trending","parameters":"(args, strategy)","argument_list":"","return_statement":"return list(map(extend_track, tracks))","docstring":"Get Trending, shared between full and regular endpoints.","docstring_summary":"Get Trending, shared between full and regular 
endpoints.","docstring_tokens":["Get","Trending","shared","between","full","and","regular","endpoints","."],"function":"def get_trending(args, strategy):\n \"\"\"Get Trending, shared between full and regular endpoints.\"\"\"\n # construct args\n time = args.get(\"time\") if args.get(\"time\") is not None else \"week\"\n current_user_id = args.get(\"user_id\")\n args = {\n \"time\": time,\n \"genre\": args.get(\"genre\", None),\n \"with_users\": True,\n \"limit\": TRENDING_LIMIT,\n \"offset\": 0,\n }\n\n # decode and add user_id if necessary\n if current_user_id:\n decoded_id = decode_string_id(current_user_id)\n args[\"current_user_id\"] = decoded_id\n\n tracks = get_trending_tracks(args, strategy)\n return list(map(extend_track, tracks))","function_tokens":["def","get_trending","(","args",",","strategy",")",":","# construct args","time","=","args",".","get","(","\"time\"",")","if","args",".","get","(","\"time\"",")","is","not","None","else","\"week\"","current_user_id","=","args",".","get","(","\"user_id\"",")","args","=","{","\"time\"",":","time",",","\"genre\"",":","args",".","get","(","\"genre\"",",","None",")",",","\"with_users\"",":","True",",","\"limit\"",":","TRENDING_LIMIT",",","\"offset\"",":","0",",","}","# decode and add user_id if necessary","if","current_user_id",":","decoded_id","=","decode_string_id","(","current_user_id",")","args","[","\"current_user_id\"","]","=","decoded_id","tracks","=","get_trending_tracks","(","args",",","strategy",")","return","list","(","map","(","extend_track",",","tracks",")",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_trending.py#L15-L34"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_associated_user_wallet.py","language":"python","identifier":"get_associated_user_wallet","parameters":"(args: AssociatedUserWalletArgs)","argument_list":"","return_statement":"","docstring":"Returns a list of associated wallets\n\n Args:\n args: dict The parsed args from the request\n args.user_id: number The blockchain user id\n\n Returns:\n Array of strings representing the user's associated wallets","docstring_summary":"Returns a list of associated wallets","docstring_tokens":["Returns","a","list","of","associated","wallets"],"function":"def get_associated_user_wallet(args: AssociatedUserWalletArgs) -> AssociatedUserWallet:\n \"\"\"\n Returns a list of associated wallets\n\n Args:\n args: dict The parsed args from the request\n args.user_id: number The blockchain user id\n\n Returns:\n Array of strings representing the user's associated wallets\n \"\"\"\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n user_wallet: List[Tuple[str, str]] = (\n session.query(AssociatedWallet.wallet, AssociatedWallet.chain)\n .filter(AssociatedWallet.is_current == True)\n .filter(AssociatedWallet.is_delete == False)\n .filter(AssociatedWallet.user_id == args.get(\"user_id\"))\n .all()\n )\n eth_wallets: List[str] = []\n sol_wallets: List[str] = []\n for wallet, chain in user_wallet:\n if chain == \"eth\":\n eth_wallets.append(wallet)\n elif chain == \"sol\":\n sol_wallets.append(wallet)\n return {\"eth\": eth_wallets, \"sol\": 
sol_wallets}","function_tokens":["def","get_associated_user_wallet","(","args",":","AssociatedUserWalletArgs",")","->","AssociatedUserWallet",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","user_wallet",":","List","[","Tuple","[","str",",","str","]","]","=","(","session",".","query","(","AssociatedWallet",".","wallet",",","AssociatedWallet",".","chain",")",".","filter","(","AssociatedWallet",".","is_current","==","True",")",".","filter","(","AssociatedWallet",".","is_delete","==","False",")",".","filter","(","AssociatedWallet",".","user_id","==","args",".","get","(","\"user_id\"",")",")",".","all","(",")",")","eth_wallets",":","List","[","str","]","=","[","]","sol_wallets",":","List","[","str","]","=","[","]","for","wallet",",","chain","in","user_wallet",":","if","chain","==","\"eth\"",":","eth_wallets",".","append","(","wallet",")","elif","chain","==","\"sol\"",":","sol_wallets",".","append","(","wallet",")","return","{","\"eth\"",":","eth_wallets",",","\"sol\"",":","sol_wallets","}"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_associated_user_wallet.py#L19-L46"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/search_queries.py","language":"python","identifier":"compare_users","parameters":"(user1, user2)","argument_list":"","return_statement":"return 0","docstring":"Comparison util for ordering user search results.","docstring_summary":"Comparison util for ordering user search results.","docstring_tokens":["Comparison","util","for","ordering","user","search","results","."],"function":"def compare_users(user1, user2):\n \"\"\"Comparison util for ordering user search results.\"\"\"\n # Any verified user is ranked higher\n if user1[\"is_verified\"] and not user2[\"is_verified\"]:\n return -1\n if user2[\"is_verified\"] and not user1[\"is_verified\"]:\n return 1\n return 0","function_tokens":["def","compare_users","(","user1",",","user2",")",":","# Any verified user is ranked higher","if","user1","[","\"is_verified\"","]","and","not","user2","[","\"is_verified\"","]",":","return","-","1","if","user2","[","\"is_verified\"","]","and","not","user1","[","\"is_verified\"","]",":","return","1","return","0"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/search_queries.py#L58-L65"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/search_queries.py","language":"python","identifier":"perform_search_query","parameters":"(db, search_type, args)","argument_list":"","return_statement":"","docstring":"Performs a search query of a given `search_type`. Handles it's own session. Used concurrently.","docstring_summary":"Performs a search query of a given `search_type`. Handles it's own session. Used concurrently.","docstring_tokens":["Performs","a","search","query","of","a","given","search_type",".","Handles","it","s","own","session",".","Used","concurrently","."],"function":"def perform_search_query(db, search_type, args):\n \"\"\"Performs a search query of a given `search_type`. Handles it's own session. 
Used concurrently.\"\"\"\n with db.scoped_session() as session:\n search_str = args.get(\"search_str\")\n limit = args.get(\"limit\")\n offset = args.get(\"offset\")\n is_auto_complete = args.get(\"is_auto_complete\")\n current_user_id = args.get(\"current_user_id\")\n only_downloadable = args.get(\"only_downloadable\")\n\n results = None\n if search_type == \"tracks\":\n results = track_search_query(\n session,\n search_str,\n limit,\n offset,\n is_auto_complete,\n current_user_id,\n only_downloadable,\n )\n elif search_type == \"users\":\n results = user_search_query(\n session,\n search_str,\n limit,\n offset,\n is_auto_complete,\n current_user_id,\n )\n elif search_type == \"playlists\":\n results = playlist_search_query(\n session,\n search_str,\n limit,\n offset,\n False,\n is_auto_complete,\n current_user_id,\n )\n elif search_type == \"albums\":\n results = playlist_search_query(\n session,\n search_str,\n limit,\n offset,\n True,\n is_auto_complete,\n current_user_id,\n )\n return results","function_tokens":["def","perform_search_query","(","db",",","search_type",",","args",")",":","with","db",".","scoped_session","(",")","as","session",":","search_str","=","args",".","get","(","\"search_str\"",")","limit","=","args",".","get","(","\"limit\"",")","offset","=","args",".","get","(","\"offset\"",")","is_auto_complete","=","args",".","get","(","\"is_auto_complete\"",")","current_user_id","=","args",".","get","(","\"current_user_id\"",")","only_downloadable","=","args",".","get","(","\"only_downloadable\"",")","results","=","None","if","search_type","==","\"tracks\"",":","results","=","track_search_query","(","session",",","search_str",",","limit",",","offset",",","is_auto_complete",",","current_user_id",",","only_downloadable",",",")","elif","search_type","==","\"users\"",":","results","=","user_search_query","(","session",",","search_str",",","limit",",","offset",",","is_auto_complete",",","current_user_id",",",")","elif","search_type","==","\"playlists\"",":","results","=","playlist_search_query","(","session",",","search_str",",","limit",",","offset",",","False",",","is_auto_complete",",","current_user_id",",",")","elif","search_type","==","\"albums\"",":","results","=","playlist_search_query","(","session",",","search_str",",","limit",",","offset",",","True",",","is_auto_complete",",","current_user_id",",",")","return","results"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/search_queries.py#L188-L238"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/search_queries.py","language":"python","identifier":"search","parameters":"(args)","argument_list":"","return_statement":"return results","docstring":"Perform a search. `args` should contain `is_auto_complete`,\n `query`, `kind`, `current_user_id`, and `only_downloadable`","docstring_summary":"Perform a search. `args` should contain `is_auto_complete`,\n `query`, `kind`, `current_user_id`, and `only_downloadable`","docstring_tokens":["Perform","a","search",".","args","should","contain","is_auto_complete","query","kind","current_user_id","and","only_downloadable"],"function":"def search(args):\n \"\"\"Perform a search. 
`args` should contain `is_auto_complete`,\n `query`, `kind`, `current_user_id`, and `only_downloadable`\n \"\"\"\n search_str = args.get(\"query\")\n\n # when creating query table, we substitute this too\n search_str = search_str.replace(\"&\", \"and\")\n\n kind = args.get(\"kind\", \"all\")\n is_auto_complete = args.get(\"is_auto_complete\")\n current_user_id = args.get(\"current_user_id\")\n only_downloadable = args.get(\"only_downloadable\")\n limit = args.get(\"limit\")\n offset = args.get(\"offset\")\n\n searchKind = SearchKind[kind]\n\n results = {}\n\n # Accumulate user_ids for later\n user_ids = set()\n\n # Create args for perform_search_query\n search_args = {\n \"search_str\": search_str,\n \"limit\": limit,\n \"offset\": offset,\n \"is_auto_complete\": is_auto_complete,\n \"current_user_id\": current_user_id,\n \"only_downloadable\": only_downloadable,\n }\n\n if search_str:\n db = get_db_read_replica()\n # Concurrency approach:\n # Spin up a ThreadPoolExecutor for each request to perform_search_query\n # to perform the different search types in parallel.\n # After each future resolves, we then add users for each entity in a single\n # db round trip.\n with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:\n # Keep a mapping of future -> search_type\n futures_map = {}\n futures = []\n\n # Helper fn to submit a future and add it to bookkeeping data structures\n def submit_and_add(search_type):\n future = executor.submit(\n perform_search_query, db, search_type, search_args\n )\n futures.append(future)\n futures_map[future] = search_type\n\n if searchKind in [SearchKind.all, SearchKind.tracks]:\n submit_and_add(\"tracks\")\n\n if searchKind in [SearchKind.all, SearchKind.users]:\n submit_and_add(\"users\")\n if searchKind in [SearchKind.all, SearchKind.playlists]:\n submit_and_add(\"playlists\")\n\n if searchKind in [SearchKind.all, SearchKind.albums]:\n submit_and_add(\"albums\")\n\n for future in concurrent.futures.as_completed(futures):\n search_result = future.result()\n future_type = futures_map[future]\n\n # Add to the final results\n # Add to user_ids\n if future_type == \"tracks\":\n results[\"tracks\"] = search_result[\"all\"]\n results[\"saved_tracks\"] = search_result[\"saved\"]\n elif future_type == \"users\":\n results[\"users\"] = search_result[\"all\"]\n results[\"followed_users\"] = search_result[\"followed\"]\n elif future_type == \"playlists\":\n results[\"playlists\"] = search_result[\"all\"]\n results[\"saved_playlists\"] = search_result[\"saved\"]\n elif future_type == \"albums\":\n results[\"albums\"] = search_result[\"all\"]\n results[\"saved_albums\"] = search_result[\"saved\"]\n user_ids.update(get_users_ids(search_result[\"all\"]))\n\n with db.scoped_session() as session:\n # Add users back\n users = get_users_by_id(session, list(user_ids), current_user_id)\n\n for (_, result_list) in results.items():\n for result in result_list:\n user_id = None\n if \"playlist_owner_id\" in result:\n user_id = result[\"playlist_owner_id\"]\n elif \"owner_id\" in result:\n user_id = result[\"owner_id\"]\n\n if user_id is not None:\n user = users[user_id]\n result[\"user\"] = user\n return results","function_tokens":["def","search","(","args",")",":","search_str","=","args",".","get","(","\"query\"",")","# when creating query table, we substitute this 
too","search_str","=","search_str",".","replace","(","\"&\"",",","\"and\"",")","kind","=","args",".","get","(","\"kind\"",",","\"all\"",")","is_auto_complete","=","args",".","get","(","\"is_auto_complete\"",")","current_user_id","=","args",".","get","(","\"current_user_id\"",")","only_downloadable","=","args",".","get","(","\"only_downloadable\"",")","limit","=","args",".","get","(","\"limit\"",")","offset","=","args",".","get","(","\"offset\"",")","searchKind","=","SearchKind","[","kind","]","results","=","{","}","# Accumulate user_ids for later","user_ids","=","set","(",")","# Create args for perform_search_query","search_args","=","{","\"search_str\"",":","search_str",",","\"limit\"",":","limit",",","\"offset\"",":","offset",",","\"is_auto_complete\"",":","is_auto_complete",",","\"current_user_id\"",":","current_user_id",",","\"only_downloadable\"",":","only_downloadable",",","}","if","search_str",":","db","=","get_db_read_replica","(",")","# Concurrency approach:","# Spin up a ThreadPoolExecutor for each request to perform_search_query","# to perform the different search types in parallel.","# After each future resolves, we then add users for each entity in a single","# db round trip.","with","concurrent",".","futures",".","ThreadPoolExecutor","(","max_workers","=","4",")","as","executor",":","# Keep a mapping of future -> search_type","futures_map","=","{","}","futures","=","[","]","# Helper fn to submit a future and add it to bookkeeping data structures","def","submit_and_add","(","search_type",")",":","future","=","executor",".","submit","(","perform_search_query",",","db",",","search_type",",","search_args",")","futures",".","append","(","future",")","futures_map","[","future","]","=","search_type","if","searchKind","in","[","SearchKind",".","all",",","SearchKind",".","tracks","]",":","submit_and_add","(","\"tracks\"",")","if","searchKind","in","[","SearchKind",".","all",",","SearchKind",".","users","]",":","submit_and_add","(","\"users\"",")","if","searchKind","in","[","SearchKind",".","all",",","SearchKind",".","playlists","]",":","submit_and_add","(","\"playlists\"",")","if","searchKind","in","[","SearchKind",".","all",",","SearchKind",".","albums","]",":","submit_and_add","(","\"albums\"",")","for","future","in","concurrent",".","futures",".","as_completed","(","futures",")",":","search_result","=","future",".","result","(",")","future_type","=","futures_map","[","future","]","# Add to the final results","# Add to user_ids","if","future_type","==","\"tracks\"",":","results","[","\"tracks\"","]","=","search_result","[","\"all\"","]","results","[","\"saved_tracks\"","]","=","search_result","[","\"saved\"","]","elif","future_type","==","\"users\"",":","results","[","\"users\"","]","=","search_result","[","\"all\"","]","results","[","\"followed_users\"","]","=","search_result","[","\"followed\"","]","elif","future_type","==","\"playlists\"",":","results","[","\"playlists\"","]","=","search_result","[","\"all\"","]","results","[","\"saved_playlists\"","]","=","search_result","[","\"saved\"","]","elif","future_type","==","\"albums\"",":","results","[","\"albums\"","]","=","search_result","[","\"all\"","]","results","[","\"saved_albums\"","]","=","search_result","[","\"saved\"","]","user_ids",".","update","(","get_users_ids","(","search_result","[","\"all\"","]",")",")","with","db",".","scoped_session","(",")","as","session",":","# Add users 
back","users","=","get_users_by_id","(","session",",","list","(","user_ids",")",",","current_user_id",")","for","(","_",",","result_list",")","in","results",".","items","(",")",":","for","result","in","result_list",":","user_id","=","None","if","\"playlist_owner_id\"","in","result",":","user_id","=","result","[","\"playlist_owner_id\"","]","elif","\"owner_id\"","in","result",":","user_id","=","result","[","\"owner_id\"","]","if","user_id","is","not","None",":","user","=","users","[","user_id","]","result","[","\"user\"","]","=","user","return","results"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/search_queries.py#L261-L360"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_playlist_tracks.py","language":"python","identifier":"get_playlist_tracks","parameters":"(session, args)","argument_list":"","return_statement":"","docstring":"Accepts args:\n {\n # optionally pass in full playlists to avoid having to fetch\n \"playlists\": Playlist[]\n\n # not needed if playlists are passed\n \"playlist_ids\": string[]\n \"current_user_id\": int\n \"populate_tracks\": boolean # whether to add users & metadata to tracks\n }\n\n Returns: {\n playlist_id: Playlist\n }","docstring_summary":"Accepts args:\n {\n # optionally pass in full playlists to avoid having to fetch\n \"playlists\": Playlist[]","docstring_tokens":["Accepts","args",":","{","#","optionally","pass","in","full","playlists","to","avoid","having","to","fetch","playlists",":","Playlist","[]"],"function":"def get_playlist_tracks(session, args):\n \"\"\"Accepts args:\n {\n # optionally pass in full playlists to avoid having to fetch\n \"playlists\": Playlist[]\n\n # not needed if playlists are passed\n \"playlist_ids\": string[]\n \"current_user_id\": int\n \"populate_tracks\": boolean # whether to add users & metadata to tracks\n }\n\n Returns: {\n playlist_id: Playlist\n }\n \"\"\"\n\n try:\n playlists = args.get(\"playlists\")\n if not playlists:\n playlist_ids = args.get(\"playlist_ids\", [])\n playlists = session.query(Playlist).filter(\n Playlist.is_current == True, Playlist.playlist_id.in_(playlist_ids)\n )\n playlists = list(map(helpers.model_to_dictionary, playlists))\n\n if not playlists:\n return {}\n\n # track_id -> [playlist_id]\n track_ids_set = set()\n for playlist in playlists:\n playlist_id = playlist[\"playlist_id\"]\n for track_id_dict in playlist[\"playlist_contents\"][\"track_ids\"]:\n track_id = track_id_dict[\"track\"]\n track_ids_set.add(track_id)\n\n playlist_tracks = (\n session.query(Track)\n .filter(Track.is_current == True, Track.track_id.in_(list(track_ids_set)))\n .all()\n )\n\n tracks = helpers.query_result_to_list(playlist_tracks)\n\n if args.get(\"populate_tracks\"):\n current_user_id = args.get(\"current_user_id\")\n tracks = populate_track_metadata(\n session, list(track_ids_set), tracks, current_user_id\n )\n\n add_users_to_tracks(session, tracks, current_user_id)\n\n # { track_id => track }\n track_ids_map = {track[\"track_id\"]: track for track in tracks}\n\n # { playlist_id => [track]}\n playlists_map = {}\n for playlist in playlists:\n playlist_id = playlist[\"playlist_id\"]\n playlists_map[playlist_id] = []\n for track_id_dict in playlist[\"playlist_contents\"][\"track_ids\"]:\n track_id = track_id_dict[\"track\"]\n track = track_ids_map[track_id]\n playlists_map[playlist_id].append(track)\n\n return playlists_map\n\n 
except sqlalchemy.orm.exc.NoResultFound:\n return {}","function_tokens":["def","get_playlist_tracks","(","session",",","args",")",":","try",":","playlists","=","args",".","get","(","\"playlists\"",")","if","not","playlists",":","playlist_ids","=","args",".","get","(","\"playlist_ids\"",",","[","]",")","playlists","=","session",".","query","(","Playlist",")",".","filter","(","Playlist",".","is_current","==","True",",","Playlist",".","playlist_id",".","in_","(","playlist_ids",")",")","playlists","=","list","(","map","(","helpers",".","model_to_dictionary",",","playlists",")",")","if","not","playlists",":","return","{","}","# track_id -> [playlist_id]","track_ids_set","=","set","(",")","for","playlist","in","playlists",":","playlist_id","=","playlist","[","\"playlist_id\"","]","for","track_id_dict","in","playlist","[","\"playlist_contents\"","]","[","\"track_ids\"","]",":","track_id","=","track_id_dict","[","\"track\"","]","track_ids_set",".","add","(","track_id",")","playlist_tracks","=","(","session",".","query","(","Track",")",".","filter","(","Track",".","is_current","==","True",",","Track",".","track_id",".","in_","(","list","(","track_ids_set",")",")",")",".","all","(",")",")","tracks","=","helpers",".","query_result_to_list","(","playlist_tracks",")","if","args",".","get","(","\"populate_tracks\"",")",":","current_user_id","=","args",".","get","(","\"current_user_id\"",")","tracks","=","populate_track_metadata","(","session",",","list","(","track_ids_set",")",",","tracks",",","current_user_id",")","add_users_to_tracks","(","session",",","tracks",",","current_user_id",")","# { track_id => track }","track_ids_map","=","{","track","[","\"track_id\"","]",":","track","for","track","in","tracks","}","# { playlist_id => [track]}","playlists_map","=","{","}","for","playlist","in","playlists",":","playlist_id","=","playlist","[","\"playlist_id\"","]","playlists_map","[","playlist_id","]","=","[","]","for","track_id_dict","in","playlist","[","\"playlist_contents\"","]","[","\"track_ids\"","]",":","track_id","=","track_id_dict","[","\"track\"","]","track","=","track_ids_map","[","track_id","]","playlists_map","[","playlist_id","]",".","append","(","track",")","return","playlists_map","except","sqlalchemy",".","orm",".","exc",".","NoResultFound",":","return","{","}"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_playlist_tracks.py#L11-L80"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_prev_track_entries.py","language":"python","identifier":"get_prev_track_entries","parameters":"(session, entries)","argument_list":"","return_statement":"return prev_entries_query.all()","docstring":"Gets the previous state of tracks in the database given a list of tracks.\n\n Args:\n session: (DB) sqlalchemy scoped db session\n entries: (List) List of current track entries\n\n Returns:\n prev_track_entries: (List) List of previous track entries corresponding to the passed track entries","docstring_summary":"Gets the previous state of tracks in the database given a list of tracks.","docstring_tokens":["Gets","the","previous","state","of","tracks","in","the","database","given","a","list","of","tracks","."],"function":"def get_prev_track_entries(session, entries):\n \"\"\"\n Gets the previous state of tracks in the database given a list of tracks.\n\n Args:\n session: (DB) sqlalchemy scoped db session\n entries: (List) List of 
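The second half of get_playlist_tracks is a two-pass expansion: build one lookup table of tracks by id, then map each playlist's ordered track-id list through it. A minimal sketch:

def map_playlists_to_tracks(playlists, tracks_by_id):
    playlists_map = {}
    for playlist in playlists:
        ids = [t["track"] for t in playlist["playlist_contents"]["track_ids"]]
        # Preserve the playlist's own ordering of its tracks.
        playlists_map[playlist["playlist_id"]] = [tracks_by_id[i] for i in ids]
    return playlists_map

playlists = [{"playlist_id": 7, "playlist_contents": {"track_ids": [{"track": 1}]}}]
assert map_playlists_to_tracks(playlists, {1: {"track_id": 1}}) == {7: [{"track_id": 1}]}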
current track entries\n\n Returns:\n prev_track_entries: (List) List of previous track entries corresponding to the passed track entries\n \"\"\"\n\n if len(entries) == 0:\n return []\n\n def get_prev_query_pairs(entry):\n return [entry[\"track_id\"], entry[\"blocknumber\"]]\n\n prev_query_pairs = map(get_prev_query_pairs, entries)\n\n prev_entries_subquery = (\n session.query(\n Track.track_id, func.max(Track.blocknumber).label(\"max_blocknumber\")\n )\n .filter(\n or_(\n and_(Track.track_id == pair[0], Track.blocknumber < pair[1])\n for pair in prev_query_pairs\n )\n )\n .group_by(Track.track_id)\n .subquery()\n )\n\n prev_entries_query = session.query(Track).join(\n prev_entries_subquery,\n and_(\n prev_entries_subquery.c.track_id == Track.track_id,\n prev_entries_subquery.c.max_blocknumber == Track.blocknumber,\n ),\n )\n\n return prev_entries_query.all()","function_tokens":["def","get_prev_track_entries","(","session",",","entries",")",":","if","len","(","entries",")","==","0",":","return","[","]","def","get_prev_query_pairs","(","entry",")",":","return","[","entry","[","\"track_id\"","]",",","entry","[","\"blocknumber\"","]","]","prev_query_pairs","=","map","(","get_prev_query_pairs",",","entries",")","prev_entries_subquery","=","(","session",".","query","(","Track",".","track_id",",","func",".","max","(","Track",".","blocknumber",")",".","label","(","\"max_blocknumber\"",")",")",".","filter","(","or_","(","and_","(","Track",".","track_id","==","pair","[","0","]",",","Track",".","blocknumber","<","pair","[","1","]",")","for","pair","in","prev_query_pairs",")",")",".","group_by","(","Track",".","track_id",")",".","subquery","(",")",")","prev_entries_query","=","session",".","query","(","Track",")",".","join","(","prev_entries_subquery",",","and_","(","prev_entries_subquery",".","c",".","track_id","==","Track",".","track_id",",","prev_entries_subquery",".","c",".","max_blocknumber","==","Track",".","blocknumber",",",")",",",")","return","prev_entries_query",".","all","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_prev_track_entries.py#L5-L47"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_unpopulated_playlists.py","language":"python","identifier":"get_unpopulated_playlists","parameters":"(session, playlist_ids, filter_deleted=False)","argument_list":"","return_statement":"return playlists_response","docstring":"Fetches playlists by checking the redis cache first then\n going to DB and writes to cache if not present\n\n Args:\n session: DB session\n playlist_ids: array A list of playlist ids\n\n Returns:\n Array of playlists","docstring_summary":"Fetches playlists by checking the redis cache first then\n going to DB and writes to cache if not present","docstring_tokens":["Fetches","playlists","by","checking","the","redis","cache","first","then","going","to","DB","and","writes","to","cache","if","not","present"],"function":"def get_unpopulated_playlists(session, playlist_ids, filter_deleted=False):\n \"\"\"\n Fetches playlists by checking the redis cache first then\n going to DB and writes to cache if not present\n\n Args:\n session: DB session\n playlist_ids: array A list of playlist ids\n\n Returns:\n Array of playlists\n \"\"\"\n # Check the cached playlists\n cached_playlists_results = get_cached_playlists(playlist_ids)\n has_all_playlists_cached = cached_playlists_results.count(None) == 
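get_prev_track_entries asks, for each (track_id, blocknumber) pair, for the row with the greatest blocknumber strictly below the given one; the SQL expresses this as a grouped max over OR'd per-pair conditions, then joins back to the full rows. The same selection in plain Python over hypothetical rows:

    rows = [
        {"track_id": 1, "blocknumber": 5},
        {"track_id": 1, "blocknumber": 8},
        {"track_id": 1, "blocknumber": 12},
    ]
    entries = [{"track_id": 1, "blocknumber": 12}]

    prev_entries = []
    for entry in entries:
        candidates = [r for r in rows
                      if r["track_id"] == entry["track_id"]
                      and r["blocknumber"] < entry["blocknumber"]]
        if candidates:
            prev_entries.append(max(candidates, key=lambda r: r["blocknumber"]))
    print(prev_entries)  # [{'track_id': 1, 'blocknumber': 8}]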
0\n if has_all_playlists_cached:\n if filter_deleted:\n return list(\n filter(\n lambda playlist: not playlist[\"is_delete\"], cached_playlists_results\n )\n )\n return cached_playlists_results\n\n # Create a dict of cached playlists\n cached_playlists = {}\n for cached_playlist in cached_playlists_results:\n if cached_playlist:\n cached_playlists[cached_playlist[\"playlist_id\"]] = cached_playlist\n\n playlist_ids_to_fetch = filter(\n lambda playlist_id: playlist_id not in cached_playlists, playlist_ids\n )\n\n playlists_query = (\n session.query(Playlist)\n .filter(Playlist.is_current == True)\n .filter(Playlist.playlist_id.in_(playlist_ids_to_fetch))\n )\n if filter_deleted:\n playlists_query = playlists_query.filter(Playlist.is_delete == False)\n\n playlists = playlists_query.all()\n playlists = helpers.query_result_to_list(playlists)\n queried_playlists = {playlist[\"playlist_id\"]: playlist for playlist in playlists}\n\n # cache playlists for future use\n set_playlists_in_cache(playlists)\n\n playlists_response = []\n for playlist_id in playlist_ids:\n if playlist_id in cached_playlists:\n if not filter_deleted or not cached_playlists[playlist_id][\"is_delete\"]:\n playlists_response.append(cached_playlists[playlist_id])\n elif playlist_id in queried_playlists:\n playlists_response.append(queried_playlists[playlist_id])\n\n return playlists_response","function_tokens":["def","get_unpopulated_playlists","(","session",",","playlist_ids",",","filter_deleted","=","False",")",":","# Check the cached playlists","cached_playlists_results","=","get_cached_playlists","(","playlist_ids",")","has_all_playlists_cached","=","cached_playlists_results",".","count","(","None",")","==","0","if","has_all_playlists_cached",":","if","filter_deleted",":","return","list","(","filter","(","lambda","playlist",":","not","playlist","[","\"is_delete\"","]",",","cached_playlists_results",")",")","return","cached_playlists_results","# Create a dict of cached playlists","cached_playlists","=","{","}","for","cached_playlist","in","cached_playlists_results",":","if","cached_playlist",":","cached_playlists","[","cached_playlist","[","\"playlist_id\"","]","]","=","cached_playlist","playlist_ids_to_fetch","=","filter","(","lambda","playlist_id",":","playlist_id","not","in","cached_playlists",",","playlist_ids",")","playlists_query","=","(","session",".","query","(","Playlist",")",".","filter","(","Playlist",".","is_current","==","True",")",".","filter","(","Playlist",".","playlist_id",".","in_","(","playlist_ids_to_fetch",")",")",")","if","filter_deleted",":","playlists_query","=","playlists_query",".","filter","(","Playlist",".","is_delete","==","False",")","playlists","=","playlists_query",".","all","(",")","playlists","=","helpers",".","query_result_to_list","(","playlists",")","queried_playlists","=","{","playlist","[","\"playlist_id\"","]",":","playlist","for","playlist","in","playlists","}","# cache playlists for future 
use","set_playlists_in_cache","(","playlists",")","playlists_response","=","[","]","for","playlist_id","in","playlist_ids",":","if","playlist_id","in","cached_playlists",":","if","not","filter_deleted","or","not","cached_playlists","[","playlist_id","]","[","\"is_delete\"","]",":","playlists_response",".","append","(","cached_playlists","[","playlist_id","]",")","elif","playlist_id","in","queried_playlists",":","playlists_response",".","append","(","queried_playlists","[","playlist_id","]",")","return","playlists_response"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_unpopulated_playlists.py#L41-L98"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_remix_track_parents.py","language":"python","identifier":"get_remix_track_parents","parameters":"(args)","argument_list":"","return_statement":"return tracks","docstring":"Fetch remix parents for a given track.\n\n Args:\n args:dict\n args.track_id: track id\n args.limit: limit\n args.offset: offset\n args.with_users: with users\n args.current_user_id: current user ID","docstring_summary":"Fetch remix parents for a given track.","docstring_tokens":["Fetch","remix","parents","for","a","given","track","."],"function":"def get_remix_track_parents(args):\n \"\"\"Fetch remix parents for a given track.\n\n Args:\n args:dict\n args.track_id: track id\n args.limit: limit\n args.offset: offset\n args.with_users: with users\n args.current_user_id: current user ID\n \"\"\"\n track_id = args.get(\"track_id\")\n current_user_id = args.get(\"current_user_id\")\n limit = args.get(\"limit\")\n offset = args.get(\"offset\")\n db = get_db_read_replica()\n\n with db.scoped_session() as session:\n\n def get_unpopulated_remix_parents():\n base_query = (\n session.query(Track)\n .join(\n Remix,\n and_(\n Remix.parent_track_id == Track.track_id,\n Remix.child_track_id == track_id,\n ),\n )\n .filter(Track.is_current == True, Track.is_unlisted == False)\n .order_by(desc(Track.created_at), desc(Track.track_id))\n )\n\n tracks = add_query_pagination(base_query, limit, offset).all()\n tracks = helpers.query_result_to_list(tracks)\n track_ids = list(map(lambda track: track[\"track_id\"], tracks))\n return (tracks, track_ids)\n\n key = make_cache_key(args)\n (tracks, track_ids) = use_redis_cache(\n key,\n UNPOPULATED_REMIX_PARENTS_CACHE_DURATION_SEC,\n get_unpopulated_remix_parents,\n )\n\n tracks = populate_track_metadata(session, track_ids, tracks, current_user_id)\n if args.get(\"with_users\", False):\n add_users_to_tracks(session, tracks, current_user_id)\n\n return 
tracks","function_tokens":["def","get_remix_track_parents","(","args",")",":","track_id","=","args",".","get","(","\"track_id\"",")","current_user_id","=","args",".","get","(","\"current_user_id\"",")","limit","=","args",".","get","(","\"limit\"",")","offset","=","args",".","get","(","\"offset\"",")","db","=","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","def","get_unpopulated_remix_parents","(",")",":","base_query","=","(","session",".","query","(","Track",")",".","join","(","Remix",",","and_","(","Remix",".","parent_track_id","==","Track",".","track_id",",","Remix",".","child_track_id","==","track_id",",",")",",",")",".","filter","(","Track",".","is_current","==","True",",","Track",".","is_unlisted","==","False",")",".","order_by","(","desc","(","Track",".","created_at",")",",","desc","(","Track",".","track_id",")",")",")","tracks","=","add_query_pagination","(","base_query",",","limit",",","offset",")",".","all","(",")","tracks","=","helpers",".","query_result_to_list","(","tracks",")","track_ids","=","list","(","map","(","lambda","track",":","track","[","\"track_id\"","]",",","tracks",")",")","return","(","tracks",",","track_ids",")","key","=","make_cache_key","(","args",")","(","tracks",",","track_ids",")","=","use_redis_cache","(","key",",","UNPOPULATED_REMIX_PARENTS_CACHE_DURATION_SEC",",","get_unpopulated_remix_parents",",",")","tracks","=","populate_track_metadata","(","session",",","track_ids",",","tracks",",","current_user_id",")","if","args",".","get","(","\"with_users\"",",","False",")",":","add_users_to_tracks","(","session",",","tracks",",","current_user_id",")","return","tracks"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_remix_track_parents.py#L29-L78"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_health.py","language":"python","identifier":"get_health","parameters":"(args: GetHealthArgs, use_redis_cache: bool = True)","argument_list":"","return_statement":"return health_results, is_unhealthy","docstring":"Gets health status for the service\n\n Returns a tuple of health results and a boolean indicating an error","docstring_summary":"Gets health status for the service","docstring_tokens":["Gets","health","status","for","the","service"],"function":"def get_health(args: GetHealthArgs, use_redis_cache: bool = True) -> Tuple[Dict, bool]:\n \"\"\"\n Gets health status for the service\n\n Returns a tuple of health results and a boolean indicating an error\n \"\"\"\n redis = redis_connection.get_redis()\n web3 = web3_provider.get_web3()\n\n verbose = args.get(\"verbose\")\n enforce_block_diff = args.get(\"enforce_block_diff\")\n qs_healthy_block_diff = cast(Optional[int], args.get(\"healthy_block_diff\"))\n challenge_events_age_max_drift = args.get(\"challenge_events_age_max_drift\")\n plays_count_max_drift = args.get(\"plays_count_max_drift\")\n\n # If healthy block diff is given in url and positive, override config value\n healthy_block_diff = (\n qs_healthy_block_diff\n if qs_healthy_block_diff is not None and qs_healthy_block_diff >= 0\n else default_healthy_block_diff\n )\n\n latest_block_num = None\n latest_block_hash = None\n latest_indexed_block_num = None\n latest_indexed_block_hash = None\n\n if use_redis_cache:\n # get latest blockchain state from redis cache, or fallback to chain if None\n latest_block_num, latest_block_hash = 
get_latest_chain_block_set_if_nx(\n redis, web3\n )\n\n # get latest db state from redis cache\n latest_indexed_block_num = redis.get(most_recent_indexed_block_redis_key)\n if latest_indexed_block_num is not None:\n latest_indexed_block_num = int(latest_indexed_block_num)\n\n latest_indexed_block_hash = redis.get(most_recent_indexed_block_hash_redis_key)\n if latest_indexed_block_hash is not None:\n latest_indexed_block_hash = latest_indexed_block_hash.decode(\"utf-8\")\n\n # fetch latest blockchain state from web3 if:\n # we explicitly don't want to use redis cache or\n # value from redis cache is None\n if not use_redis_cache or latest_block_num is None or latest_block_hash is None:\n # get latest blockchain state from web3\n latest_block = web3.eth.getBlock(\"latest\", True)\n latest_block_num = latest_block.number\n latest_block_hash = latest_block.hash.hex()\n\n play_health_info = get_play_health_info(redis, plays_count_max_drift)\n rewards_manager_health_info = get_rewards_manager_health_info(redis)\n user_bank_health_info = get_user_bank_health_info(redis)\n\n # fetch latest db state if:\n # we explicitly don't want to use redis cache or\n # value from redis cache is None\n if (\n not use_redis_cache\n or latest_indexed_block_num is None\n or latest_indexed_block_hash is None\n ):\n db_block_state = _get_db_block_state()\n latest_indexed_block_num = db_block_state[\"number\"] or 0\n latest_indexed_block_hash = db_block_state[\"blockhash\"]\n\n trending_tracks_age_sec = get_elapsed_time_redis(\n redis, trending_tracks_last_completion_redis_key\n )\n trending_playlists_age_sec = get_elapsed_time_redis(\n redis, trending_playlists_last_completion_redis_key\n )\n challenge_events_age_sec = get_elapsed_time_redis(\n redis, challenges_last_processed_event_redis_key\n )\n user_balances_age_sec = get_elapsed_time_redis(\n redis, user_balances_refresh_last_completion_redis_key\n )\n num_users_in_lazy_balance_refresh_queue = len(\n redis.smembers(LAZY_REFRESH_REDIS_PREFIX)\n )\n num_users_in_immediate_balance_refresh_queue = len(\n redis.smembers(IMMEDIATE_REFRESH_REDIS_PREFIX)\n )\n last_scanned_block_for_balance_refresh = redis_get_or_restore(\n redis, eth_indexing_last_scanned_block_key\n )\n index_eth_age_sec = get_elapsed_time_redis(\n redis, index_eth_last_completion_redis_key\n )\n last_scanned_block_for_balance_refresh = (\n int(last_scanned_block_for_balance_refresh)\n if last_scanned_block_for_balance_refresh\n else None\n )\n # Get system information monitor values\n sys_info = monitors.get_monitors(\n [\n MONITORS[monitor_names.database_size],\n MONITORS[monitor_names.database_connections],\n MONITORS[monitor_names.total_memory],\n MONITORS[monitor_names.used_memory],\n MONITORS[monitor_names.filesystem_size],\n MONITORS[monitor_names.filesystem_used],\n MONITORS[monitor_names.received_bytes_per_sec],\n MONITORS[monitor_names.transferred_bytes_per_sec],\n MONITORS[monitor_names.redis_total_memory],\n ]\n )\n\n health_results = {\n \"web\": {\n \"blocknumber\": latest_block_num,\n \"blockhash\": latest_block_hash,\n },\n \"db\": {\n \"number\": latest_indexed_block_num,\n \"blockhash\": latest_indexed_block_hash,\n },\n \"git\": os.getenv(\"GIT_SHA\"),\n \"trending_tracks_age_sec\": trending_tracks_age_sec,\n \"trending_playlists_age_sec\": trending_playlists_age_sec,\n \"challenge_last_event_age_sec\": challenge_events_age_sec,\n \"user_balances_age_sec\": user_balances_age_sec,\n \"num_users_in_lazy_balance_refresh_queue\": num_users_in_lazy_balance_refresh_queue,\n 
\"num_users_in_immediate_balance_refresh_queue\": num_users_in_immediate_balance_refresh_queue,\n \"last_scanned_block_for_balance_refresh\": last_scanned_block_for_balance_refresh,\n \"index_eth_age_sec\": index_eth_age_sec,\n \"number_of_cpus\": number_of_cpus,\n **sys_info,\n \"plays\": play_health_info,\n \"rewards_manager\": rewards_manager_health_info,\n \"user_bank\": user_bank_health_info,\n \"openresty_public_key\": openresty_public_key,\n }\n\n block_difference = abs(latest_block_num - latest_indexed_block_num)\n health_results[\"block_difference\"] = block_difference\n health_results[\"maximum_healthy_block_difference\"] = default_healthy_block_diff\n health_results.update(disc_prov_version)\n\n # Check that this node meets the minimum system requirements\n num_cpus: int = cast(int, health_results[\"number_of_cpus\"] or 0)\n total_memory: int = cast(int, health_results[\"total_memory\"] or 0)\n filesystem_size: int = cast(int, health_results[\"filesystem_size\"] or 0)\n if (\n num_cpus < min_number_of_cpus\n or total_memory < min_total_memory\n or filesystem_size < min_filesystem_size\n ):\n health_results[\"meets_min_requirements\"] = False\n # TODO - this will become strictly enforced in upcoming service versions and return with error\n else:\n health_results[\"meets_min_requirements\"] = True\n\n if verbose:\n # DB connections check\n db_connections_json, db_connections_error = _get_db_conn_state()\n health_results[\"db_connections\"] = db_connections_json\n health_results[\"country\"] = shared_config[\"serviceLocation\"][\"serviceCountry\"]\n health_results[\"latitude\"] = shared_config[\"serviceLocation\"][\"serviceLatitude\"]\n health_results[\"longitude\"] = shared_config[\"serviceLocation\"][\n \"serviceLongitude\"\n ]\n\n if db_connections_error:\n return health_results, db_connections_error\n\n query_insights_json, query_insights_error = _get_query_insights()\n health_results[\"query_insights\"] = query_insights_json\n\n if query_insights_error:\n return health_results, query_insights_error\n\n table_size_info_json = monitors.get_monitors(\n [\n MONITORS[monitor_names.table_size_info],\n ]\n )\n\n health_results[\"tables\"] = table_size_info_json\n\n unhealthy_blocks = bool(\n enforce_block_diff and block_difference > healthy_block_diff\n )\n unhealthy_challenges = bool(\n challenge_events_age_max_drift\n and challenge_events_age_sec\n and challenge_events_age_sec > challenge_events_age_max_drift\n )\n\n is_unhealthy = (\n unhealthy_blocks or unhealthy_challenges or play_health_info[\"is_unhealthy\"]\n )\n\n return health_results, is_unhealthy","function_tokens":["def","get_health","(","args",":","GetHealthArgs",",","use_redis_cache",":","bool","=","True",")","->","Tuple","[","Dict",",","bool","]",":","redis","=","redis_connection",".","get_redis","(",")","web3","=","web3_provider",".","get_web3","(",")","verbose","=","args",".","get","(","\"verbose\"",")","enforce_block_diff","=","args",".","get","(","\"enforce_block_diff\"",")","qs_healthy_block_diff","=","cast","(","Optional","[","int","]",",","args",".","get","(","\"healthy_block_diff\"",")",")","challenge_events_age_max_drift","=","args",".","get","(","\"challenge_events_age_max_drift\"",")","plays_count_max_drift","=","args",".","get","(","\"plays_count_max_drift\"",")","# If healthy block diff is given in url and positive, override config 
value","healthy_block_diff","=","(","qs_healthy_block_diff","if","qs_healthy_block_diff","is","not","None","and","qs_healthy_block_diff",">=","0","else","default_healthy_block_diff",")","latest_block_num","=","None","latest_block_hash","=","None","latest_indexed_block_num","=","None","latest_indexed_block_hash","=","None","if","use_redis_cache",":","# get latest blockchain state from redis cache, or fallback to chain if None","latest_block_num",",","latest_block_hash","=","get_latest_chain_block_set_if_nx","(","redis",",","web3",")","# get latest db state from redis cache","latest_indexed_block_num","=","redis",".","get","(","most_recent_indexed_block_redis_key",")","if","latest_indexed_block_num","is","not","None",":","latest_indexed_block_num","=","int","(","latest_indexed_block_num",")","latest_indexed_block_hash","=","redis",".","get","(","most_recent_indexed_block_hash_redis_key",")","if","latest_indexed_block_hash","is","not","None",":","latest_indexed_block_hash","=","latest_indexed_block_hash",".","decode","(","\"utf-8\"",")","# fetch latest blockchain state from web3 if:","# we explicitly don't want to use redis cache or","# value from redis cache is None","if","not","use_redis_cache","or","latest_block_num","is","None","or","latest_block_hash","is","None",":","# get latest blockchain state from web3","latest_block","=","web3",".","eth",".","getBlock","(","\"latest\"",",","True",")","latest_block_num","=","latest_block",".","number","latest_block_hash","=","latest_block",".","hash",".","hex","(",")","play_health_info","=","get_play_health_info","(","redis",",","plays_count_max_drift",")","rewards_manager_health_info","=","get_rewards_manager_health_info","(","redis",")","user_bank_health_info","=","get_user_bank_health_info","(","redis",")","# fetch latest db state if:","# we explicitly don't want to use redis cache or","# value from redis cache is None","if","(","not","use_redis_cache","or","latest_indexed_block_num","is","None","or","latest_indexed_block_hash","is","None",")",":","db_block_state","=","_get_db_block_state","(",")","latest_indexed_block_num","=","db_block_state","[","\"number\"","]","or","0","latest_indexed_block_hash","=","db_block_state","[","\"blockhash\"","]","trending_tracks_age_sec","=","get_elapsed_time_redis","(","redis",",","trending_tracks_last_completion_redis_key",")","trending_playlists_age_sec","=","get_elapsed_time_redis","(","redis",",","trending_playlists_last_completion_redis_key",")","challenge_events_age_sec","=","get_elapsed_time_redis","(","redis",",","challenges_last_processed_event_redis_key",")","user_balances_age_sec","=","get_elapsed_time_redis","(","redis",",","user_balances_refresh_last_completion_redis_key",")","num_users_in_lazy_balance_refresh_queue","=","len","(","redis",".","smembers","(","LAZY_REFRESH_REDIS_PREFIX",")",")","num_users_in_immediate_balance_refresh_queue","=","len","(","redis",".","smembers","(","IMMEDIATE_REFRESH_REDIS_PREFIX",")",")","last_scanned_block_for_balance_refresh","=","redis_get_or_restore","(","redis",",","eth_indexing_last_scanned_block_key",")","index_eth_age_sec","=","get_elapsed_time_redis","(","redis",",","index_eth_last_completion_redis_key",")","last_scanned_block_for_balance_refresh","=","(","int","(","last_scanned_block_for_balance_refresh",")","if","last_scanned_block_for_balance_refresh","else","None",")","# Get system information monitor 
values","sys_info","=","monitors",".","get_monitors","(","[","MONITORS","[","monitor_names",".","database_size","]",",","MONITORS","[","monitor_names",".","database_connections","]",",","MONITORS","[","monitor_names",".","total_memory","]",",","MONITORS","[","monitor_names",".","used_memory","]",",","MONITORS","[","monitor_names",".","filesystem_size","]",",","MONITORS","[","monitor_names",".","filesystem_used","]",",","MONITORS","[","monitor_names",".","received_bytes_per_sec","]",",","MONITORS","[","monitor_names",".","transferred_bytes_per_sec","]",",","MONITORS","[","monitor_names",".","redis_total_memory","]",",","]",")","health_results","=","{","\"web\"",":","{","\"blocknumber\"",":","latest_block_num",",","\"blockhash\"",":","latest_block_hash",",","}",",","\"db\"",":","{","\"number\"",":","latest_indexed_block_num",",","\"blockhash\"",":","latest_indexed_block_hash",",","}",",","\"git\"",":","os",".","getenv","(","\"GIT_SHA\"",")",",","\"trending_tracks_age_sec\"",":","trending_tracks_age_sec",",","\"trending_playlists_age_sec\"",":","trending_playlists_age_sec",",","\"challenge_last_event_age_sec\"",":","challenge_events_age_sec",",","\"user_balances_age_sec\"",":","user_balances_age_sec",",","\"num_users_in_lazy_balance_refresh_queue\"",":","num_users_in_lazy_balance_refresh_queue",",","\"num_users_in_immediate_balance_refresh_queue\"",":","num_users_in_immediate_balance_refresh_queue",",","\"last_scanned_block_for_balance_refresh\"",":","last_scanned_block_for_balance_refresh",",","\"index_eth_age_sec\"",":","index_eth_age_sec",",","\"number_of_cpus\"",":","number_of_cpus",",","*","*","sys_info",",","\"plays\"",":","play_health_info",",","\"rewards_manager\"",":","rewards_manager_health_info",",","\"user_bank\"",":","user_bank_health_info",",","\"openresty_public_key\"",":","openresty_public_key",",","}","block_difference","=","abs","(","latest_block_num","-","latest_indexed_block_num",")","health_results","[","\"block_difference\"","]","=","block_difference","health_results","[","\"maximum_healthy_block_difference\"","]","=","default_healthy_block_diff","health_results",".","update","(","disc_prov_version",")","# Check that this node meets the minimum system requirements","num_cpus",":","int","=","cast","(","int",",","health_results","[","\"number_of_cpus\"","]","or","0",")","total_memory",":","int","=","cast","(","int",",","health_results","[","\"total_memory\"","]","or","0",")","filesystem_size",":","int","=","cast","(","int",",","health_results","[","\"filesystem_size\"","]","or","0",")","if","(","num_cpus","<","min_number_of_cpus","or","total_memory","<","min_total_memory","or","filesystem_size","<","min_filesystem_size",")",":","health_results","[","\"meets_min_requirements\"","]","=","False","# TODO - this will become strictly enforced in upcoming service versions and return with error","else",":","health_results","[","\"meets_min_requirements\"","]","=","True","if","verbose",":","# DB connections 
check","db_connections_json",",","db_connections_error","=","_get_db_conn_state","(",")","health_results","[","\"db_connections\"","]","=","db_connections_json","health_results","[","\"country\"","]","=","shared_config","[","\"serviceLocation\"","]","[","\"serviceCountry\"","]","health_results","[","\"latitude\"","]","=","shared_config","[","\"serviceLocation\"","]","[","\"serviceLatitude\"","]","health_results","[","\"longitude\"","]","=","shared_config","[","\"serviceLocation\"","]","[","\"serviceLongitude\"","]","if","db_connections_error",":","return","health_results",",","db_connections_error","query_insights_json",",","query_insights_error","=","_get_query_insights","(",")","health_results","[","\"query_insights\"","]","=","query_insights_json","if","query_insights_error",":","return","health_results",",","query_insights_error","table_size_info_json","=","monitors",".","get_monitors","(","[","MONITORS","[","monitor_names",".","table_size_info","]",",","]",")","health_results","[","\"tables\"","]","=","table_size_info_json","unhealthy_blocks","=","bool","(","enforce_block_diff","and","block_difference",">","healthy_block_diff",")","unhealthy_challenges","=","bool","(","challenge_events_age_max_drift","and","challenge_events_age_sec","and","challenge_events_age_sec",">","challenge_events_age_max_drift",")","is_unhealthy","=","(","unhealthy_blocks","or","unhealthy_challenges","or","play_health_info","[","\"is_unhealthy\"","]",")","return","health_results",",","is_unhealthy"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_health.py#L167-L363"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_health.py","language":"python","identifier":"get_latest_chain_block_set_if_nx","parameters":"(redis=None, web3=None)","argument_list":"","return_statement":"return latest_block_num, latest_block_hash","docstring":"Retrieves the latest block number and blockhash from redis if the keys exist.\n Otherwise it sets these values in redis by querying web3 and returns them\n\n :param redis: redis connection\n :param web3: web3 connection\n\n :rtype (int, string)","docstring_summary":"Retrieves the latest block number and blockhash from redis if the keys exist.\n Otherwise it sets these values in redis by querying web3 and returns them","docstring_tokens":["Retrieves","the","latest","block","number","and","blockhash","from","redis","if","the","keys","exist",".","Otherwise","it","sets","these","values","in","redis","by","querying","web3","and","returns","them"],"function":"def get_latest_chain_block_set_if_nx(redis=None, web3=None):\n \"\"\"\n Retrieves the latest block number and blockhash from redis if the keys exist.\n Otherwise it sets these values in redis by querying web3 and returns them\n\n :param redis: redis connection\n :param web3: web3 connection\n\n :rtype (int, string)\n \"\"\"\n\n latest_block_num = None\n latest_block_hash = None\n\n if redis is None or web3 is None:\n raise Exception(\"Invalid arguments for get_latest_chain_block_set_if_nx\")\n\n # also check for 'eth' attribute in web3 which means it's initialized and connected to a provider\n if not hasattr(web3, \"eth\"):\n raise Exception(\n \"Invalid web3 argument for get_latest_chain_block_set_if_nx, web3 is not initialized\"\n )\n\n stored_latest_block_num = redis.get(latest_block_redis_key)\n if stored_latest_block_num is not None:\n latest_block_num = 
int(stored_latest_block_num)\n\n stored_latest_blockhash = redis.get(latest_block_hash_redis_key)\n if stored_latest_blockhash is not None:\n latest_block_hash = stored_latest_blockhash.decode(\"utf-8\")\n\n if latest_block_num is None or latest_block_hash is None:\n latest_block = web3.eth.getBlock(\"latest\", True)\n latest_block_num = latest_block.number\n latest_block_hash = latest_block.hash.hex()\n\n # if we had attempted to use redis cache and the values weren't there, set the values now\n try:\n # ex sets expiration time and nx only sets if key doesn't exist in redis\n redis.set(\n latest_block_redis_key,\n latest_block_num,\n ex=default_indexing_interval_seconds,\n nx=True,\n )\n redis.set(\n latest_block_hash_redis_key,\n latest_block_hash,\n ex=default_indexing_interval_seconds,\n nx=True,\n )\n except Exception as e:\n logger.error(\n f\"Could not set values in redis for get_latest_chain_block_set_if_nx: {e}\"\n )\n\n return latest_block_num, latest_block_hash","function_tokens":["def","get_latest_chain_block_set_if_nx","(","redis","=","None",",","web3","=","None",")",":","latest_block_num","=","None","latest_block_hash","=","None","if","redis","is","None","or","web3","is","None",":","raise","Exception","(","\"Invalid arguments for get_latest_chain_block_set_if_nx\"",")","# also check for 'eth' attribute in web3 which means it's initialized and connected to a provider","if","not","hasattr","(","web3",",","\"eth\"",")",":","raise","Exception","(","\"Invalid web3 argument for get_latest_chain_block_set_if_nx, web3 is not initialized\"",")","stored_latest_block_num","=","redis",".","get","(","latest_block_redis_key",")","if","stored_latest_block_num","is","not","None",":","latest_block_num","=","int","(","stored_latest_block_num",")","stored_latest_blockhash","=","redis",".","get","(","latest_block_hash_redis_key",")","if","stored_latest_blockhash","is","not","None",":","latest_block_hash","=","stored_latest_blockhash",".","decode","(","\"utf-8\"",")","if","latest_block_num","is","None","or","latest_block_hash","is","None",":","latest_block","=","web3",".","eth",".","getBlock","(","\"latest\"",",","True",")","latest_block_num","=","latest_block",".","number","latest_block_hash","=","latest_block",".","hash",".","hex","(",")","# if we had attempted to use redis cache and the values weren't there, set the values now","try",":","# ex sets expiration time and nx only sets if key doesn't exist in redis","redis",".","set","(","latest_block_redis_key",",","latest_block_num",",","ex","=","default_indexing_interval_seconds",",","nx","=","True",",",")","redis",".","set","(","latest_block_hash_redis_key",",","latest_block_hash",",","ex","=","default_indexing_interval_seconds",",","nx","=","True",",",")","except","Exception","as","e",":","logger",".","error","(","f\"Could not set values in redis for get_latest_chain_block_set_if_nx: {e}\"",")","return","latest_block_num",",","latest_block_hash"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_health.py#L461-L517"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_trailing_metrics.py","language":"python","identifier":"get_aggregate_route_metrics_trailing_month","parameters":"()","argument_list":"","return_statement":"","docstring":"Returns trailing count and unique count for all routes in the last trailing 30 days\n\n Returns:\n { unique_count, total_count 
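The cache-fill at the end of get_latest_chain_block_set_if_nx leans on redis SET with EX (expiry) and NX (write only if the key is absent), so concurrent callers don't overwrite each other's freshly cached block. A dict-backed stand-in showing the NX semantics (not the real redis client):

    class FakeRedis:
        def __init__(self):
            self.data = {}

        def set(self, key, value, ex=None, nx=False):
            if nx and key in self.data:
                return None  # redis-py returns None when NX blocks the write
            self.data[key] = value
            return True

    r = FakeRedis()
    r.set("latest_block_num", 100, ex=5, nx=True)
    r.set("latest_block_num", 999, ex=5, nx=True)  # no-op: key already present
    print(r.data["latest_block_num"])  # 100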
}","docstring_summary":"Returns trailing count and unique count for all routes in the last trailing 30 days","docstring_tokens":["Returns","trailing","count","and","unique","count","for","all","routes","in","the","last","trailing","30","days"],"function":"def get_aggregate_route_metrics_trailing_month():\n \"\"\"\n Returns trailing count and unique count for all routes in the last trailing 30 days\n\n Returns:\n { unique_count, total_count }\n \"\"\"\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n return _get_aggregate_route_metrics_trailing_month(session)","function_tokens":["def","get_aggregate_route_metrics_trailing_month","(",")",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_aggregate_route_metrics_trailing_month","(","session",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_trailing_metrics.py#L22-L31"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_trailing_metrics.py","language":"python","identifier":"get_monthly_trailing_route_metrics","parameters":"()","argument_list":"","return_statement":"","docstring":"Returns trailing count and unique count for all routes in the last month,\n calculated from the RouteMetricsTrailingMonth matview.\n\n Returns:\n { count, unique_count }","docstring_summary":"Returns trailing count and unique count for all routes in the last month,\n calculated from the RouteMetricsTrailingMonth matview.","docstring_tokens":["Returns","trailing","count","and","unique","count","for","all","routes","in","the","last","month","calculated","from","the","RouteMetricsTrailingMonth","matview","."],"function":"def get_monthly_trailing_route_metrics():\n \"\"\"\n Returns trailing count and unique count for all routes in the last month,\n calculated from the RouteMetricsTrailingMonth matview.\n\n Returns:\n { count, unique_count }\n \"\"\"\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n metrics = session.query(RouteMetricsTrailingMonth).all()\n return {\"count\": metrics[0].count, \"unique_count\": metrics[0].unique_count}","function_tokens":["def","get_monthly_trailing_route_metrics","(",")",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","metrics","=","session",".","query","(","RouteMetricsTrailingMonth",")",".","all","(",")","return","{","\"count\"",":","metrics","[","0","]",".","count",",","\"unique_count\"",":","metrics","[","0","]",".","unique_count","}"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_trailing_metrics.py#L65-L76"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_trailing_metrics.py","language":"python","identifier":"get_trailing_app_metrics","parameters":"(args)","argument_list":"","return_statement":"","docstring":"Returns trailing app_name metrics for a given time period.\n\n Args:\n args: dict The parsed args from the request\n args.limit: number The max number of apps to return\n args.time_range: one of \"week\", \"month\", \"all_time\"\n Returns:\n [{ name: string, count: number }, ...]","docstring_summary":"Returns trailing app_name metrics for a given 
time period.","docstring_tokens":["Returns","trailing","app_name","metrics","for","a","given","time","period","."],"function":"def get_trailing_app_metrics(args):\n \"\"\"\n Returns trailing app_name metrics for a given time period.\n\n Args:\n args: dict The parsed args from the request\n args.limit: number The max number of apps to return\n args.time_range: one of \"week\", \"month\", \"all_time\"\n Returns:\n [{ name: string, count: number }, ...]\n \"\"\"\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n return _get_trailing_app_metrics(session, args)","function_tokens":["def","get_trailing_app_metrics","(","args",")",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_trailing_app_metrics","(","session",",","args",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_trailing_metrics.py#L79-L92"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_alembic_version.py","language":"python","identifier":"get_alembic_version","parameters":"()","argument_list":"","return_statement":"","docstring":"Fetches the alembic version at head from the database","docstring_summary":"Fetches the alembic version at head from the database","docstring_tokens":["Fetches","the","alembic","version","at","head","from","the","database"],"function":"def get_alembic_version() -> AlembicVersion:\n \"\"\"\n Fetches the alembic version at head from the database\n \"\"\"\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n version = session.execute(\n sqlalchemy.text(\n \"\"\"\n SELECT * FROM \"alembic_version\";\n \"\"\"\n )\n ).first()\n return cast(AlembicVersion, dict(version))","function_tokens":["def","get_alembic_version","(",")","->","AlembicVersion",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","version","=","session",".","execute","(","sqlalchemy",".","text","(","\"\"\"\n SELECT * FROM \"alembic_version\";\n \"\"\"",")",")",".","first","(",")","return","cast","(","AlembicVersion",",","dict","(","version",")",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_alembic_version.py#L11-L24"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/query_helpers.py","language":"python","identifier":"get_track_remix_metadata","parameters":"(session, tracks, current_user_id)","argument_list":"","return_statement":"return remixes","docstring":"Fetches tracks' remix parent owners and if they have saved\/reposted the tracks\n\n Args:\n session: (DB) The scoped db session for running db queries\n tracks: (List) The tracks table objects to fetch remix parent user's information for\n current_user_id?: (int) Requesting user's id for adding additional metadata to the fetched users\n\n Returns:\n remixes: (dict) Mapping of child track ids to parent track ids to parent track user's metadata\n {\n [childTrackId] : {\n [parentTrackId]: {\n has_remix_author_saved: boolean,\n has_remix_author_reposted: boolean,\n user: populated user metadata\n }\n }\n }","docstring_summary":"Fetches tracks' remix parent owners and if they have saved\/reposted the 
tracks","docstring_tokens":["Fetches","tracks","remix","parent","owners","and","if","they","have","saved","\/","reposted","the","tracks"],"function":"def get_track_remix_metadata(session, tracks, current_user_id):\n \"\"\"\n Fetches tracks' remix parent owners and if they have saved\/reposted the tracks\n\n Args:\n session: (DB) The scoped db session for running db queries\n tracks: (List) The tracks table objects to fetch remix parent user's information for\n current_user_id?: (int) Requesting user's id for adding additional metadata to the fetched users\n\n Returns:\n remixes: (dict) Mapping of child track ids to parent track ids to parent track user's metadata\n {\n [childTrackId] : {\n [parentTrackId]: {\n has_remix_author_saved: boolean,\n has_remix_author_reposted: boolean,\n user: populated user metadata\n }\n }\n }\n \"\"\"\n track_ids_with_remix = []\n remix_query = []\n for track in tracks:\n if response_name_constants.remix_of in track:\n track_ids_with_remix.append(track[\"track_id\"])\n\n if track_ids_with_remix:\n # Fetch the remix parent track's user and if that user has saved\/favorited the child track\n remix_query = (\n session.query(\n Track.owner_id.label(\"track_owner_id\"),\n Remix.parent_track_id.label(\"parent_track_id\"),\n Remix.child_track_id.label(\"child_track_id\"),\n Save.is_current.label(\"has_remix_author_saved\"),\n Repost.is_current.label(\"has_remix_author_reposted\"),\n User,\n )\n .join(\n Remix,\n and_(\n Remix.parent_track_id == Track.track_id,\n Remix.child_track_id.in_(track_ids_with_remix),\n ),\n )\n .join(User, and_(User.user_id == Track.owner_id, User.is_current == True))\n .outerjoin(\n Save,\n and_(\n Save.save_item_id == Remix.child_track_id,\n Save.save_type == SaveType.track,\n Save.is_current == True,\n Save.is_delete == False,\n Save.user_id == Track.owner_id,\n ),\n )\n .outerjoin(\n Repost,\n and_(\n Repost.repost_item_id == Remix.child_track_id,\n Repost.user_id == Track.owner_id,\n Repost.repost_type == RepostType.track,\n Repost.is_current == True,\n Repost.is_delete == False,\n ),\n )\n .filter(Track.is_current == True, Track.is_unlisted == False)\n .all()\n )\n\n remixes = {}\n remix_parent_owners = {}\n populated_users = {}\n\n # Build a dict of user id -> user model obj of the remixed track's parent owner to dedupe users\n for remix_relationship in remix_query:\n [track_owner_id, _, _, _, _, user] = remix_relationship\n if track_owner_id not in remix_parent_owners:\n remix_parent_owners[track_owner_id] = user\n\n # populate the user's metadata for the remixed track's parent owner\n # build `populated_users` as a map of userId -> json user\n if remix_parent_owners:\n [remix_parent_owner_ids, remix_parent_owners] = list(\n zip(\n *[\n [k, remix_parent_owner]\n for k, remix_parent_owner in remix_parent_owners.items()\n ]\n )\n )\n remix_parent_owners = helpers.query_result_to_list(list(remix_parent_owners))\n populated_remix_parent_users = populate_user_metadata(\n session, list(remix_parent_owner_ids), remix_parent_owners, current_user_id\n )\n for user in populated_remix_parent_users:\n populated_users[user[\"user_id\"]] = user\n\n # Build a dict of child track id => parent track id => { user, has_remix_author_saved, has_remix_author_reposted }\n for remix_relationship in remix_query:\n [\n track_owner_id,\n parent_track_id,\n child_track_id,\n has_remix_author_saved,\n has_remix_author_reposted,\n _,\n ] = remix_relationship\n if child_track_id not in remixes:\n remixes[child_track_id] = {\n parent_track_id: {\n 
response_name_constants.has_remix_author_saved: bool(\n has_remix_author_saved\n ),\n response_name_constants.has_remix_author_reposted: bool(\n has_remix_author_reposted\n ),\n \"user\": populated_users[track_owner_id],\n }\n }\n else:\n remixes[child_track_id][parent_track_id] = {\n response_name_constants.has_remix_author_saved: bool(\n has_remix_author_saved\n ),\n response_name_constants.has_remix_author_reposted: bool(\n has_remix_author_reposted\n ),\n \"user\": populated_users[track_owner_id],\n }\n\n return remixes","function_tokens":["def","get_track_remix_metadata","(","session",",","tracks",",","current_user_id",")",":","track_ids_with_remix","=","[","]","remix_query","=","[","]","for","track","in","tracks",":","if","response_name_constants",".","remix_of","in","track",":","track_ids_with_remix",".","append","(","track","[","\"track_id\"","]",")","if","track_ids_with_remix",":","# Fetch the remix parent track's user and if that user has saved\/favorited the child track","remix_query","=","(","session",".","query","(","Track",".","owner_id",".","label","(","\"track_owner_id\"",")",",","Remix",".","parent_track_id",".","label","(","\"parent_track_id\"",")",",","Remix",".","child_track_id",".","label","(","\"child_track_id\"",")",",","Save",".","is_current",".","label","(","\"has_remix_author_saved\"",")",",","Repost",".","is_current",".","label","(","\"has_remix_author_reposted\"",")",",","User",",",")",".","join","(","Remix",",","and_","(","Remix",".","parent_track_id","==","Track",".","track_id",",","Remix",".","child_track_id",".","in_","(","track_ids_with_remix",")",",",")",",",")",".","join","(","User",",","and_","(","User",".","user_id","==","Track",".","owner_id",",","User",".","is_current","==","True",")",")",".","outerjoin","(","Save",",","and_","(","Save",".","save_item_id","==","Remix",".","child_track_id",",","Save",".","save_type","==","SaveType",".","track",",","Save",".","is_current","==","True",",","Save",".","is_delete","==","False",",","Save",".","user_id","==","Track",".","owner_id",",",")",",",")",".","outerjoin","(","Repost",",","and_","(","Repost",".","repost_item_id","==","Remix",".","child_track_id",",","Repost",".","user_id","==","Track",".","owner_id",",","Repost",".","repost_type","==","RepostType",".","track",",","Repost",".","is_current","==","True",",","Repost",".","is_delete","==","False",",",")",",",")",".","filter","(","Track",".","is_current","==","True",",","Track",".","is_unlisted","==","False",")",".","all","(",")",")","remixes","=","{","}","remix_parent_owners","=","{","}","populated_users","=","{","}","# Build a dict of user id -> user model obj of the remixed track's parent owner to dedupe users","for","remix_relationship","in","remix_query",":","[","track_owner_id",",","_",",","_",",","_",",","_",",","user","]","=","remix_relationship","if","track_owner_id","not","in","remix_parent_owners",":","remix_parent_owners","[","track_owner_id","]","=","user","# populate the user's metadata for the remixed track's parent owner","# build `populated_users` as a map of userId -> json 
user","if","remix_parent_owners",":","[","remix_parent_owner_ids",",","remix_parent_owners","]","=","list","(","zip","(","*","[","[","k",",","remix_parent_owner","]","for","k",",","remix_parent_owner","in","remix_parent_owners",".","items","(",")","]",")",")","remix_parent_owners","=","helpers",".","query_result_to_list","(","list","(","remix_parent_owners",")",")","populated_remix_parent_users","=","populate_user_metadata","(","session",",","list","(","remix_parent_owner_ids",")",",","remix_parent_owners",",","current_user_id",")","for","user","in","populated_remix_parent_users",":","populated_users","[","user","[","\"user_id\"","]","]","=","user","# Build a dict of child track id => parent track id => { user, has_remix_author_saved, has_remix_author_reposted }","for","remix_relationship","in","remix_query",":","[","track_owner_id",",","parent_track_id",",","child_track_id",",","has_remix_author_saved",",","has_remix_author_reposted",",","_",",","]","=","remix_relationship","if","child_track_id","not","in","remixes",":","remixes","[","child_track_id","]","=","{","parent_track_id",":","{","response_name_constants",".","has_remix_author_saved",":","bool","(","has_remix_author_saved",")",",","response_name_constants",".","has_remix_author_reposted",":","bool","(","has_remix_author_reposted",")",",","\"user\"",":","populated_users","[","track_owner_id","]",",","}","}","else",":","remixes","[","child_track_id","]","[","parent_track_id","]","=","{","response_name_constants",".","has_remix_author_saved",":","bool","(","has_remix_author_saved",")",",","response_name_constants",".","has_remix_author_reposted",":","bool","(","has_remix_author_reposted",")",",","\"user\"",":","populated_users","[","track_owner_id","]",",","}","return","remixes"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/query_helpers.py#L426-L557"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/query_helpers.py","language":"python","identifier":"get_karma","parameters":"(\n session: Session,\n ids: Tuple[int],\n strategy: TrendingVersion,\n time: str = None,\n is_playlist: bool = False,\n xf: bool = False,\n)","argument_list":"","return_statement":"return query.all()","docstring":"Gets the total karma for provided ids (track or playlist)","docstring_summary":"Gets the total karma for provided ids (track or playlist)","docstring_tokens":["Gets","the","total","karma","for","provided","ids","(","track","or","playlist",")"],"function":"def get_karma(\n session: Session,\n ids: Tuple[int],\n strategy: TrendingVersion,\n time: str = None,\n is_playlist: bool = False,\n xf: bool = False,\n):\n \"\"\"Gets the total karma for provided ids (track or playlist)\"\"\"\n\n repost_type = RepostType.playlist if is_playlist else RepostType.track\n save_type = SaveType.playlist if is_playlist else SaveType.track\n\n reposters = session.query(\n Repost.user_id.label(\"user_id\"), Repost.repost_item_id.label(\"item_id\")\n ).filter(\n Repost.repost_item_id.in_(ids),\n Repost.is_delete == False,\n Repost.is_current == True,\n Repost.repost_type == repost_type,\n )\n\n savers = session.query(\n Save.user_id.label(\"user_id\"), Save.save_item_id.label(\"item_id\")\n ).filter(\n Save.save_item_id.in_(ids),\n Save.is_current == True,\n Save.is_delete == False,\n Save.save_type == save_type,\n )\n if time is not None:\n interval = f\"NOW() - interval '1 {time}'\"\n savers = 
savers.filter(Save.created_at >= text(interval))\n reposters = reposters.filter(Repost.created_at >= text(interval))\n\n saves_and_reposts = reposters.union_all(savers).subquery()\n if xf:\n saves_and_reposts = (\n session.query(\n saves_and_reposts.c.user_id.label(\"user_id\"),\n saves_and_reposts.c.item_id.label(\"item_id\"),\n )\n .select_from(saves_and_reposts)\n .join(User, saves_and_reposts.c.user_id == User.user_id)\n )\n saves_and_reposts = saves_and_reposts.filter(\n or_(User.cover_photo != None, User.cover_photo_sizes != None),\n or_(User.profile_picture != None, User.profile_picture_sizes != None),\n User.bio != None,\n )\n saves_and_reposts = saves_and_reposts.subquery()\n\n query = (\n session.query(\n saves_and_reposts.c.item_id,\n cast(func.sum(AggregateUser.follower_count), Integer),\n )\n .select_from(saves_and_reposts)\n .join(AggregateUser, saves_and_reposts.c.user_id == AggregateUser.user_id)\n .group_by(saves_and_reposts.c.item_id)\n )\n\n return query.all()","function_tokens":["def","get_karma","(","session",":","Session",",","ids",":","Tuple","[","int","]",",","strategy",":","TrendingVersion",",","time",":","str","=","None",",","is_playlist",":","bool","=","False",",","xf",":","bool","=","False",",",")",":","repost_type","=","RepostType",".","playlist","if","is_playlist","else","RepostType",".","track","save_type","=","SaveType",".","playlist","if","is_playlist","else","SaveType",".","track","reposters","=","session",".","query","(","Repost",".","user_id",".","label","(","\"user_id\"",")",",","Repost",".","repost_item_id",".","label","(","\"item_id\"",")",")",".","filter","(","Repost",".","repost_item_id",".","in_","(","ids",")",",","Repost",".","is_delete","==","False",",","Repost",".","is_current","==","True",",","Repost",".","repost_type","==","repost_type",",",")","savers","=","session",".","query","(","Save",".","user_id",".","label","(","\"user_id\"",")",",","Save",".","save_item_id",".","label","(","\"item_id\"",")",")",".","filter","(","Save",".","save_item_id",".","in_","(","ids",")",",","Save",".","is_current","==","True",",","Save",".","is_delete","==","False",",","Save",".","save_type","==","save_type",",",")","if","time","is","not","None",":","interval","=","f\"NOW() - interval '1 
{time}'\"","savers","=","savers",".","filter","(","Save",".","created_at",">=","text","(","interval",")",")","reposters","=","reposters",".","filter","(","Repost",".","created_at",">=","text","(","interval",")",")","saves_and_reposts","=","reposters",".","union_all","(","savers",")",".","subquery","(",")","if","xf",":","saves_and_reposts","=","(","session",".","query","(","saves_and_reposts",".","c",".","user_id",".","label","(","\"user_id\"",")",",","saves_and_reposts",".","c",".","item_id",".","label","(","\"item_id\"",")",",",")",".","select_from","(","saves_and_reposts",")",".","join","(","User",",","saves_and_reposts",".","c",".","user_id","==","User",".","user_id",")",")","saves_and_reposts","=","saves_and_reposts",".","filter","(","or_","(","User",".","cover_photo","!=","None",",","User",".","cover_photo_sizes","!=","None",")",",","or_","(","User",".","profile_picture","!=","None",",","User",".","profile_picture_sizes","!=","None",")",",","User",".","bio","!=","None",",",")","saves_and_reposts","=","saves_and_reposts",".","subquery","(",")","query","=","(","session",".","query","(","saves_and_reposts",".","c",".","item_id",",","cast","(","func",".","sum","(","AggregateUser",".","follower_count",")",",","Integer",")",",",")",".","select_from","(","saves_and_reposts",")",".","join","(","AggregateUser",",","saves_and_reposts",".","c",".","user_id","==","AggregateUser",".","user_id",")",".","group_by","(","saves_and_reposts",".","c",".","item_id",")",")","return","query",".","all","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/query_helpers.py#L793-L855"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/query_helpers.py","language":"python","identifier":"get_track_play_counts","parameters":"(db, track_ids)","argument_list":"","return_statement":"return track_listen_counts","docstring":"Gets the track play counts for the given track_ids\n Args:\n db: sqlalchemy db session instance\n track_ids: list of track ids\n\n Returns:\n dict of track id keys to track play count values","docstring_summary":"Gets the track play counts for the given track_ids\n Args:\n db: sqlalchemy db session instance\n track_ids: list of track ids","docstring_tokens":["Gets","the","track","play","counts","for","the","given","track_ids","Args",":","db",":","sqlalchemy","db","session","instance","track_ids",":","list","of","track","ids"],"function":"def get_track_play_counts(db, track_ids):\n \"\"\"Gets the track play counts for the given track_ids\n Args:\n db: sqlalchemy db session instance\n track_ids: list of track ids\n\n Returns:\n dict of track id keys to track play count values\n \"\"\"\n\n track_listen_counts = {}\n\n if not track_ids:\n return track_listen_counts\n\n track_plays = (\n db.query(AggregatePlays)\n .filter(AggregatePlays.play_item_id.in_(track_ids))\n .all()\n )\n\n for track_play in track_plays:\n track_listen_counts[track_play.play_item_id] = track_play.count\n\n for track_id in track_ids:\n if track_id not in track_listen_counts:\n track_listen_counts[track_id] = 0\n\n return 
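get_karma totals the follower counts of everyone who saved or reposted each item; because the two sides are combined with union_all, a user who both saved and reposted is counted once per action. The same aggregation in plain Python (numbers hypothetical):

    reposters = [(11, 7), (12, 7)]   # (user_id, item_id)
    savers = [(12, 7), (13, 7)]      # union_all keeps the duplicate user 12
    follower_count = {11: 100, 12: 2000, 13: 30}

    karma = {}
    for user_id, item_id in reposters + savers:
        karma[item_id] = karma.get(item_id, 0) + follower_count[user_id]
    print(karma)  # {7: 4130} -- user 12 contributes twice, once per action

Note also that the time filter interpolates `time` into raw SQL via text(), so callers are presumably restricted to a fixed set of interval units.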
track_listen_counts","function_tokens":["def","get_track_play_counts","(","db",",","track_ids",")",":","track_listen_counts","=","{","}","if","not","track_ids",":","return","track_listen_counts","track_plays","=","(","db",".","query","(","AggregatePlays",")",".","filter","(","AggregatePlays",".","play_item_id",".","in_","(","track_ids",")",")",".","all","(",")",")","for","track_play","in","track_plays",":","track_listen_counts","[","track_play",".","play_item_id","]","=","track_play",".","count","for","track_id","in","track_ids",":","if","track_id","not","in","track_listen_counts",":","track_listen_counts","[","track_id","]","=","0","return","track_listen_counts"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/query_helpers.py#L945-L973"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/query_helpers.py","language":"python","identifier":"get_sum_aggregate_plays","parameters":"(db)","argument_list":"","return_statement":"return int(plays)","docstring":"Gets the sum of all aggregate plays\n Args:\n db: sqlalchemy db session instance\n\n Returns:\n int of total play count","docstring_summary":"Gets the sum of all aggregate plays\n Args:\n db: sqlalchemy db session instance","docstring_tokens":["Gets","the","sum","of","all","aggregate","plays","Args",":","db",":","sqlalchemy","db","session","instance"],"function":"def get_sum_aggregate_plays(db):\n \"\"\"Gets the sum of all aggregate plays\n Args:\n db: sqlalchemy db session instance\n\n Returns:\n int of total play count\n \"\"\"\n\n plays = db.query(func.sum(AggregatePlays.count)).scalar()\n\n return int(plays)","function_tokens":["def","get_sum_aggregate_plays","(","db",")",":","plays","=","db",".","query","(","func",".","sum","(","AggregatePlays",".","count",")",")",".","scalar","(",")","return","int","(","plays",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/query_helpers.py#L976-L987"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/query_helpers.py","language":"python","identifier":"create_save_repost_count_subquery","parameters":"(session, type)","argument_list":"","return_statement":"return subquery","docstring":"Creates a subquery for `type` that represents a combined save + repost count.\n\n For example, to get the tracks with the largest combined save and repost count, use:\n subquery = create_save_repost_count_subquery(session, 'track')\n session\n .query(tracks)\n .join(subquery, tracks.track_id = subquery.c.id)\n .order_by(desc(subquery.c.count))\n\n Args:\n session: SQLAlchemy session.\n type: (string) The type of save\/repost (album, playlist, track)\n\n Returns: A subquery with two fields `id` and `count`.","docstring_summary":"Creates a subquery for `type` that represents a combined save + repost count.","docstring_tokens":["Creates","a","subquery","for","type","that","represents","a","combined","save","+","repost","count","."],"function":"def create_save_repost_count_subquery(session, type):\n \"\"\"\n Creates a subquery for `type` that represents a combined save + repost count.\n\n For example, to get the tracks with the largest combined save and repost count, use:\n subquery = create_save_repost_count_subquery(session, 'track')\n session\n .query(tracks)\n 
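get_track_play_counts backfills zero for ids with no aggregate row, and get_sum_aggregate_plays' int(plays) would raise TypeError on an empty table because func.sum yields NULL; both shapes in plain Python, with a guarded sum:

    aggregate_plays = {10: 42}  # play_item_id -> count, hypothetical
    track_ids = [10, 11]

    track_listen_counts = {tid: aggregate_plays.get(tid, 0) for tid in track_ids}
    print(track_listen_counts)  # {10: 42, 11: 0}

    plays = None  # what .scalar() yields when the aggregate table is empty
    total = int(plays) if plays is not None else 0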
.join(subquery, tracks.track_id = subquery.c.id)\n .order_by(desc(subquery.c.count))\n\n Args:\n session: SQLAlchemy session.\n type: (string) The type of save\/repost (album, playlist, track)\n\n Returns: A subquery with two fields `id` and `count`.\n \"\"\"\n # Get reposts by item id\n reposts_count_subquery = (\n session.query(\n Repost.repost_item_id,\n )\n .filter(\n Repost.is_current == True,\n Repost.is_delete == False,\n Repost.repost_type == type,\n )\n .subquery()\n )\n\n # Query saves joined against reposts grouped by id and calculate\n # a combined count for each\n subquery = (\n session.query(\n Save.save_item_id.label(\"id\"),\n (\n func.count(Save.save_item_id)\n + func.count(reposts_count_subquery.c.repost_item_id)\n ).label(\"count\"),\n )\n # Join against reposts filtering to matching ids.\n # Inner-join drops no-match ids.\n .join(\n reposts_count_subquery,\n Save.save_item_id == reposts_count_subquery.c.repost_item_id,\n )\n .filter(\n Save.is_current == True, Save.is_delete == False, Save.save_type == type\n )\n .group_by(Save.save_item_id)\n .subquery()\n )\n return subquery","function_tokens":["def","create_save_repost_count_subquery","(","session",",","type",")",":","# Get reposts by item id","reposts_count_subquery","=","(","session",".","query","(","Repost",".","repost_item_id",",",")",".","filter","(","Repost",".","is_current","==","True",",","Repost",".","is_delete","==","False",",","Repost",".","repost_type","==","type",",",")",".","subquery","(",")",")","# Query saves joined against reposts grouped by id and calculate","# a combined count for each","subquery","=","(","session",".","query","(","Save",".","save_item_id",".","label","(","\"id\"",")",",","(","func",".","count","(","Save",".","save_item_id",")","+","func",".","count","(","reposts_count_subquery",".","c",".","repost_item_id",")",")",".","label","(","\"count\"",")",",",")","# Join against reposts filtering to matching ids.","# Inner-join drops no-match ids.",".","join","(","reposts_count_subquery",",","Save",".","save_item_id","==","reposts_count_subquery",".","c",".","repost_item_id",",",")",".","filter","(","Save",".","is_current","==","True",",","Save",".","is_delete","==","False",",","Save",".","save_type","==","type",")",".","group_by","(","Save",".","save_item_id",")",".","subquery","(",")",")","return","subquery"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/query_helpers.py#L1055-L1107"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/query_helpers.py","language":"python","identifier":"create_save_count_subquery","parameters":"(session, type)","argument_list":"","return_statement":"return subquery","docstring":"Creates a subquery for `type` that represents the save count.\n\n For example, to get the tracks with the largest save count:\n subquery = create_save_count_subquery(session, 'track')\n session\n .query(tracks)\n .join(subquery, tracks.track_id = subquery.c.id)\n .order_by(desc(subquery.c.save_count))\n\n Args:\n session: SQLAlchemy session.\n type: (string) The type of save\/repost (album, playlist, track)\n\n Returns: A subquery with two fields `id` and `save_count`.","docstring_summary":"Creates a subquery for `type` that represents the save count.","docstring_tokens":["Creates","a","subquery","for","type","that","represents","the","save","count","."],"function":"def create_save_count_subquery(session, 
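Note that the docstring's usage example writes tracks.track_id = subquery.c.id, where a SQLAlchemy join condition needs ==. A hedged usage sketch of the combined save + repost count subquery, assuming a mapped Track model and an open session:

```python
from sqlalchemy import desc

subquery = create_save_repost_count_subquery(session, "track")
popular_tracks = (
    session.query(Track)
    .join(subquery, Track.track_id == subquery.c.id)  # '==', not '=' as in the docstring
    .order_by(desc(subquery.c.count))
    .limit(10)
    .all()
)
```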
type):\n \"\"\"\n Creates a subquery for `type` that represents the save count.\n\n For example, to get the tracks with the largest save count:\n subquery = create_save_count_subquery(session, 'track')\n session\n .query(tracks)\n .join(subquery, tracks.track_id = subquery.c.id)\n .order_by(desc(subquery.c.save_count))\n\n Args:\n session: SQLAlchemy session.\n type: (string) The type of save\/repost (album, playlist, track)\n\n Returns: A subquery with two fields `id` and `save_count`.\n \"\"\"\n subquery = (\n session.query(\n Save.save_item_id.label(\"id\"),\n func.count(Save.save_item_id).label(response_name_constants.save_count),\n )\n .filter(\n Save.is_current == True, Save.is_delete == False, Save.save_type == type\n )\n .group_by(Save.save_item_id)\n .subquery()\n )\n return subquery","function_tokens":["def","create_save_count_subquery","(","session",",","type",")",":","subquery","=","(","session",".","query","(","Save",".","save_item_id",".","label","(","\"id\"",")",",","func",".","count","(","Save",".","save_item_id",")",".","label","(","response_name_constants",".","save_count",")",",",")",".","filter","(","Save",".","is_current","==","True",",","Save",".","is_delete","==","False",",","Save",".","save_type","==","type",")",".","group_by","(","Save",".","save_item_id",")",".","subquery","(",")",")","return","subquery"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/query_helpers.py#L1110-L1138"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/query_helpers.py","language":"python","identifier":"create_repost_count_subquery","parameters":"(session, type)","argument_list":"","return_statement":"return subquery","docstring":"Creates a subquery for `type` that represents the repost count.\n\n For example, to get the tracks with the largest repost count:\n subquery = create_repost_count_subquery(session, 'track')\n session\n .query(tracks)\n .join(subquery, tracks.track_id = subquery.c.id)\n .order_by(desc(subquery.c.repost_count))\n\n Args:\n session: SQLAlchemy session.\n type: (string) The type of save\/repost (album, playlist, track)\n\n Returns: A subquery with two fields `id` and `repost_count`.","docstring_summary":"Creates a subquery for `type` that represents the repost count.","docstring_tokens":["Creates","a","subquery","for","type","that","represents","the","repost","count","."],"function":"def create_repost_count_subquery(session, type):\n \"\"\"\n Creates a subquery for `type` that represents the repost count.\n\n For example, to get the tracks with the largest repost count:\n subquery = create_repost_count_subquery(session, 'track')\n session\n .query(tracks)\n .join(subquery, tracks.track_id = subquery.c.id)\n .order_by(desc(subquery.c.repost_count))\n\n Args:\n session: SQLAlchemy session.\n type: (string) The type of save\/repost (album, playlist, track)\n\n Returns: A subquery with two fields `id` and `repost_count`.\n \"\"\"\n subquery = (\n session.query(\n Repost.repost_item_id.label(\"id\"),\n func.count(Repost.repost_item_id).label(\n response_name_constants.repost_count\n ),\n )\n .filter(\n Repost.is_current == True,\n Repost.is_delete == False,\n Repost.repost_type == type,\n )\n .group_by(Repost.repost_item_id)\n .subquery()\n )\n return 
subquery","function_tokens":["def","create_repost_count_subquery","(","session",",","type",")",":","subquery","=","(","session",".","query","(","Repost",".","repost_item_id",".","label","(","\"id\"",")",",","func",".","count","(","Repost",".","repost_item_id",")",".","label","(","response_name_constants",".","repost_count",")",",",")",".","filter","(","Repost",".","is_current","==","True",",","Repost",".","is_delete","==","False",",","Repost",".","repost_type","==","type",",",")",".","group_by","(","Repost",".","repost_item_id",")",".","subquery","(",")",")","return","subquery"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/query_helpers.py#L1141-L1173"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/query_helpers.py","language":"python","identifier":"create_followee_playlists_subquery","parameters":"(session, current_user_id)","argument_list":"","return_statement":"return followee_playlists_subquery","docstring":"Creates a subquery that returns playlists created by users that\n `current_user_id` follows.\n\n Args:\n session: SQLAlchemy session.\n current_user_id: The current user id to query against","docstring_summary":"Creates a subquery that returns playlists created by users that\n `current_user_id` follows.","docstring_tokens":["Creates","a","subquery","that","returns","playlists","created","by","users","that","current_user_id","follows","."],"function":"def create_followee_playlists_subquery(session, current_user_id):\n \"\"\"\n Creates a subquery that returns playlists created by users that\n `current_user_id` follows.\n\n Args:\n session: SQLAlchemy session.\n current_user_id: The current user id to query against\n \"\"\"\n # Get active followees\n followee_user_ids_subquery = (\n session.query(Follow.followee_user_id)\n .filter(\n Follow.follower_user_id == current_user_id,\n Follow.is_current == True,\n Follow.is_delete == False,\n )\n .subquery()\n )\n followee_playlists_subquery = (\n session.query(Playlist)\n .select_from(Playlist)\n .join(\n followee_user_ids_subquery,\n Playlist.playlist_owner_id == followee_user_ids_subquery.c.followee_user_id,\n )\n .subquery()\n )\n return followee_playlists_subquery","function_tokens":["def","create_followee_playlists_subquery","(","session",",","current_user_id",")",":","# Get active followees","followee_user_ids_subquery","=","(","session",".","query","(","Follow",".","followee_user_id",")",".","filter","(","Follow",".","follower_user_id","==","current_user_id",",","Follow",".","is_current","==","True",",","Follow",".","is_delete","==","False",",",")",".","subquery","(",")",")","followee_playlists_subquery","=","(","session",".","query","(","Playlist",")",".","select_from","(","Playlist",")",".","join","(","followee_user_ids_subquery",",","Playlist",".","playlist_owner_id","==","followee_user_ids_subquery",".","c",".","followee_user_id",",",")",".","subquery","(",")",")","return","followee_playlists_subquery"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/query_helpers.py#L1176-L1204"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/query_helpers.py","language":"python","identifier":"seconds_ago","parameters":"(timestamp)","argument_list":"","return_statement":"return 
func.extract(\"epoch\", (func.now() - timestamp))","docstring":"Gets the number of seconds ago `timestamp` was from now as a SqlAlchemy expression.","docstring_summary":"Gets the number of seconds ago `timestamp` was from now as a SqlAlchemy expression.","docstring_tokens":["Gets","the","number","of","seconds","ago","timestamp","was","from","now","as","a","SqlAlchemy","expression","."],"function":"def seconds_ago(timestamp):\n \"\"\"Gets the number of seconds ago `timestamp` was from now as a SqlAlchemy expression.\"\"\"\n return func.extract(\"epoch\", (func.now() - timestamp))","function_tokens":["def","seconds_ago","(","timestamp",")",":","return","func",".","extract","(","\"epoch\"",",","(","func",".","now","(",")","-","timestamp",")",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/query_helpers.py#L1207-L1209"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/query_helpers.py","language":"python","identifier":"decayed_score","parameters":"(score, created_at, peak=1000, nominal_timestamp=60 * 24 * 60 * 60)","argument_list":"","return_statement":"return score * decay_value","docstring":"Creates a decaying (over time) version of the provided `score`. The returned\n value is score * a multiplier determined by `peak` and `nominal_timestamp`.\n\n Args:\n score: (number) The base score to modify\n created_at: (timestamp) The timestamp the score is attributed to\n peak?: (number) The peak multipler possible\n\n Returns:\n A SQLAlchemy expression representing decayed score (score * multipler)\n where multipler is represented by:\n peak ^ 1 - min(time_ago \/ nominal_timestamp, 1)","docstring_summary":"Creates a decaying (over time) version of the provided `score`. The returned\n value is score * a multiplier determined by `peak` and `nominal_timestamp`.","docstring_tokens":["Creates","a","decaying","(","over","time",")","version","of","the","provided","score",".","The","returned","value","is","score","*","a","multiplier","determined","by","peak","and","nominal_timestamp","."],"function":"def decayed_score(score, created_at, peak=1000, nominal_timestamp=60 * 24 * 60 * 60):\n \"\"\"\n Creates a decaying (over time) version of the provided `score`. 
The returned\n value is score * a multiplier determined by `peak` and `nominal_timestamp`.\n\n Args:\n score: (number) The base score to modify\n created_at: (timestamp) The timestamp the score is attributed to\n peak?: (number) The peak multipler possible\n\n Returns:\n A SQLAlchemy expression representing decayed score (score * multipler)\n where multipler is represented by:\n peak ^ 1 - min(time_ago \/ nominal_timestamp, 1)\n \"\"\"\n decay_exponent = 1 - func.least(\n seconds_ago(created_at) \/ nominal_timestamp, 1\n ) # goes from 1 -> 0\n decay_value = func.pow(peak, decay_exponent) \/ peak # decay slope value\n return score * decay_value","function_tokens":["def","decayed_score","(","score",",","created_at",",","peak","=","1000",",","nominal_timestamp","=","60","*","24","*","60","*","60",")",":","decay_exponent","=","1","-","func",".","least","(","seconds_ago","(","created_at",")","\/","nominal_timestamp",",","1",")","# goes from 1 -> 0","decay_value","=","func",".","pow","(","peak",",","decay_exponent",")","\/","peak","# decay slope value","return","score","*","decay_value"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/query_helpers.py#L1212-L1231"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/query_helpers.py","language":"python","identifier":"filter_to_playlist_mood","parameters":"(session, mood, query, correlation)","argument_list":"","return_statement":"return query.filter(mood_exists_query.exists())","docstring":"Takes a session that is querying for playlists and filters the playlists\n to only those with the dominant mood provided.\n Dominant mood means that *most* of its tracks are of the specified mood.\n\n This method takes a query inserts a filter clause on it and returns the same query.\n We filter down those playlists to dominant mood by running an \"exists\" clause\n on a dominant mood subquery.\n\n Args:\n session: SQLALchemy session.\n mood: (string) The mood to query against.\n query: The base query to filter on\n correlation: An optional correlation \/ subquery to correlate against.\n\n Returns: A modified version of `query` with an extra filter clause.","docstring_summary":"Takes a session that is querying for playlists and filters the playlists\n to only those with the dominant mood provided.\n Dominant mood means that *most* of its tracks are of the specified mood.","docstring_tokens":["Takes","a","session","that","is","querying","for","playlists","and","filters","the","playlists","to","only","those","with","the","dominant","mood","provided",".","Dominant","mood","means","that","*","most","*","of","its","tracks","are","of","the","specified","mood","."],"function":"def filter_to_playlist_mood(session, mood, query, correlation):\n \"\"\"\n Takes a session that is querying for playlists and filters the playlists\n to only those with the dominant mood provided.\n Dominant mood means that *most* of its tracks are of the specified mood.\n\n This method takes a query inserts a filter clause on it and returns the same query.\n We filter down those playlists to dominant mood by running an \"exists\" clause\n on a dominant mood subquery.\n\n Args:\n session: SQLALchemy session.\n mood: (string) The mood to query against.\n query: The base query to filter on\n correlation: An optional correlation \/ subquery to correlate against.\n\n Returns: A modified version of `query` with an extra filter 
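The decay multiplier in decayed_score is peak ** (1 - min(age / nominal_timestamp, 1)) / peak: it is 1.0 for a brand-new item and bottoms out at 1/peak once the item is nominal_timestamp old (60 days in seconds by default). A plain-Python rendering of the same arithmetic:

```python
def decay_multiplier(age_seconds, peak=1000, nominal=60 * 24 * 60 * 60):
    exponent = 1 - min(age_seconds / nominal, 1)  # goes 1 -> 0 as the item ages
    return peak ** exponent / peak                # goes 1.0 -> 1 / peak

assert decay_multiplier(0) == 1.0                                # fresh: full score
assert abs(decay_multiplier(60 * 24 * 60 * 60) - 0.001) < 1e-12  # 60 days old: score / 1000
```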
clause.\n \"\"\"\n if not mood:\n return query\n\n tracks_subquery = session.query(\n func.jsonb_array_elements(correlation.c.playlist_contents[\"track_ids\"])\n .op(\"->>\")(\"track\")\n .cast(Integer)\n )\n\n if correlation is not None:\n # If this query runs against a nested subquery, it might need to\n # be manually correlated to that subquery so it doesn't pull in all\n # playlists here.\n tracks_subquery = tracks_subquery.correlate(correlation)\n\n # Query for the most common mood in a playlist\n dominant_mood_subquery = (\n session.query(\n Track.mood.label(\"mood\"),\n func.max(Track.track_id).label(\"latest\"),\n func.count(Track.mood).label(\"cnt\"),\n )\n .filter(\n Track.is_current == True,\n Track.is_delete == False,\n Track.track_id.in_(tracks_subquery),\n )\n .group_by(Track.mood)\n .order_by(desc(\"cnt\"), desc(\"latest\"))\n .limit(1)\n .subquery()\n )\n\n # Match the provided mood against the dominant mood for playlists\n mood_exists_query = session.query(dominant_mood_subquery.c.mood).filter(\n func.lower(dominant_mood_subquery.c.mood) == func.lower(mood)\n )\n\n # Filter playlist query to those that have the most common mood checking that\n # there `exists` such a playlist with the dominant mood\n return query.filter(mood_exists_query.exists())","function_tokens":["def","filter_to_playlist_mood","(","session",",","mood",",","query",",","correlation",")",":","if","not","mood",":","return","query","tracks_subquery","=","session",".","query","(","func",".","jsonb_array_elements","(","correlation",".","c",".","playlist_contents","[","\"track_ids\"","]",")",".","op","(","\"->>\"",")","(","\"track\"",")",".","cast","(","Integer",")",")","if","correlation","is","not","None",":","# If this query runs against a nested subquery, it might need to","# be manually correlated to that subquery so it doesn't pull in all","# playlists here.","tracks_subquery","=","tracks_subquery",".","correlate","(","correlation",")","# Query for the most common mood in a playlist","dominant_mood_subquery","=","(","session",".","query","(","Track",".","mood",".","label","(","\"mood\"",")",",","func",".","max","(","Track",".","track_id",")",".","label","(","\"latest\"",")",",","func",".","count","(","Track",".","mood",")",".","label","(","\"cnt\"",")",",",")",".","filter","(","Track",".","is_current","==","True",",","Track",".","is_delete","==","False",",","Track",".","track_id",".","in_","(","tracks_subquery",")",",",")",".","group_by","(","Track",".","mood",")",".","order_by","(","desc","(","\"cnt\"",")",",","desc","(","\"latest\"",")",")",".","limit","(","1",")",".","subquery","(",")",")","# Match the provided mood against the dominant mood for playlists","mood_exists_query","=","session",".","query","(","dominant_mood_subquery",".","c",".","mood",")",".","filter","(","func",".","lower","(","dominant_mood_subquery",".","c",".","mood",")","==","func",".","lower","(","mood",")",")","# Filter playlist query to those that have the most common mood checking that","# there `exists` such a playlist with the dominant mood","return","query",".","filter","(","mood_exists_query",".","exists","(",")",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/query_helpers.py#L1234-L1292"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/query_helpers.py","language":"python","identifier":"add_users_to_tracks","parameters":"(session, tracks, 
current_user_id=None)","argument_list":"","return_statement":"","docstring":"Fetches the owners for the tracks and adds them to the track dict under the key 'user'\n\n Args:\n session: (DB) sqlalchemy scoped db session\n tracks: (Array) Array of tracks dict\n\n Side Effects:\n Modifies the track dictionaries to add a nested owner user\n\n Returns: None","docstring_summary":"Fetches the owners for the tracks and adds them to the track dict under the key 'user'","docstring_tokens":["Fetches","the","owners","for","the","tracks","and","adds","them","to","the","track","dict","under","the","key","user"],"function":"def add_users_to_tracks(session, tracks, current_user_id=None):\n \"\"\"\n Fetches the owners for the tracks and adds them to the track dict under the key 'user'\n\n Args:\n session: (DB) sqlalchemy scoped db session\n tracks: (Array) Array of tracks dict\n\n Side Effects:\n Modifies the track dictionaries to add a nested owner user\n\n Returns: None\n \"\"\"\n user_ids = get_users_ids(tracks)\n users = []\n if tracks and len(tracks) > 0 and tracks[0].get(\"user\"):\n users = list(map(lambda t: t[\"user\"][0], tracks))\n else:\n # This shouldn't happen - all tracks should come preloaded with their owners per the relationship\n users = get_unpopulated_users(session, user_ids)\n logger.warning(\"add_users_to_tracks() called but tracks have no users\")\n set_users_in_cache(users)\n # bundle peripheral info into user results\n populated_users = populate_user_metadata(session, user_ids, users, current_user_id)\n user_map = {}\n for user in populated_users:\n user_map[user[\"user_id\"]] = user\n\n for track in tracks:\n user = user_map[track[\"owner_id\"]]\n if user:\n track[\"user\"] = user","function_tokens":["def","add_users_to_tracks","(","session",",","tracks",",","current_user_id","=","None",")",":","user_ids","=","get_users_ids","(","tracks",")","users","=","[","]","if","tracks","and","len","(","tracks",")",">","0","and","tracks","[","0","]",".","get","(","\"user\"",")",":","users","=","list","(","map","(","lambda","t",":","t","[","\"user\"","]","[","0","]",",","tracks",")",")","else",":","# This shouldn't happen - all tracks should come preloaded with their owners per the relationship","users","=","get_unpopulated_users","(","session",",","user_ids",")","logger",".","warning","(","\"add_users_to_tracks() called but tracks have no users\"",")","set_users_in_cache","(","users",")","# bundle peripheral info into user results","populated_users","=","populate_user_metadata","(","session",",","user_ids",",","users",",","current_user_id",")","user_map","=","{","}","for","user","in","populated_users",":","user_map","[","user","[","\"user_id\"","]","]","=","user","for","track","in","tracks",":","user","=","user_map","[","track","[","\"owner_id\"","]","]","if","user",":","track","[","\"user\"","]","=","user"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/query_helpers.py#L1295-L1326"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_plays_metrics.py","language":"python","identifier":"get_plays_metrics","parameters":"(args: GetPlayMetricsArgs)","argument_list":"","return_statement":"","docstring":"Returns metrics for play counts\n\n Args:\n args: GetPlayMetrics the parsed args from the request\n\n Returns:\n Array of dictionaries with the play counts and timestamp","docstring_summary":"Returns metrics for play 
counts","docstring_tokens":["Returns","metrics","for","play","counts"],"function":"def get_plays_metrics(args: GetPlayMetricsArgs):\n \"\"\"\n Returns metrics for play counts\n\n Args:\n args: GetPlayMetrics the parsed args from the request\n\n Returns:\n Array of dictionaries with the play counts and timestamp\n \"\"\"\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n return _get_plays_metrics(session, args)","function_tokens":["def","get_plays_metrics","(","args",":","GetPlayMetricsArgs",")",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_plays_metrics","(","session",",","args",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_plays_metrics.py#L24-L36"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_trending_playlists.py","language":"python","identifier":"get_scorable_playlist_data","parameters":"(session, time_range, strategy)","argument_list":"","return_statement":"return playlist_map.values()","docstring":"Gets data about playlists to be scored. Returns:\n Array<{\n \"playlist_id\": number\n \"created_at\": string\n \"owner_id\": string\n \"windowed_save_count\": number\n \"save_count\": number\n \"repost_count: number,\n \"windowed_repost_count: number\n \"listens\": number (always 1)\n }>","docstring_summary":"Gets data about playlists to be scored. Returns:\n Array<{\n \"playlist_id\": number\n \"created_at\": string\n \"owner_id\": string\n \"windowed_save_count\": number\n \"save_count\": number\n \"repost_count: number,\n \"windowed_repost_count: number\n \"listens\": number (always 1)\n }>","docstring_tokens":["Gets","data","about","playlists","to","be","scored",".","Returns",":","Array<","{","playlist_id",":","number","created_at",":","string","owner_id",":","string","windowed_save_count",":","number","save_count",":","number","repost_count",":","number","windowed_repost_count",":","number","listens",":","number","(","always","1",")","}",">"],"function":"def get_scorable_playlist_data(session, time_range, strategy):\n \"\"\"Gets data about playlists to be scored. 
Returns:\n Array<{\n \"playlist_id\": number\n \"created_at\": string\n \"owner_id\": string\n \"windowed_save_count\": number\n \"save_count\": number\n \"repost_count: number,\n \"windowed_repost_count: number\n \"listens\": number (always 1)\n }>\n \"\"\"\n score_params = strategy.get_score_params()\n zq = score_params[\"zq\"]\n xf = score_params[\"xf\"]\n pt = score_params[\"pt\"]\n mt = score_params[\"mt\"]\n\n delta = time_delta_map.get(time_range) or time_delta_map.get(\"week\")\n\n # Get all playlists saved within time range (windowed_save_count):\n # Queries by Playlists Joined with Saves,\n # where a given playlist was saved at least once in the past `time_delta`.\n # Limits to `TRENDING_LIMIT` and sorts by saves for later scoring.\n playlists = (\n session.query(\n Save.save_item_id,\n Playlist.created_at,\n Playlist.playlist_owner_id,\n func.count(Save.save_item_id),\n )\n .join(Playlist, Playlist.playlist_id == Save.save_item_id)\n .join(AggregateUser, AggregateUser.user_id == Playlist.playlist_owner_id)\n .filter(\n Save.is_current == True,\n Save.is_delete == False,\n Save.save_type == SaveType.playlist, # Albums are filtered out\n Save.created_at > datetime.now() - delta,\n Playlist.is_current == True,\n Playlist.is_delete == False,\n Playlist.is_private == False,\n jsonb_array_length(Playlist.playlist_contents[\"track_ids\"]) >= mt,\n AggregateUser.following_count < zq,\n )\n .group_by(Save.save_item_id, Playlist.created_at, Playlist.playlist_owner_id)\n .order_by(desc(func.count(Save.save_item_id)))\n .limit(TRENDING_LIMIT)\n ).all()\n\n # Build up a map of playlist data\n # playlist_id -> data\n # Some fields initialized at zero\n playlist_map = {\n record[0]: {\n response_name_constants.playlist_id: record[0],\n response_name_constants.created_at: record[1].isoformat(timespec=\"seconds\"),\n response_name_constants.owner_id: record[2],\n response_name_constants.windowed_save_count: record[3],\n response_name_constants.save_count: 0,\n response_name_constants.repost_count: 0,\n response_name_constants.windowed_repost_count: 0,\n response_name_constants.owner_follower_count: 0,\n \"karma\": 1,\n \"listens\": 1,\n }\n for record in playlists\n }\n\n playlist_ids = [record[0] for record in playlists]\n # map owner_id -> [playlist_id], accounting for multiple playlists with the same ID\n # used in follows\n playlist_owner_id_map = {}\n for (playlist_id, _, owner_id, _) in playlists:\n if owner_id not in playlist_owner_id_map:\n playlist_owner_id_map[owner_id] = [playlist_id]\n else:\n playlist_owner_id_map[owner_id].append(playlist_id)\n\n # Add repost counts\n repost_counts = get_repost_counts(\n session, False, False, playlist_ids, [RepostType.playlist]\n )\n for (playlist_id, repost_count) in repost_counts:\n playlist_map[playlist_id][response_name_constants.repost_count] = repost_count\n\n # Add windowed repost counts\n repost_counts_for_time = get_repost_counts(\n session, False, False, playlist_ids, [RepostType.playlist], None, time_range\n )\n for (playlist_id, repost_count) in repost_counts_for_time:\n playlist_map[playlist_id][\n response_name_constants.windowed_repost_count\n ] = repost_count\n\n # Add save counts\n save_counts = get_save_counts(\n session, False, False, playlist_ids, [SaveType.playlist]\n )\n for (playlist_id, save_count) in save_counts:\n playlist_map[playlist_id][response_name_constants.save_count] = save_count\n\n # Add follower counts\n follower_counts = (\n session.query(Follow.followee_user_id, func.count(Follow.followee_user_id))\n 
.filter(\n Follow.is_current == True,\n Follow.is_delete == False,\n Follow.followee_user_id.in_(list(playlist_owner_id_map.keys())),\n )\n .group_by(Follow.followee_user_id)\n .all()\n )\n for (followee_user_id, follower_count) in follower_counts:\n if follower_count >= pt:\n owned_playlist_ids = playlist_owner_id_map[followee_user_id]\n for playlist_id in owned_playlist_ids:\n playlist_map[playlist_id][\n response_name_constants.owner_follower_count\n ] = follower_count\n\n # Add karma\n karma_scores = get_karma(session, tuple(playlist_ids), strategy, None, True, xf)\n for (playlist_id, karma) in karma_scores:\n playlist_map[playlist_id][\"karma\"] = karma\n\n return playlist_map.values()","function_tokens":["def","get_scorable_playlist_data","(","session",",","time_range",",","strategy",")",":","score_params","=","strategy",".","get_score_params","(",")","zq","=","score_params","[","\"zq\"","]","xf","=","score_params","[","\"xf\"","]","pt","=","score_params","[","\"pt\"","]","mt","=","score_params","[","\"mt\"","]","delta","=","time_delta_map",".","get","(","time_range",")","or","time_delta_map",".","get","(","\"week\"",")","# Get all playlists saved within time range (windowed_save_count):","# Queries by Playlists Joined with Saves,","# where a given playlist was saved at least once in the past `time_delta`.","# Limits to `TRENDING_LIMIT` and sorts by saves for later scoring.","playlists","=","(","session",".","query","(","Save",".","save_item_id",",","Playlist",".","created_at",",","Playlist",".","playlist_owner_id",",","func",".","count","(","Save",".","save_item_id",")",",",")",".","join","(","Playlist",",","Playlist",".","playlist_id","==","Save",".","save_item_id",")",".","join","(","AggregateUser",",","AggregateUser",".","user_id","==","Playlist",".","playlist_owner_id",")",".","filter","(","Save",".","is_current","==","True",",","Save",".","is_delete","==","False",",","Save",".","save_type","==","SaveType",".","playlist",",","# Albums are filtered out","Save",".","created_at",">","datetime",".","now","(",")","-","delta",",","Playlist",".","is_current","==","True",",","Playlist",".","is_delete","==","False",",","Playlist",".","is_private","==","False",",","jsonb_array_length","(","Playlist",".","playlist_contents","[","\"track_ids\"","]",")",">=","mt",",","AggregateUser",".","following_count","<","zq",",",")",".","group_by","(","Save",".","save_item_id",",","Playlist",".","created_at",",","Playlist",".","playlist_owner_id",")",".","order_by","(","desc","(","func",".","count","(","Save",".","save_item_id",")",")",")",".","limit","(","TRENDING_LIMIT",")",")",".","all","(",")","# Build up a map of playlist data","# playlist_id -> data","# Some fields initialized at zero","playlist_map","=","{","record","[","0","]",":","{","response_name_constants",".","playlist_id",":","record","[","0","]",",","response_name_constants",".","created_at",":","record","[","1","]",".","isoformat","(","timespec","=","\"seconds\"",")",",","response_name_constants",".","owner_id",":","record","[","2","]",",","response_name_constants",".","windowed_save_count",":","record","[","3","]",",","response_name_constants",".","save_count",":","0",",","response_name_constants",".","repost_count",":","0",",","response_name_constants",".","windowed_repost_count",":","0",",","response_name_constants",".","owner_follower_count",":","0",",","\"karma\"",":","1",",","\"listens\"",":","1",",","}","for","record","in","playlists","}","playlist_ids","=","[","record","[","0","]","for","record","in","playlists","]","# map 
owner_id -> [playlist_id], accounting for multiple playlists with the same ID","# used in follows","playlist_owner_id_map","=","{","}","for","(","playlist_id",",","_",",","owner_id",",","_",")","in","playlists",":","if","owner_id","not","in","playlist_owner_id_map",":","playlist_owner_id_map","[","owner_id","]","=","[","playlist_id","]","else",":","playlist_owner_id_map","[","owner_id","]",".","append","(","playlist_id",")","# Add repost counts","repost_counts","=","get_repost_counts","(","session",",","False",",","False",",","playlist_ids",",","[","RepostType",".","playlist","]",")","for","(","playlist_id",",","repost_count",")","in","repost_counts",":","playlist_map","[","playlist_id","]","[","response_name_constants",".","repost_count","]","=","repost_count","# Add windowed repost counts","repost_counts_for_time","=","get_repost_counts","(","session",",","False",",","False",",","playlist_ids",",","[","RepostType",".","playlist","]",",","None",",","time_range",")","for","(","playlist_id",",","repost_count",")","in","repost_counts_for_time",":","playlist_map","[","playlist_id","]","[","response_name_constants",".","windowed_repost_count","]","=","repost_count","# Add save counts","save_counts","=","get_save_counts","(","session",",","False",",","False",",","playlist_ids",",","[","SaveType",".","playlist","]",")","for","(","playlist_id",",","save_count",")","in","save_counts",":","playlist_map","[","playlist_id","]","[","response_name_constants",".","save_count","]","=","save_count","# Add follower counts","follower_counts","=","(","session",".","query","(","Follow",".","followee_user_id",",","func",".","count","(","Follow",".","followee_user_id",")",")",".","filter","(","Follow",".","is_current","==","True",",","Follow",".","is_delete","==","False",",","Follow",".","followee_user_id",".","in_","(","list","(","playlist_owner_id_map",".","keys","(",")",")",")",",",")",".","group_by","(","Follow",".","followee_user_id",")",".","all","(",")",")","for","(","followee_user_id",",","follower_count",")","in","follower_counts",":","if","follower_count",">=","pt",":","owned_playlist_ids","=","playlist_owner_id_map","[","followee_user_id","]","for","playlist_id","in","owned_playlist_ids",":","playlist_map","[","playlist_id","]","[","response_name_constants",".","owner_follower_count","]","=","follower_count","# Add karma","karma_scores","=","get_karma","(","session",",","tuple","(","playlist_ids",")",",","strategy",",","None",",","True",",","xf",")","for","(","playlist_id",",","karma",")","in","karma_scores",":","playlist_map","[","playlist_id","]","[","\"karma\"","]","=","karma","return","playlist_map",".","values","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_trending_playlists.py#L62-L188"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_trending_playlists.py","language":"python","identifier":"make_get_unpopulated_playlists","parameters":"(session, time_range, strategy)","argument_list":"","return_statement":"return wrapped","docstring":"Gets scorable data, scores and sorts, then returns full unpopulated playlists.\n Returns a function, because this is used in a Redis cache hook","docstring_summary":"Gets scorable data, scores and sorts, then returns full unpopulated playlists.\n Returns a function, because this is used in a Redis cache 
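get_scorable_playlist_data builds its owner_id -> [playlist_ids] map with an explicit membership check. The same fold, sketched with collections.defaultdict over rows shaped like the query result tuples (playlist_id, created_at, owner_id, windowed_save_count):

```python
from collections import defaultdict

playlists = [(10, None, 1, 5), (11, None, 1, 3), (12, None, 2, 9)]  # sample rows

playlist_owner_id_map = defaultdict(list)
for playlist_id, _, owner_id, _ in playlists:
    playlist_owner_id_map[owner_id].append(playlist_id)

assert dict(playlist_owner_id_map) == {1: [10, 11], 2: [12]}
```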
hook","docstring_tokens":["Gets","scorable","data","scores","and","sorts","then","returns","full","unpopulated","playlists",".","Returns","a","function","because","this","is","used","in","a","Redis","cache","hook"],"function":"def make_get_unpopulated_playlists(session, time_range, strategy):\n \"\"\"Gets scorable data, scores and sorts, then returns full unpopulated playlists.\n Returns a function, because this is used in a Redis cache hook\"\"\"\n\n def wrapped():\n playlist_scoring_data = get_scorable_playlist_data(\n session, time_range, strategy\n )\n\n # score the playlists\n scored_playlists = [\n strategy.get_track_score(time_range, playlist)\n for playlist in playlist_scoring_data\n ]\n sorted_playlists = sorted(\n scored_playlists, key=lambda k: k[\"score\"], reverse=True\n )\n\n # Get the unpopulated playlist metadata\n playlist_ids = [playlist[\"playlist_id\"] for playlist in sorted_playlists]\n playlists = get_unpopulated_playlists(session, playlist_ids)\n\n playlist_tracks_map = get_playlist_tracks(session, {\"playlists\": playlists})\n\n for playlist in playlists:\n playlist[\"tracks\"] = playlist_tracks_map.get(playlist[\"playlist_id\"], [])\n\n return (playlists, playlist_ids)\n\n return wrapped","function_tokens":["def","make_get_unpopulated_playlists","(","session",",","time_range",",","strategy",")",":","def","wrapped","(",")",":","playlist_scoring_data","=","get_scorable_playlist_data","(","session",",","time_range",",","strategy",")","# score the playlists","scored_playlists","=","[","strategy",".","get_track_score","(","time_range",",","playlist",")","for","playlist","in","playlist_scoring_data","]","sorted_playlists","=","sorted","(","scored_playlists",",","key","=","lambda","k",":","k","[","\"score\"","]",",","reverse","=","True",")","# Get the unpopulated playlist metadata","playlist_ids","=","[","playlist","[","\"playlist_id\"","]","for","playlist","in","sorted_playlists","]","playlists","=","get_unpopulated_playlists","(","session",",","playlist_ids",")","playlist_tracks_map","=","get_playlist_tracks","(","session",",","{","\"playlists\"",":","playlists","}",")","for","playlist","in","playlists",":","playlist","[","\"tracks\"","]","=","playlist_tracks_map",".","get","(","playlist","[","\"playlist_id\"","]",",","[","]",")","return","(","playlists",",","playlist_ids",")","return","wrapped"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_trending_playlists.py#L191-L220"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_trending_playlists.py","language":"python","identifier":"_get_trending_playlists_with_session","parameters":"(\n session: Session, args: GetTrendingPlaylistsArgs, strategy, use_request_context=True\n)","argument_list":"","return_statement":"return sorted_playlists","docstring":"Returns Trending Playlists. Checks Redis cache for unpopulated playlists.","docstring_summary":"Returns Trending Playlists. Checks Redis cache for unpopulated playlists.","docstring_tokens":["Returns","Trending","Playlists",".","Checks","Redis","cache","for","unpopulated","playlists","."],"function":"def _get_trending_playlists_with_session(\n session: Session, args: GetTrendingPlaylistsArgs, strategy, use_request_context=True\n):\n \"\"\"Returns Trending Playlists. 
Checks Redis cache for unpopulated playlists.\"\"\"\n current_user_id = args.get(\"current_user_id\", None)\n with_tracks = args.get(\"with_tracks\", False)\n time = args.get(\"time\")\n limit, offset = args.get(\"limit\"), args.get(\"offset\")\n key = make_trending_cache_key(time, strategy.version)\n\n # Get unpopulated playlists,\n # cached if it exists.\n (playlists, playlist_ids) = use_redis_cache(\n key, None, make_get_unpopulated_playlists(session, time, strategy)\n )\n\n # Apply limit + offset early to reduce the amount of\n # population work we have to do\n if limit is not None and offset is not None:\n playlists = playlists[offset : limit + offset]\n playlist_ids = playlist_ids[offset : limit + offset]\n\n # Populate playlist metadata\n playlists = populate_playlist_metadata(\n session,\n playlist_ids,\n playlists,\n [RepostType.playlist, RepostType.album],\n [SaveType.playlist, SaveType.album],\n current_user_id,\n )\n\n for playlist in playlists:\n playlist[\"track_count\"] = len(playlist[\"tracks\"])\n playlist[\"tracks\"] = playlist[\"tracks\"][:PLAYLIST_TRACKS_LIMIT]\n # Trim track_ids, which ultimately become added_timestamps\n # and need to match the tracks.\n trimmed_track_ids = {track[\"track_id\"] for track in playlist[\"tracks\"]}\n playlist_track_ids = playlist[\"playlist_contents\"][\"track_ids\"]\n playlist_track_ids = list(\n filter(\n lambda track_id: track_id[\"track\"]\n in trimmed_track_ids, # pylint: disable=W0640\n playlist_track_ids,\n )\n )\n playlist[\"playlist_contents\"][\"track_ids\"] = playlist_track_ids\n\n playlists_map = {playlist[\"playlist_id\"]: playlist for playlist in playlists}\n\n if with_tracks:\n # populate track metadata\n tracks = []\n for playlist in playlists:\n playlist_tracks = playlist[\"tracks\"]\n tracks.extend(playlist_tracks)\n track_ids = [track[\"track_id\"] for track in tracks]\n populated_tracks = populate_track_metadata(\n session, track_ids, tracks, current_user_id\n )\n\n # Add users if necessary\n add_users_to_tracks(session, populated_tracks, current_user_id)\n\n # Re-associate tracks with playlists\n # track_id -> populated_track\n populated_track_map = {track[\"track_id\"]: track for track in populated_tracks}\n for playlist in playlists_map.values():\n for i in range(len(playlist[\"tracks\"])):\n track_id = playlist[\"tracks\"][i][\"track_id\"]\n populated = populated_track_map[track_id]\n playlist[\"tracks\"][i] = populated\n playlist[\"tracks\"] = list(map(extend_track, playlist[\"tracks\"]))\n\n # re-sort playlists to original order, because populate_playlist_metadata\n # unsorts.\n sorted_playlists = [playlists_map[playlist_id] for playlist_id in playlist_ids]\n\n # Add users to playlists\n user_id_list = get_users_ids(sorted_playlists)\n users = get_users_by_id(session, user_id_list, current_user_id, use_request_context)\n for playlist in sorted_playlists:\n user = users[playlist[\"playlist_owner_id\"]]\n if user:\n playlist[\"user\"] = user\n\n # Extend the playlists\n playlists = list(map(extend_playlist, playlists))\n return 
sorted_playlists","function_tokens":["def","_get_trending_playlists_with_session","(","session",":","Session",",","args",":","GetTrendingPlaylistsArgs",",","strategy",",","use_request_context","=","True",")",":","current_user_id","=","args",".","get","(","\"current_user_id\"",",","None",")","with_tracks","=","args",".","get","(","\"with_tracks\"",",","False",")","time","=","args",".","get","(","\"time\"",")","limit",",","offset","=","args",".","get","(","\"limit\"",")",",","args",".","get","(","\"offset\"",")","key","=","make_trending_cache_key","(","time",",","strategy",".","version",")","# Get unpopulated playlists,","# cached if it exists.","(","playlists",",","playlist_ids",")","=","use_redis_cache","(","key",",","None",",","make_get_unpopulated_playlists","(","session",",","time",",","strategy",")",")","# Apply limit + offset early to reduce the amount of","# population work we have to do","if","limit","is","not","None","and","offset","is","not","None",":","playlists","=","playlists","[","offset",":","limit","+","offset","]","playlist_ids","=","playlist_ids","[","offset",":","limit","+","offset","]","# Populate playlist metadata","playlists","=","populate_playlist_metadata","(","session",",","playlist_ids",",","playlists",",","[","RepostType",".","playlist",",","RepostType",".","album","]",",","[","SaveType",".","playlist",",","SaveType",".","album","]",",","current_user_id",",",")","for","playlist","in","playlists",":","playlist","[","\"track_count\"","]","=","len","(","playlist","[","\"tracks\"","]",")","playlist","[","\"tracks\"","]","=","playlist","[","\"tracks\"","]","[",":","PLAYLIST_TRACKS_LIMIT","]","# Trim track_ids, which ultimately become added_timestamps","# and need to match the tracks.","trimmed_track_ids","=","{","track","[","\"track_id\"","]","for","track","in","playlist","[","\"tracks\"","]","}","playlist_track_ids","=","playlist","[","\"playlist_contents\"","]","[","\"track_ids\"","]","playlist_track_ids","=","list","(","filter","(","lambda","track_id",":","track_id","[","\"track\"","]","in","trimmed_track_ids",",","# pylint: disable=W0640","playlist_track_ids",",",")",")","playlist","[","\"playlist_contents\"","]","[","\"track_ids\"","]","=","playlist_track_ids","playlists_map","=","{","playlist","[","\"playlist_id\"","]",":","playlist","for","playlist","in","playlists","}","if","with_tracks",":","# populate track metadata","tracks","=","[","]","for","playlist","in","playlists",":","playlist_tracks","=","playlist","[","\"tracks\"","]","tracks",".","extend","(","playlist_tracks",")","track_ids","=","[","track","[","\"track_id\"","]","for","track","in","tracks","]","populated_tracks","=","populate_track_metadata","(","session",",","track_ids",",","tracks",",","current_user_id",")","# Add users if necessary","add_users_to_tracks","(","session",",","populated_tracks",",","current_user_id",")","# Re-associate tracks with playlists","# track_id -> populated_track","populated_track_map","=","{","track","[","\"track_id\"","]",":","track","for","track","in","populated_tracks","}","for","playlist","in","playlists_map",".","values","(",")",":","for","i","in","range","(","len","(","playlist","[","\"tracks\"","]",")",")",":","track_id","=","playlist","[","\"tracks\"","]","[","i","]","[","\"track_id\"","]","populated","=","populated_track_map","[","track_id","]","playlist","[","\"tracks\"","]","[","i","]","=","populated","playlist","[","\"tracks\"","]","=","list","(","map","(","extend_track",",","playlist","[","\"tracks\"","]",")",")","# re-sort playlists to original order, 
because populate_playlist_metadata","# unsorts.","sorted_playlists","=","[","playlists_map","[","playlist_id","]","for","playlist_id","in","playlist_ids","]","# Add users to playlists","user_id_list","=","get_users_ids","(","sorted_playlists",")","users","=","get_users_by_id","(","session",",","user_id_list",",","current_user_id",",","use_request_context",")","for","playlist","in","sorted_playlists",":","user","=","users","[","playlist","[","\"playlist_owner_id\"","]","]","if","user",":","playlist","[","\"user\"","]","=","user","# Extend the playlists","playlists","=","list","(","map","(","extend_playlist",",","playlists",")",")","return","sorted_playlists"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_trending_playlists.py#L242-L330"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_trending_playlists.py","language":"python","identifier":"get_trending_playlists","parameters":"(args: GetTrendingPlaylistsArgs, strategy)","argument_list":"","return_statement":"","docstring":"Returns Trending Playlists. Checks Redis cache for unpopulated playlists.","docstring_summary":"Returns Trending Playlists. Checks Redis cache for unpopulated playlists.","docstring_tokens":["Returns","Trending","Playlists",".","Checks","Redis","cache","for","unpopulated","playlists","."],"function":"def get_trending_playlists(args: GetTrendingPlaylistsArgs, strategy):\n \"\"\"Returns Trending Playlists. Checks Redis cache for unpopulated playlists.\"\"\"\n db = get_db_read_replica()\n with db.scoped_session() as session:\n return _get_trending_playlists_with_session(session, args, strategy)","function_tokens":["def","get_trending_playlists","(","args",":","GetTrendingPlaylistsArgs",",","strategy",")",":","db","=","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_trending_playlists_with_session","(","session",",","args",",","strategy",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_trending_playlists.py#L333-L337"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_genre_metrics.py","language":"python","identifier":"get_genre_metrics","parameters":"(args)","argument_list":"","return_statement":"","docstring":"Returns metrics for track genres over the provided bucket\n\n Args:\n args: dict The parsed args from the request\n args.offset: number The offset to start querying from\n args.limit: number The max number of queries to return\n args.start_time: date The start of the query\n\n Returns:\n Array of dictionaries with the play counts and timestamp","docstring_summary":"Returns metrics for track genres over the provided bucket","docstring_tokens":["Returns","metrics","for","track","genres","over","the","provided","bucket"],"function":"def get_genre_metrics(args):\n \"\"\"\n Returns metrics for track genres over the provided bucket\n\n Args:\n args: dict The parsed args from the request\n args.offset: number The offset to start querying from\n args.limit: number The max number of queries to return\n args.start_time: date The start of the query\n\n Returns:\n Array of dictionaries with the play counts and timestamp\n \"\"\"\n db = db_session.get_db_read_replica()\n with db.scoped_session() as 
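_get_trending_playlists_with_session applies limit and offset to the cached, pre-sorted list before populating metadata, so only the requested page pays the population cost. The slice arithmetic it relies on:

```python
playlists = list(range(100))  # stand-in for cached, already-sorted playlists
limit, offset = 10, 20
page = playlists[offset : limit + offset]
assert page == list(range(20, 30))  # rows 20..29: one page, populated afterwards
```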
session:\n return _get_genre_metrics(session, args)","function_tokens":["def","get_genre_metrics","(","args",")",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_genre_metrics","(","session",",","args",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_genre_metrics.py#L10-L25"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_top_user_track_tags.py","language":"python","identifier":"get_top_user_track_tags","parameters":"(args)","argument_list":"","return_statement":"","docstring":"Gets the most used tags for tracks owned by the query user\n\n Args:\n args: dict The parsed args from the request\n args.limit: number optional The max number of tags to return\n args.user_id: number The user id used to query for tracks\n\n Returns:\n Array of strings ordered by most used tag in track","docstring_summary":"Gets the most used tags for tracks owned by the query user","docstring_tokens":["Gets","the","most","used","tags","for","tracks","owned","by","the","query","user"],"function":"def get_top_user_track_tags(args):\n \"\"\"\n Gets the most used tags for tracks owned by the query user\n\n Args:\n args: dict The parsed args from the request\n args.limit: number optional The max number of tags to return\n args.user_id: number The user id used to query for tracks\n\n Returns:\n Array of strings ordered by most used tag in track\n \"\"\"\n db = get_db_read_replica()\n with db.scoped_session() as session:\n return _get_top_user_track_tags(session, args)","function_tokens":["def","get_top_user_track_tags","(","args",")",":","db","=","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_top_user_track_tags","(","session",",","args",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_top_user_track_tags.py#L10-L24"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_app_name_metrics.py","language":"python","identifier":"get_historical_app_metrics","parameters":"()","argument_list":"","return_statement":"","docstring":"Returns daily metrics for the last thirty days and all time monthly metrics\n\n Returns:\n {\n daily: {\n 2021\/01\/15: {app1: ..., app2: ...}\n ...\n },\n monthly: {\n 2021\/01\/01: {app1: ..., app2: ...}\n ...\n }\n }","docstring_summary":"Returns daily metrics for the last thirty days and all time monthly metrics","docstring_tokens":["Returns","daily","metrics","for","the","last","thirty","days","and","all","time","monthly","metrics"],"function":"def get_historical_app_metrics():\n \"\"\"\n Returns daily metrics for the last thirty days and all time monthly metrics\n\n Returns:\n {\n daily: {\n 2021\/01\/15: {app1: ..., app2: ...}\n ...\n },\n monthly: {\n 2021\/01\/01: {app1: ..., app2: ...}\n ...\n }\n }\n \"\"\"\n\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n return 
_get_historical_app_metrics(session)","function_tokens":["def","get_historical_app_metrics","(",")",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_historical_app_metrics","(","session",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_app_name_metrics.py#L17-L36"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_app_name_metrics.py","language":"python","identifier":"_get_historical_app_metrics","parameters":"(session, min_count=100)","argument_list":"","return_statement":"return {\"daily\": daily_metrics, \"monthly\": monthly_metrics}","docstring":"gets historical app metrics monthly and daily request counts.\n\n Args:\n session: Database session\n min_count: Minimum count an app must have in order to be returned","docstring_summary":"gets historical app metrics monthly and daily request counts.","docstring_tokens":["gets","historical","app","metrics","monthly","and","daily","request","counts","."],"function":"def _get_historical_app_metrics(session, min_count=100):\n \"\"\"\n gets historical app metrics monthly and daily request counts.\n\n Args:\n session: Database session\n min_count: Minimum count an app must have in order to be returned\n \"\"\"\n today = date.today()\n thirty_days_ago = today - timedelta(days=30)\n first_day_of_month = today.replace(day=1)\n\n daily_query = (\n session.query(\n AggregateDailyAppNameMetrics.timestamp,\n AggregateDailyAppNameMetrics.application_name,\n AggregateDailyAppNameMetrics.count,\n )\n .filter(min_count <= AggregateDailyAppNameMetrics.count)\n .filter(thirty_days_ago <= AggregateDailyAppNameMetrics.timestamp)\n .filter(AggregateDailyAppNameMetrics.timestamp < today)\n .all()\n )\n daily_metrics = {}\n for attribute in daily_query:\n day = str(attribute[0])\n if day not in daily_metrics:\n daily_metrics[day] = {attribute[1]: attribute[2]}\n else:\n daily_metrics[day][attribute[1]] = attribute[2]\n\n monthly_query = (\n session.query(\n AggregateMonthlyAppNameMetrics.timestamp,\n AggregateMonthlyAppNameMetrics.application_name,\n AggregateMonthlyAppNameMetrics.count,\n )\n .filter(min_count <= AggregateMonthlyAppNameMetrics.count)\n .filter(AggregateMonthlyAppNameMetrics.timestamp < first_day_of_month)\n .all()\n )\n monthly_metrics = {}\n for attribute in monthly_query:\n month = str(attribute[0])\n if month not in monthly_metrics:\n monthly_metrics[month] = {attribute[1]: attribute[2]}\n else:\n monthly_metrics[month][attribute[1]] = attribute[2]\n\n return {\"daily\": daily_metrics, \"monthly\": 
monthly_metrics}","function_tokens":["def","_get_historical_app_metrics","(","session",",","min_count","=","100",")",":","today","=","date",".","today","(",")","thirty_days_ago","=","today","-","timedelta","(","days","=","30",")","first_day_of_month","=","today",".","replace","(","day","=","1",")","daily_query","=","(","session",".","query","(","AggregateDailyAppNameMetrics",".","timestamp",",","AggregateDailyAppNameMetrics",".","application_name",",","AggregateDailyAppNameMetrics",".","count",",",")",".","filter","(","min_count","<=","AggregateDailyAppNameMetrics",".","count",")",".","filter","(","thirty_days_ago","<=","AggregateDailyAppNameMetrics",".","timestamp",")",".","filter","(","AggregateDailyAppNameMetrics",".","timestamp","<","today",")",".","all","(",")",")","daily_metrics","=","{","}","for","attribute","in","daily_query",":","day","=","str","(","attribute","[","0","]",")","if","day","not","in","daily_metrics",":","daily_metrics","[","day","]","=","{","attribute","[","1","]",":","attribute","[","2","]","}","else",":","daily_metrics","[","day","]","[","attribute","[","1","]","]","=","attribute","[","2","]","monthly_query","=","(","session",".","query","(","AggregateMonthlyAppNameMetrics",".","timestamp",",","AggregateMonthlyAppNameMetrics",".","application_name",",","AggregateMonthlyAppNameMetrics",".","count",",",")",".","filter","(","min_count","<=","AggregateMonthlyAppNameMetrics",".","count",")",".","filter","(","AggregateMonthlyAppNameMetrics",".","timestamp","<","first_day_of_month",")",".","all","(",")",")","monthly_metrics","=","{","}","for","attribute","in","monthly_query",":","month","=","str","(","attribute","[","0","]",")","if","month","not","in","monthly_metrics",":","monthly_metrics","[","month","]","=","{","attribute","[","1","]",":","attribute","[","2","]","}","else",":","monthly_metrics","[","month","]","[","attribute","[","1","]","]","=","attribute","[","2","]","return","{","\"daily\"",":","daily_metrics",",","\"monthly\"",":","monthly_metrics","}"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_app_name_metrics.py#L39-L88"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_app_name_metrics.py","language":"python","identifier":"get_aggregate_app_metrics","parameters":"(time_range, limit)","argument_list":"","return_statement":"","docstring":"Returns app name metrics for a given time range\n\n Args:\n time_range: one of \"week\", \"month\", \"all_time\"\n limit: number The max number of apps to return\n Returns:\n [{ name: string, count: number }, ...]","docstring_summary":"Returns app name metrics for a given time range","docstring_tokens":["Returns","app","name","metrics","for","a","given","time","range"],"function":"def get_aggregate_app_metrics(time_range, limit):\n \"\"\"\n Returns app name metrics for a given time range\n\n Args:\n time_range: one of \"week\", \"month\", \"all_time\"\n limit: number The max number of apps to return\n Returns:\n [{ name: string, count: number }, ...]\n \"\"\"\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n return _get_aggregate_app_metrics(session, time_range, 
limit)","function_tokens":["def","get_aggregate_app_metrics","(","time_range",",","limit",")",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_aggregate_app_metrics","(","session",",","time_range",",","limit",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_app_name_metrics.py#L91-L103"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_app_name_metrics.py","language":"python","identifier":"get_app_name_metrics","parameters":"(app_name, args)","argument_list":"","return_statement":"","docstring":"Returns the usage metrics for a specified app_name\n\n Args:\n app_name: string The name of the app to query for metrics\n args: dict The parsed args from the request\n args.start_time: date The date to start the query from\n args.limit: number The max number of metrics to return\n args.bucket_size: string A date_trunc operation to aggregate timestamps by\n\n Returns:\n Array of dictionaries with the timestamp, count, and unique_count","docstring_summary":"Returns the usage metrics for a specified app_name","docstring_tokens":["Returns","the","usage","metrics","for","a","specified","app_name"],"function":"def get_app_name_metrics(app_name, args):\n \"\"\"\n Returns the usage metrics for a specified app_name\n\n Args:\n app_name: string The name of the app to query for metrics\n args: dict The parsed args from the request\n args.start_time: date The date to start the query from\n args.limit: number The max number of metrics to return\n args.bucket_size: string A date_trunc operation to aggregate timestamps by\n\n Returns:\n Array of dictionaries with the timestamp, count, and unique_count\n \"\"\"\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n return _get_app_name_metrics(session, app_name, args)","function_tokens":["def","get_app_name_metrics","(","app_name",",","args",")",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_app_name_metrics","(","session",",","app_name",",","args",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_app_name_metrics.py#L157-L173"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_route_metrics.py","language":"python","identifier":"get_historical_route_metrics","parameters":"()","argument_list":"","return_statement":"","docstring":"Returns daily metrics for the last thirty days and all time monthly metrics\n\n Returns:\n {\n daily: {\n 2021\/01\/15: {unique_count: ..., total_count: ...}\n ...\n },\n monthly: {\n 2021\/01\/01: {unique_count: ..., total_count: ...}\n ...\n }\n }","docstring_summary":"Returns daily metrics for the last thirty days and all time monthly metrics","docstring_tokens":["Returns","daily","metrics","for","the","last","thirty","days","and","all","time","monthly","metrics"],"function":"def get_historical_route_metrics():\n \"\"\"\n Returns daily metrics for the last thirty days and all time monthly metrics\n\n Returns:\n {\n daily: {\n 2021\/01\/15: {unique_count: ..., total_count: ...}\n ...\n },\n monthly: {\n 2021\/01\/01: {unique_count: ..., total_count: ...}\n ...\n }\n }\n 
\"\"\"\n\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n return _get_historical_route_metrics(session)","function_tokens":["def","get_historical_route_metrics","(",")",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_historical_route_metrics","(","session",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_route_metrics.py#L22-L41"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_route_metrics.py","language":"python","identifier":"get_aggregate_route_metrics","parameters":"(time_range, bucket_size)","argument_list":"","return_statement":"","docstring":"Returns a list of timestamp with unique count and total count for all routes\n based on given time range and grouped by bucket size\n\n Returns:\n [{ timestamp, unique_count, total_count }]","docstring_summary":"Returns a list of timestamp with unique count and total count for all routes\n based on given time range and grouped by bucket size","docstring_tokens":["Returns","a","list","of","timestamp","with","unique","count","and","total","count","for","all","routes","based","on","given","time","range","and","grouped","by","bucket","size"],"function":"def get_aggregate_route_metrics(time_range, bucket_size):\n \"\"\"\n Returns a list of timestamp with unique count and total count for all routes\n based on given time range and grouped by bucket size\n\n Returns:\n [{ timestamp, unique_count, total_count }]\n \"\"\"\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n return _get_aggregate_route_metrics(session, time_range, bucket_size)","function_tokens":["def","get_aggregate_route_metrics","(","time_range",",","bucket_size",")",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_aggregate_route_metrics","(","session",",","time_range",",","bucket_size",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_route_metrics.py#L136-L146"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/get_route_metrics.py","language":"python","identifier":"get_route_metrics","parameters":"(args)","argument_list":"","return_statement":"","docstring":"Returns the usage metrics for routes\n\n Args:\n args: dict The parsed args from the request\n args.path: string The route path of the query\n args.start_time: date The start of the query\n args.query_string: optional string The query string to filter on\n args.limit: number The max number of responses to return\n args.bucket_size: string date_trunc operation to aggregate timestamps by\n\n Returns:\n Array of dictionaries with the route, timestamp, count, and unique_count","docstring_summary":"Returns the usage metrics for routes","docstring_tokens":["Returns","the","usage","metrics","for","routes"],"function":"def get_route_metrics(args):\n \"\"\"\n Returns the usage metrics for routes\n\n Args:\n args: dict The parsed args from the request\n args.path: string The route path of the query\n args.start_time: date The start of the query\n args.query_string: optional string The query string to filter on\n args.limit: number The max 
number of responses to return\n args.bucket_size: string date_trunc operation to aggregate timestamps by\n\n Returns:\n Array of dictionaries with the route, timestamp, count, and unique_count\n \"\"\"\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n return _get_route_metrics(session, args)","function_tokens":["def","get_route_metrics","(","args",")",":","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","return","_get_route_metrics","(","session",",","args",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/get_route_metrics.py#L441-L458"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/queries.py","language":"python","identifier":"to_dict","parameters":"(multi_dict)","argument_list":"","return_statement":"return {\n k: v if len(v) > 1 else v[0]\n for (k, v) in multi_dict.to_dict(flat=False).items()\n }","docstring":"Converts a multi dict into a dict where only list entries are not flat","docstring_summary":"Converts a multi dict into a dict where only list entries are not flat","docstring_tokens":["Converts","a","multi","dict","into","a","dict","where","only","list","entries","are","not","flat"],"function":"def to_dict(multi_dict):\n \"\"\"Converts a multi dict into a dict where only list entries are not flat\"\"\"\n return {\n k: v if len(v) > 1 else v[0]\n for (k, v) in multi_dict.to_dict(flat=False).items()\n }","function_tokens":["def","to_dict","(","multi_dict",")",":","return","{","k",":","v","if","len","(","v",")",">","1","else","v","[","0","]","for","(","k",",","v",")","in","multi_dict",".","to_dict","(","flat","=","False",")",".","items","(",")","}"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/queries.py#L55-L60"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/queries.py","language":"python","identifier":"parse_bool_param","parameters":"(field)","argument_list":"","return_statement":"return field.lower() == \"true\" if field else False","docstring":"Converts a url param to a boolean value","docstring_summary":"Converts a url param to a boolean value","docstring_tokens":["Converts","a","url","param","to","a","boolean","value"],"function":"def parse_bool_param(field):\n \"\"\"Converts a url param to a boolean value\"\"\"\n return field.lower() == \"true\" if field else False","function_tokens":["def","parse_bool_param","(","field",")",":","return","field",".","lower","(",")","==","\"true\"","if","field","else","False"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/queries.py#L63-L65"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/queries.py","language":"python","identifier":"parse_id_array_param","parameters":"(list)","argument_list":"","return_statement":"return [int(y) for y in list]","docstring":"Converts a list of strings ids to int","docstring_summary":"Converts a list of strings ids to int","docstring_tokens":["Converts","a","list","of","strings","ids","to","int"],"function":"def parse_id_array_param(list):\n \"\"\"Converts a list of strings ids 
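The `to_dict` and `parse_bool_param` helpers above are easiest to see with concrete inputs. Both bodies below are copied from the records; the demonstration assumes Werkzeug (a Flask dependency) is available for `MultiDict`:

```python
from werkzeug.datastructures import MultiDict

def to_dict(multi_dict):
    # Flatten single-valued entries; keep repeated keys as lists.
    return {
        k: v if len(v) > 1 else v[0]
        for (k, v) in multi_dict.to_dict(flat=False).items()
    }

def parse_bool_param(field):
    # Only the literal string "true" (case-insensitive) is truthy.
    return field.lower() == "true" if field else False

args = MultiDict([("id", "1"), ("id", "2"), ("mood", "chill")])
assert to_dict(args) == {"id": ["1", "2"], "mood": "chill"}
assert parse_bool_param("True") is True
assert parse_bool_param("1") is False   # anything other than "true" is False
assert parse_bool_param(None) is False
```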
to int\"\"\"\n return [int(y) for y in list]","function_tokens":["def","parse_id_array_param","(","list",")",":","return","[","int","(","y",")","for","y","in","list","]"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/queries.py#L68-L70"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/queries.py","language":"python","identifier":"get_top_playlists_route","parameters":"(type)","argument_list":"","return_statement":"","docstring":"An endpoint to retrieve the \"top\" of a certain demographic of playlists or albums.\n This endpoint is useful in generating views like:\n - Top playlists\n - Top Albums\n - Top playlists of a certain mood\n - Top playlists of a certain mood from people you follow\n\n Args:\n type: (string) The `type` (same as repost\/save type) to query from.\n limit?: (number) default=16, max=100\n mood?: (string) default=None\n filter?: (string) Optional filter to include (supports 'followees') default=None","docstring_summary":"An endpoint to retrieve the \"top\" of a certain demographic of playlists or albums.\n This endpoint is useful in generating views like:\n - Top playlists\n - Top Albums\n - Top playlists of a certain mood\n - Top playlists of a certain mood from people you follow","docstring_tokens":["An","endpoint","to","retrieve","the","top","of","a","certain","demographic","of","playlists","or","albums",".","This","endpoint","is","useful","in","generating","views","like",":","-","Top","playlists","-","Top","Albums","-","Top","playlists","of","a","certain","mood","-","Top","playlists","of","a","certain","mood","from","people","you","follow"],"function":"def get_top_playlists_route(type):\n \"\"\"\n An endpoint to retrieve the \"top\" of a certain demographic of playlists or albums.\n This endpoint is useful in generating views like:\n - Top playlists\n - Top Albums\n - Top playlists of a certain mood\n - Top playlists of a certain mood from people you follow\n\n Args:\n type: (string) The `type` (same as repost\/save type) to query from.\n limit?: (number) default=16, max=100\n mood?: (string) default=None\n filter?: (string) Optional filter to include (supports 'followees') default=None\n \"\"\"\n args = to_dict(request.args)\n if \"limit\" in request.args:\n args[\"limit\"] = min(request.args.get(\"limit\", type=int), 100)\n else:\n args[\"limit\"] = 16\n\n if \"mood\" in request.args:\n args[\"mood\"] = request.args.get(\"mood\")\n else:\n args[\"mood\"] = None\n if \"with_users\" in request.args:\n args[\"with_users\"] = parse_bool_param(request.args.get(\"with_users\"))\n try:\n playlists = get_top_playlists(type, args)\n return api_helpers.success_response(playlists)\n except exceptions.ArgumentError as e:\n return api_helpers.error_response(str(e), 
400)","function_tokens":["def","get_top_playlists_route","(","type",")",":","args","=","to_dict","(","request",".","args",")","if","\"limit\"","in","request",".","args",":","args","[","\"limit\"","]","=","min","(","request",".","args",".","get","(","\"limit\"",",","type","=","int",")",",","100",")","else",":","args","[","\"limit\"","]","=","16","if","\"mood\"","in","request",".","args",":","args","[","\"mood\"","]","=","request",".","args",".","get","(","\"mood\"",")","else",":","args","[","\"mood\"","]","=","None","if","\"with_users\"","in","request",".","args",":","args","[","\"with_users\"","]","=","parse_bool_param","(","request",".","args",".","get","(","\"with_users\"",")",")","try",":","playlists","=","get_top_playlists","(","type",",","args",")","return","api_helpers",".","success_response","(","playlists",")","except","exceptions",".","ArgumentError","as","e",":","return","api_helpers",".","error_response","(","str","(","e",")",",","400",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/queries.py#L409-L440"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/queries.py","language":"python","identifier":"get_top_followee_windowed_route","parameters":"(type, window)","argument_list":"","return_statement":"","docstring":"Gets a windowed (over a certain timerange) view into the \"top\" of a certain type\n amongst followees. Requires an account.\n This endpoint is useful in generating views like:\n - New releases\n\n Args:\n type: (string) The `type` (same as repost\/save type) to query from. Currently only\n track is supported.\n window: (string) The window from now() to look back over. Supports all standard\n SqlAlchemy interval notation (week, month, year, etc.).\n limit?: (number) default=25, max=100","docstring_summary":"Gets a windowed (over a certain timerange) view into the \"top\" of a certain type\n amongst followees. Requires an account.\n This endpoint is useful in generating views like:\n - New releases","docstring_tokens":["Gets","a","windowed","(","over","a","certain","timerange",")","view","into","the","top","of","a","certain","type","amongst","followees",".","Requires","an","account",".","This","endpoint","is","useful","in","generating","views","like",":","-","New","releases"],"function":"def get_top_followee_windowed_route(type, window):\n \"\"\"\n Gets a windowed (over a certain timerange) view into the \"top\" of a certain type\n amongst followees. Requires an account.\n This endpoint is useful in generating views like:\n - New releases\n\n Args:\n type: (string) The `type` (same as repost\/save type) to query from. Currently only\n track is supported.\n window: (string) The window from now() to look back over. 
Supports all standard\n SqlAlchemy interval notation (week, month, year, etc.).\n limit?: (number) default=25, max=100\n \"\"\"\n args = to_dict(request.args)\n if \"limit\" in request.args:\n args[\"limit\"] = min(request.args.get(\"limit\", type=int), 100)\n else:\n args[\"limit\"] = 25\n if \"with_users\" in request.args:\n args[\"with_users\"] = parse_bool_param(request.args.get(\"with_users\"))\n\n try:\n tracks = get_top_followee_windowed(type, window, args)\n return api_helpers.success_response(tracks)\n except exceptions.ArgumentError as e:\n return api_helpers.error_response(str(e), 400)","function_tokens":["def","get_top_followee_windowed_route","(","type",",","window",")",":","args","=","to_dict","(","request",".","args",")","if","\"limit\"","in","request",".","args",":","args","[","\"limit\"","]","=","min","(","request",".","args",".","get","(","\"limit\"",",","type","=","int",")",",","100",")","else",":","args","[","\"limit\"","]","=","25","if","\"with_users\"","in","request",".","args",":","args","[","\"with_users\"","]","=","parse_bool_param","(","request",".","args",".","get","(","\"with_users\"",")",")","try",":","tracks","=","get_top_followee_windowed","(","type",",","window",",","args",")","return","api_helpers",".","success_response","(","tracks",")","except","exceptions",".","ArgumentError","as","e",":","return","api_helpers",".","error_response","(","str","(","e",")",",","400",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/queries.py#L445-L471"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/queries\/queries.py","language":"python","identifier":"get_top_followee_saves_route","parameters":"(type)","argument_list":"","return_statement":"","docstring":"Gets a global view into the most saved of `type` amongst followees. Requires an account.\n This endpoint is useful in generating views like:\n - Most favorited\n\n Args:\n type: (string) The `type` (same as repost\/save type) to query from. Currently only\n track is supported.\n limit?: (number) default=25, max=100","docstring_summary":"Gets a global view into the most saved of `type` amongst followees. Requires an account.\n This endpoint is useful in generating views like:\n - Most favorited","docstring_tokens":["Gets","a","global","view","into","the","most","saved","of","type","amongst","followees",".","Requires","an","account",".","This","endpoint","is","useful","in","generating","views","like",":","-","Most","favorited"],"function":"def get_top_followee_saves_route(type):\n \"\"\"\n Gets a global view into the most saved of `type` amongst followees. Requires an account.\n This endpoint is useful in generating views like:\n - Most favorited\n\n Args:\n type: (string) The `type` (same as repost\/save type) to query from. 
Currently only\n track is supported.\n limit?: (number) default=25, max=100\n \"\"\"\n args = to_dict(request.args)\n if \"limit\" in request.args:\n args[\"limit\"] = min(request.args.get(\"limit\", type=int), 100)\n else:\n args[\"limit\"] = 25\n if \"with_users\" in request.args:\n args[\"with_users\"] = parse_bool_param(request.args.get(\"with_users\"))\n\n try:\n tracks = get_top_followee_saves(type, args)\n return api_helpers.success_response(tracks)\n except exceptions.ArgumentError as e:\n return api_helpers.error_response(str(e), 400)","function_tokens":["def","get_top_followee_saves_route","(","type",")",":","args","=","to_dict","(","request",".","args",")","if","\"limit\"","in","request",".","args",":","args","[","\"limit\"","]","=","min","(","request",".","args",".","get","(","\"limit\"",",","type","=","int",")",",","100",")","else",":","args","[","\"limit\"","]","=","25","if","\"with_users\"","in","request",".","args",":","args","[","\"with_users\"","]","=","parse_bool_param","(","request",".","args",".","get","(","\"with_users\"",")",")","try",":","tracks","=","get_top_followee_saves","(","type",",","args",")","return","api_helpers",".","success_response","(","tracks",")","except","exceptions",".","ArgumentError","as","e",":","return","api_helpers",".","error_response","(","str","(","e",")",",","400",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/queries\/queries.py#L476-L499"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/solana\/solana_client_manager.py","language":"python","identifier":"_try_all","parameters":"(iterable, func, message, randomize=False)","argument_list":"","return_statement":"","docstring":"Executes a function with retries across the iterable.\n If all executions fail, raise an exception.","docstring_summary":"Executes a function with retries across the iterable.\n If all executions fail, raise an exception.","docstring_tokens":["Executes","a","function","with","retries","across","the","iterable",".","If","all","executions","fail","raise","an","exception","."],"function":"def _try_all(iterable, func, message, randomize=False):\n \"\"\"Executes a function with retries across the iterable.\n If all executions fail, raise an exception.\"\"\"\n items = list(enumerate(iterable))\n items = items if not randomize else random.sample(items, k=len(items))\n for index, value in items:\n try:\n return func(value, index)\n except Exception:\n logger.error(\n f\"solana_client_manager.py | _try_all | Failed attempt at index {index} for function {func}\"\n )\n if index < len(items) - 1:\n logger.info(\"solana_client_manager.py | _try_all | Retrying\")\n continue\n raise Exception(message)","function_tokens":["def","_try_all","(","iterable",",","func",",","message",",","randomize","=","False",")",":","items","=","list","(","enumerate","(","iterable",")",")","items","=","items","if","not","randomize","else","random",".","sample","(","items",",","k","=","len","(","items",")",")","for","index",",","value","in","items",":","try",":","return","func","(","value",",","index",")","except","Exception",":","logger",".","error","(","f\"solana_client_manager.py | _try_all | Failed attempt at index {index} for function {func}\"",")","if","index","<","len","(","items",")","-","1",":","logger",".","info","(","\"solana_client_manager.py | _try_all | 
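Usage sketch for the `_try_all` failover helper above: try a function against each client in order, return the first success, and raise only once every client has failed. The endpoints and fetch function are hypothetical; the helper body mirrors the record with log strings trimmed. One subtlety worth noting: the exhaustion check compares the original enumerate index, so with `randomize=True` the loop appears able to raise early if the originally-last item is shuffled to the front and fails.

```python
import logging
import random

logger = logging.getLogger("solana_client_manager")

def _try_all(iterable, func, message, randomize=False):
    # Same control flow as the record above, with log strings trimmed.
    items = list(enumerate(iterable))
    items = items if not randomize else random.sample(items, k=len(items))
    for index, value in items:
        try:
            return func(value, index)
        except Exception:
            logger.error("attempt at index %s failed", index)
            if index < len(items) - 1:
                continue
            raise Exception(message)

endpoints = ["https://rpc-1.example.com", "https://rpc-2.example.com"]

def fetch(endpoint, index):
    if index == 0:
        raise ConnectionError("simulated outage")  # first endpoint is down
    return f"ok from {endpoint}"

assert _try_all(endpoints, fetch, "all endpoints failed") == "ok from https://rpc-2.example.com"
```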
Retrying\"",")","continue","raise","Exception","(","message",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/solana\/solana_client_manager.py#L141-L156"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/solana\/solana_client_manager.py","language":"python","identifier":"_try_all_with_timeout","parameters":"(iterable, func, message, randomize=False)","argument_list":"","return_statement":"","docstring":"Do not use this function with ThreadPoolExecutor,\n doesn't play well with futures\n\n Executes a function with retries across the iterable.\n If all executions fail, raise an exception.","docstring_summary":"Do not use this function with ThreadPoolExecutor,\n doesn't play well with futures","docstring_tokens":["Do","not","use","this","function","with","ThreadPoolExecutor","doesn","t","play","well","with","futures"],"function":"def _try_all_with_timeout(iterable, func, message, randomize=False):\n \"\"\"Do not use this function with ThreadPoolExecutor,\n doesn't play well with futures\n\n Executes a function with retries across the iterable.\n If all executions fail, raise an exception.\"\"\"\n items = list(enumerate(iterable))\n items = items if not randomize else random.sample(items, k=len(items))\n for index, value in items:\n try:\n with timeout(30):\n return func(value, index)\n except Exception:\n logger.error(\n f\"solana_client_manager.py | _try_all | Failed attempt at index {index} for function {func}\"\n )\n if index < len(items) - 1:\n logger.info(\"solana_client_manager.py | _try_all | Retrying\")\n continue\n raise Exception(message)","function_tokens":["def","_try_all_with_timeout","(","iterable",",","func",",","message",",","randomize","=","False",")",":","items","=","list","(","enumerate","(","iterable",")",")","items","=","items","if","not","randomize","else","random",".","sample","(","items",",","k","=","len","(","items",")",")","for","index",",","value","in","items",":","try",":","with","timeout","(","30",")",":","return","func","(","value",",","index",")","except","Exception",":","logger",".","error","(","f\"solana_client_manager.py | _try_all | Failed attempt at index {index} for function {func}\"",")","if","index","<","len","(","items",")","-","1",":","logger",".","info","(","\"solana_client_manager.py | _try_all | Retrying\"",")","continue","raise","Exception","(","message",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/solana\/solana_client_manager.py#L159-L178"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/solana\/solana_client_manager.py","language":"python","identifier":"SolanaClientManager.get_sol_tx_info","parameters":"(self, tx_sig: str, retries=DEFAULT_MAX_RETRIES)","argument_list":"","return_statement":"return _try_all(\n self.clients,\n handle_get_sol_tx_info,\n f\"solana_client_manager.py | get_sol_tx_info | All requests failed to fetch {tx_sig}\",\n )","docstring":"Fetches a solana transaction by signature with retries and a delay.","docstring_summary":"Fetches a solana transaction by signature with retries and a delay.","docstring_tokens":["Fetches","a","solana","transaction","by","signature","with","retries","and","a","delay","."],"function":"def get_sol_tx_info(self, tx_sig: str, retries=DEFAULT_MAX_RETRIES):\n \"\"\"Fetches a 
solana transaction by signature with retries and a delay.\"\"\"\n\n def handle_get_sol_tx_info(client, index):\n endpoint = self.endpoints[index]\n num_retries = retries\n while num_retries > 0:\n try:\n logger.info(\n f\"solana_client_manager.py | get_sol_tx_info | Fetching tx {tx_sig} {endpoint}\"\n )\n tx_info = client.get_confirmed_transaction(tx_sig)\n logger.info(\n f\"solana_client_manager.py | get_sol_tx_info | Finished fetching tx {tx_sig} {endpoint}\"\n )\n if tx_info[\"result\"] is not None:\n return tx_info\n except Exception as e:\n logger.error(\n f\"solana_client_manager.py | get_sol_tx_info | \\\n Error fetching tx {tx_sig} from endpoint {endpoint}, {e}\",\n exc_info=True,\n )\n num_retries -= 1\n time.sleep(DELAY_SECONDS)\n logger.error(\n f\"solana_client_manager.py | get_sol_tx_info | Retrying tx fetch: {tx_sig} with endpoint {endpoint}\"\n )\n raise Exception(\n f\"solana_client_manager.py | get_sol_tx_info | Failed to fetch {tx_sig} with endpoint {endpoint}\"\n )\n\n return _try_all(\n self.clients,\n handle_get_sol_tx_info,\n f\"solana_client_manager.py | get_sol_tx_info | All requests failed to fetch {tx_sig}\",\n )","function_tokens":["def","get_sol_tx_info","(","self",",","tx_sig",":","str",",","retries","=","DEFAULT_MAX_RETRIES",")",":","def","handle_get_sol_tx_info","(","client",",","index",")",":","endpoint","=","self",".","endpoints","[","index","]","num_retries","=","retries","while","num_retries",">","0",":","try",":","logger",".","info","(","f\"solana_client_manager.py | get_sol_tx_info | Fetching tx {tx_sig} {endpoint}\"",")","tx_info","=","client",".","get_confirmed_transaction","(","tx_sig",")","logger",".","info","(","f\"solana_client_manager.py | get_sol_tx_info | Finished fetching tx {tx_sig} {endpoint}\"",")","if","tx_info","[","\"result\"","]","is","not","None",":","return","tx_info","except","Exception","as","e",":","logger",".","error","(","f\"solana_client_manager.py | get_sol_tx_info | \\\n Error fetching tx {tx_sig} from endpoint {endpoint}, {e}\"",",","exc_info","=","True",",",")","num_retries","-=","1","time",".","sleep","(","DELAY_SECONDS",")","logger",".","error","(","f\"solana_client_manager.py | get_sol_tx_info | Retrying tx fetch: {tx_sig} with endpoint {endpoint}\"",")","raise","Exception","(","f\"solana_client_manager.py | get_sol_tx_info | Failed to fetch {tx_sig} with endpoint {endpoint}\"",")","return","_try_all","(","self",".","clients",",","handle_get_sol_tx_info",",","f\"solana_client_manager.py | get_sol_tx_info | All requests failed to fetch {tx_sig}\"",",",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/solana\/solana_client_manager.py#L36-L72"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/solana\/solana_client_manager.py","language":"python","identifier":"SolanaClientManager.get_signatures_for_address","parameters":"(\n self,\n account: Union[str, Account, PublicKey],\n before: Optional[str] = None,\n limit: Optional[int] = None,\n retries: Optional[int] = DEFAULT_MAX_RETRIES,\n )","argument_list":"","return_statement":"return _try_all_with_timeout(\n self.clients,\n handle_get_signatures_for_address,\n \"solana_client_manager.py | get_signatures_for_address | All requests failed\",\n )","docstring":"Fetches confirmed signatures for transactions given an address.","docstring_summary":"Fetches confirmed signatures for transactions given an 
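Inside each failover handler, `get_sol_tx_info` and `get_signatures_for_address` wrap the RPC call in a bounded retry loop with a fixed sleep between attempts, and `get_sol_tx_info` additionally treats a `None` result as a miss worth retrying. A generic sketch of that inner loop; the fetch callable and the short `DELAY_SECONDS` are stand-ins:

```python
import time

DELAY_SECONDS = 0.01  # stand-in; the real module waits longer between attempts

def retry_with_delay(fetch, retries=5):
    num_retries = retries
    while num_retries > 0:
        try:
            result = fetch()
            if result is not None:  # mirrors the tx_info["result"] check
                return result
        except Exception:
            pass  # the real handlers log the failure with exc_info=True
        num_retries -= 1
        time.sleep(DELAY_SECONDS)
    raise Exception("exhausted retries")

attempts = iter([None, None, "tx-info"])
assert retry_with_delay(lambda: next(attempts)) == "tx-info"
```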
address.","docstring_tokens":["Fetches","confirmed","signatures","for","transactions","given","an","address","."],"function":"def get_signatures_for_address(\n self,\n account: Union[str, Account, PublicKey],\n before: Optional[str] = None,\n limit: Optional[int] = None,\n retries: Optional[int] = DEFAULT_MAX_RETRIES,\n ):\n \"\"\"Fetches confirmed signatures for transactions given an address.\"\"\"\n\n def handle_get_signatures_for_address(client, index):\n endpoint = self.endpoints[index]\n num_retries = retries\n while num_retries > 0:\n try:\n logger.info(\n f\"solana_client_manager.py | handle_get_signatures_for_address | Fetching {before} {endpoint}\"\n )\n transactions: ConfirmedSignatureForAddressResponse = (\n client.get_signatures_for_address(account, before, limit)\n )\n logger.info(\n f\"solana_client_manager.py | handle_get_signatures_for_address | Finished fetching {before} {endpoint}\"\n )\n return transactions\n except Exception as e:\n logger.error(\n f\"solana_client_manager.py | handle_get_signatures_for_address | \\\n Error fetching account {account} from endpoint {endpoint}, {e}\",\n exc_info=True,\n )\n num_retries -= 1\n time.sleep(DELAY_SECONDS)\n logger.error(\n f\"solana_client_manager.py | handle_get_signatures_for_address | Retrying account fetch: {account} with endpoint {endpoint}\"\n )\n raise Exception(\n f\"solana_client_manager.py | handle_get_signatures_for_address | Failed to fetch account {account} with endpoint {endpoint}\"\n )\n\n return _try_all_with_timeout(\n self.clients,\n handle_get_signatures_for_address,\n \"solana_client_manager.py | get_signatures_for_address | All requests failed\",\n )","function_tokens":["def","get_signatures_for_address","(","self",",","account",":","Union","[","str",",","Account",",","PublicKey","]",",","before",":","Optional","[","str","]","=","None",",","limit",":","Optional","[","int","]","=","None",",","retries",":","Optional","[","int","]","=","DEFAULT_MAX_RETRIES",",",")",":","def","handle_get_signatures_for_address","(","client",",","index",")",":","endpoint","=","self",".","endpoints","[","index","]","num_retries","=","retries","while","num_retries",">","0",":","try",":","logger",".","info","(","f\"solana_client_manager.py | handle_get_signatures_for_address | Fetching {before} {endpoint}\"",")","transactions",":","ConfirmedSignatureForAddressResponse","=","(","client",".","get_signatures_for_address","(","account",",","before",",","limit",")",")","logger",".","info","(","f\"solana_client_manager.py | handle_get_signatures_for_address | Finished fetching {before} {endpoint}\"",")","return","transactions","except","Exception","as","e",":","logger",".","error","(","f\"solana_client_manager.py | handle_get_signatures_for_address | \\\n Error fetching account {account} from endpoint {endpoint}, {e}\"",",","exc_info","=","True",",",")","num_retries","-=","1","time",".","sleep","(","DELAY_SECONDS",")","logger",".","error","(","f\"solana_client_manager.py | handle_get_signatures_for_address | Retrying account fetch: {account} with endpoint {endpoint}\"",")","raise","Exception","(","f\"solana_client_manager.py | handle_get_signatures_for_address | Failed to fetch account {account} with endpoint {endpoint}\"",")","return","_try_all_with_timeout","(","self",".","clients",",","handle_get_signatures_for_address",",","\"solana_client_manager.py | get_signatures_for_address | All requests 
failed\"",",",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/solana\/solana_client_manager.py#L74-L117"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/solana\/solana_parser.py","language":"python","identifier":"parse_instruction_data","parameters":"(data: str, instructionFormat: List[InstructionFormat])","argument_list":"","return_statement":"return decoded_params","docstring":"Parses encoded instruction data into a dictionary based on instruction format","docstring_summary":"Parses encoded instruction data into a dictionary based on instruction format","docstring_tokens":["Parses","encoded","instruction","data","into","a","dictionary","based","on","instruction","format"],"function":"def parse_instruction_data(data: str, instructionFormat: List[InstructionFormat]):\n \"\"\"Parses encoded instruction data into a dictionary based on instruction format\"\"\"\n decoded = base58.b58decode(data)[1:]\n last_end = 0\n decoded_params: Dict = {}\n for intr in instructionFormat:\n name = intr[\"name\"]\n type = intr[\"type\"]\n\n if type == SolanaInstructionType.u64:\n type_len = solanaInstructionSpace[type]\n decoded_params[name] = int.from_bytes(\n decoded[last_end : last_end + type_len], \"little\"\n )\n last_end = last_end + type_len\n elif type == SolanaInstructionType.string:\n type_len = solanaInstructionSpace[type]\n instr_len = int.from_bytes(\n decoded[last_end : last_end + type_len], \"little\"\n )\n start, end = last_end + type_len, last_end + type_len + instr_len\n decoded_value: bytes = decoded[start:end]\n decoded_params[name] = str(decoded_value, \"utf-8\")\n last_end = end\n elif type == SolanaInstructionType.EthereumAddress:\n type_len = solanaInstructionSpace[type]\n decoded_int = int.from_bytes(decoded[last_end : last_end + type_len], \"big\")\n # Ensure stored address is of length 40 characters\n # Pads zeros if present at start of string\n # https:\/\/stackoverflow.com\/a\/12638477\n decoded_params[name] = f\"0x{decoded_int:040x}\"\n last_end = last_end + type_len\n elif type == SolanaInstructionType.UnixTimestamp:\n type_len = solanaInstructionSpace[type]\n decoded_params[name] = int.from_bytes(\n decoded[last_end : last_end + type_len], \"little\"\n )\n last_end = last_end + type_len\n\n return 
decoded_params","function_tokens":["def","parse_instruction_data","(","data",":","str",",","instructionFormat",":","List","[","InstructionFormat","]",")",":","decoded","=","base58",".","b58decode","(","data",")","[","1",":","]","last_end","=","0","decoded_params",":","Dict","=","{","}","for","intr","in","instructionFormat",":","name","=","intr","[","\"name\"","]","type","=","intr","[","\"type\"","]","if","type","==","SolanaInstructionType",".","u64",":","type_len","=","solanaInstructionSpace","[","type","]","decoded_params","[","name","]","=","int",".","from_bytes","(","decoded","[","last_end",":","last_end","+","type_len","]",",","\"little\"",")","last_end","=","last_end","+","type_len","elif","type","==","SolanaInstructionType",".","string",":","type_len","=","solanaInstructionSpace","[","type","]","instr_len","=","int",".","from_bytes","(","decoded","[","last_end",":","last_end","+","type_len","]",",","\"little\"",")","start",",","end","=","last_end","+","type_len",",","last_end","+","type_len","+","instr_len","decoded_value",":","bytes","=","decoded","[","start",":","end","]","decoded_params","[","name","]","=","str","(","decoded_value",",","\"utf-8\"",")","last_end","=","end","elif","type","==","SolanaInstructionType",".","EthereumAddress",":","type_len","=","solanaInstructionSpace","[","type","]","decoded_int","=","int",".","from_bytes","(","decoded","[","last_end",":","last_end","+","type_len","]",",","\"big\"",")","# Ensure stored address is of length 40 characters","# Pads zeros if present at start of string","# https:\/\/stackoverflow.com\/a\/12638477","decoded_params","[","name","]","=","f\"0x{decoded_int:040x}\"","last_end","=","last_end","+","type_len","elif","type","==","SolanaInstructionType",".","UnixTimestamp",":","type_len","=","solanaInstructionSpace","[","type","]","decoded_params","[","name","]","=","int",".","from_bytes","(","decoded","[","last_end",":","last_end","+","type_len","]",",","\"little\"",")","last_end","=","last_end","+","type_len","return","decoded_params"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/solana\/solana_parser.py#L30-L69"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/ipfs_lib.py","language":"python","identifier":"construct_image_dir_gateway_url","parameters":"(address, CID)","argument_list":"","return_statement":"return urljoin(address, f\"\/ipfs\/{CID}\/original.jpg\")","docstring":"Construct the gateway url for an image directory.\n\n Args:\n args.address - base url of gateway\n args.CID - CID of the image directory","docstring_summary":"Construct the gateway url for an image directory.","docstring_tokens":["Construct","the","gateway","url","for","an","image","directory","."],"function":"def construct_image_dir_gateway_url(address, CID):\n \"\"\"Construct the gateway url for an image directory.\n\n Args:\n args.address - base url of gateway\n args.CID - CID of the image directory\n \"\"\"\n if not address:\n return None\n\n return urljoin(address, f\"\/ipfs\/{CID}\/original.jpg\")","function_tokens":["def","construct_image_dir_gateway_url","(","address",",","CID",")",":","if","not","address",":","return","None","return","urljoin","(","address",",","f\"\/ipfs\/{CID}\/original.jpg\"",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/ipfs_lib.py#L298-L308"} 
{"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/ipfs_lib.py","language":"python","identifier":"IPFSClient.get_metadata","parameters":"(self, multihash, default_metadata_fields, user_replica_set=None)","argument_list":"","return_statement":"return api_metadata","docstring":"Retrieve file from IPFS or gateway, validating metadata requirements prior to\n returning an object with no missing entries","docstring_summary":"Retrieve file from IPFS or gateway, validating metadata requirements prior to\n returning an object with no missing entries","docstring_tokens":["Retrieve","file","from","IPFS","or","gateway","validating","metadata","requirements","prior","to","returning","an","object","with","no","missing","entries"],"function":"def get_metadata(self, multihash, default_metadata_fields, user_replica_set=None):\n \"\"\"Retrieve file from IPFS or gateway, validating metadata requirements prior to\n returning an object with no missing entries\n \"\"\"\n logger.warning(f\"IPFSCLIENT | get_metadata - {multihash}\")\n api_metadata = default_metadata_fields\n retrieved_from_gateway = False\n retrieved_from_ipfs_node = False\n start_time = time.time()\n\n with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:\n metadata_futures = {}\n metadata_futures[\n executor.submit(\n self.get_metadata_from_ipfs_node, multihash, default_metadata_fields\n )\n ] = \"metadata_from_ipfs_node\"\n metadata_futures[\n executor.submit(\n self.get_metadata_from_gateway,\n multihash,\n default_metadata_fields,\n user_replica_set,\n )\n ] = \"metadata_from_gateway\"\n for get_metadata_future in concurrent.futures.as_completed(\n metadata_futures, timeout=NEW_BLOCK_TIMEOUT_SECONDS\n ):\n metadata_fetch_source = metadata_futures[get_metadata_future]\n try:\n api_metadata = get_metadata_future.result()\n retrieved = api_metadata != default_metadata_fields\n if retrieved:\n logger.info(\n f\"IPFSCLIENT | retrieved metadata successfully, \\\n {api_metadata}, \\\n source: {metadata_fetch_source}\"\n )\n if metadata_fetch_source == \"metadata_from_gateway\":\n retrieved_from_gateway = True\n else:\n retrieved_from_ipfs_node = True\n self.force_clear_queue_and_stop_task_execution(executor)\n break # use first returned result\n except Exception as e:\n logger.error(\n f\"IPFSCLIENT | ipfs_lib.py | \\\n ERROR in metadata_futures parallel processing \\\n generated {e}, multihash: {multihash}, source: {metadata_fetch_source}\",\n exc_info=True,\n )\n\n retrieved_metadata = retrieved_from_gateway or retrieved_from_ipfs_node\n # Raise error if metadata is not retrieved.\n # Ensure default values are not written into database.\n if not retrieved_metadata:\n logger.error(\n f\"IPFSCLIENT | Retrieved metadata: {retrieved_metadata}. \"\n f\"retrieved from gateway : {retrieved_from_gateway}, \"\n f\"retrieved from local node : {retrieved_from_ipfs_node}\"\n )\n logger.error(api_metadata)\n logger.error(default_metadata_fields)\n raise Exception(\n f\"IPFSCLIENT | Failed to retrieve metadata. 
Using default values for {multihash}\"\n )\n\n duration = time.time() - start_time\n logger.info(\n f\"IPFSCLIENT | get_metadata ${multihash} {duration} seconds \\\n | from ipfs:{retrieved_from_ipfs_node} |from gateway:{retrieved_from_gateway}\"\n )\n\n return api_metadata","function_tokens":["def","get_metadata","(","self",",","multihash",",","default_metadata_fields",",","user_replica_set","=","None",")",":","logger",".","warning","(","f\"IPFSCLIENT | get_metadata - {multihash}\"",")","api_metadata","=","default_metadata_fields","retrieved_from_gateway","=","False","retrieved_from_ipfs_node","=","False","start_time","=","time",".","time","(",")","with","concurrent",".","futures",".","ThreadPoolExecutor","(","max_workers","=","2",")","as","executor",":","metadata_futures","=","{","}","metadata_futures","[","executor",".","submit","(","self",".","get_metadata_from_ipfs_node",",","multihash",",","default_metadata_fields",")","]","=","\"metadata_from_ipfs_node\"","metadata_futures","[","executor",".","submit","(","self",".","get_metadata_from_gateway",",","multihash",",","default_metadata_fields",",","user_replica_set",",",")","]","=","\"metadata_from_gateway\"","for","get_metadata_future","in","concurrent",".","futures",".","as_completed","(","metadata_futures",",","timeout","=","NEW_BLOCK_TIMEOUT_SECONDS",")",":","metadata_fetch_source","=","metadata_futures","[","get_metadata_future","]","try",":","api_metadata","=","get_metadata_future",".","result","(",")","retrieved","=","api_metadata","!=","default_metadata_fields","if","retrieved",":","logger",".","info","(","f\"IPFSCLIENT | retrieved metadata successfully, \\\n {api_metadata}, \\\n source: {metadata_fetch_source}\"",")","if","metadata_fetch_source","==","\"metadata_from_gateway\"",":","retrieved_from_gateway","=","True","else",":","retrieved_from_ipfs_node","=","True","self",".","force_clear_queue_and_stop_task_execution","(","executor",")","break","# use first returned result","except","Exception","as","e",":","logger",".","error","(","f\"IPFSCLIENT | ipfs_lib.py | \\\n ERROR in metadata_futures parallel processing \\\n generated {e}, multihash: {multihash}, source: {metadata_fetch_source}\"",",","exc_info","=","True",",",")","retrieved_metadata","=","retrieved_from_gateway","or","retrieved_from_ipfs_node","# Raise error if metadata is not retrieved.","# Ensure default values are not written into database.","if","not","retrieved_metadata",":","logger",".","error","(","f\"IPFSCLIENT | Retrieved metadata: {retrieved_metadata}. \"","f\"retrieved from gateway : {retrieved_from_gateway}, \"","f\"retrieved from local node : {retrieved_from_ipfs_node}\"",")","logger",".","error","(","api_metadata",")","logger",".","error","(","default_metadata_fields",")","raise","Exception","(","f\"IPFSCLIENT | Failed to retrieve metadata. 
Using default values for {multihash}\"",")","duration","=","time",".","time","(",")","-","start_time","logger",".","info","(","f\"IPFSCLIENT | get_metadata ${multihash} {duration} seconds \\\n | from ipfs:{retrieved_from_ipfs_node} |from gateway:{retrieved_from_gateway}\"",")","return","api_metadata"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/ipfs_lib.py#L73-L146"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/ipfs_lib.py","language":"python","identifier":"IPFSClient.get_metadata_from_gateway","parameters":"(\n self, multihash, default_metadata_fields, user_replica_set: str = None\n )","argument_list":"","return_statement":"return gateway_metadata_json","docstring":"Args:\n args.user_replica_set - comma-separated string of user's replica urls","docstring_summary":"Args:\n args.user_replica_set - comma-separated string of user's replica urls","docstring_tokens":["Args",":","args",".","user_replica_set","-","comma","-","separated","string","of","user","s","replica","urls"],"function":"def get_metadata_from_gateway(\n self, multihash, default_metadata_fields, user_replica_set: str = None\n ):\n \"\"\"Args:\n args.user_replica_set - comma-separated string of user's replica urls\n \"\"\"\n\n # Default return initial metadata format\n gateway_metadata_json = default_metadata_fields\n logger.warning(\n f\"IPFSCLIENT | get_metadata_from_gateway, {multihash} replica set: {user_replica_set}\"\n )\n gateway_endpoints = self._cnode_endpoints\n\n # first attempt to first fetch metadata from user replica set, if provided & non-empty\n if user_replica_set and isinstance(user_replica_set, str):\n user_replicas = user_replica_set.split(\",\")\n try:\n query_urls = [f\"{addr}\/ipfs\/{multihash}\" for addr in user_replicas]\n data = self.query_ipfs_metadata_json(\n query_urls, default_metadata_fields\n )\n if data is None:\n raise Exception()\n return data\n except Exception:\n logger.error(\n \"IPFSCLIENT | get_metadata_from_gateway \\\n \\nfailed to fetch metadata from user replica gateways\"\n )\n # Remove replica set from gateway endpoints before querying\n gateway_endpoints = list(\n filter(\n lambda endpoint: endpoint not in user_replicas,\n gateway_endpoints,\n )\n )\n\n logger.warning(\n f\"IPFSCLIENT | get_metadata_from_gateway, \\\n \\ncombined addresses: {gateway_endpoints}, \\\n \\ncnode_endpoints: {self._cnode_endpoints}\"\n )\n\n query_urls = [f\"{addr}\/ipfs\/{multihash}\" for addr in gateway_endpoints]\n data = self.query_ipfs_metadata_json(query_urls, default_metadata_fields)\n if data is None:\n raise Exception(\n f\"IPFSCLIENT | Failed to retrieve CID {multihash} from gateway\"\n )\n gateway_metadata_json = data\n return gateway_metadata_json","function_tokens":["def","get_metadata_from_gateway","(","self",",","multihash",",","default_metadata_fields",",","user_replica_set",":","str","=","None",")",":","# Default return initial metadata format","gateway_metadata_json","=","default_metadata_fields","logger",".","warning","(","f\"IPFSCLIENT | get_metadata_from_gateway, {multihash} replica set: {user_replica_set}\"",")","gateway_endpoints","=","self",".","_cnode_endpoints","# first attempt to first fetch metadata from user replica set, if provided & 
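A minimal sketch of the first-result-wins pattern `IPFSClient.get_metadata` uses: submit both fetch paths to a two-worker pool, take whichever future completes first with a non-default result, and stop waiting on the rest (the real implementation additionally cancels the losing future via a helper). The fetchers here are stand-ins:

```python
import concurrent.futures
import time

DEFAULT = {"title": None}  # stand-in for default_metadata_fields

def from_ipfs_node():
    time.sleep(0.2)
    return DEFAULT                 # simulate a miss from the slower source

def from_gateway():
    return {"title": "resolved"}   # simulate a hit from the faster source

with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
    futures = {
        executor.submit(from_ipfs_node): "ipfs_node",
        executor.submit(from_gateway): "gateway",
    }
    metadata = DEFAULT
    for future in concurrent.futures.as_completed(futures, timeout=5):
        result = future.result()
        if result != DEFAULT:
            metadata = result
            break                  # use the first successful source

assert metadata == {"title": "resolved"}
```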
non-empty","if","user_replica_set","and","isinstance","(","user_replica_set",",","str",")",":","user_replicas","=","user_replica_set",".","split","(","\",\"",")","try",":","query_urls","=","[","f\"{addr}\/ipfs\/{multihash}\"","for","addr","in","user_replicas","]","data","=","self",".","query_ipfs_metadata_json","(","query_urls",",","default_metadata_fields",")","if","data","is","None",":","raise","Exception","(",")","return","data","except","Exception",":","logger",".","error","(","\"IPFSCLIENT | get_metadata_from_gateway \\\n \\nfailed to fetch metadata from user replica gateways\"",")","# Remove replica set from gateway endpoints before querying","gateway_endpoints","=","list","(","filter","(","lambda","endpoint",":","endpoint","not","in","user_replicas",",","gateway_endpoints",",",")",")","logger",".","warning","(","f\"IPFSCLIENT | get_metadata_from_gateway, \\\n \\ncombined addresses: {gateway_endpoints}, \\\n \\ncnode_endpoints: {self._cnode_endpoints}\"",")","query_urls","=","[","f\"{addr}\/ipfs\/{multihash}\"","for","addr","in","gateway_endpoints","]","data","=","self",".","query_ipfs_metadata_json","(","query_urls",",","default_metadata_fields",")","if","data","is","None",":","raise","Exception","(","f\"IPFSCLIENT | Failed to retrieve CID {multihash} from gateway\"",")","gateway_metadata_json","=","data","return","gateway_metadata_json"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/ipfs_lib.py#L188-L239"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/multihash.py","language":"python","identifier":"is_valid_code","parameters":"(code: int)","argument_list":"","return_statement":"return False","docstring":"Checks whether a multihash code is valid.\n :param code: Code to check.\n :return: Boolean indicating whether `code` is a valid multihash code.","docstring_summary":"Checks whether a multihash code is valid.\n :param code: Code to check.\n :return: Boolean indicating whether `code` is a valid multihash code.","docstring_tokens":["Checks","whether","a","multihash","code","is","valid",".",":","param","code",":","Code","to","check",".",":","return",":","Boolean","indicating","whether","code","is","a","valid","multihash","code","."],"function":"def is_valid_code(code: int) -> bool:\n \"\"\"\n Checks whether a multihash code is valid.\n :param code: Code to check.\n :return: Boolean indicating whether `code` is a valid multihash code.\n \"\"\"\n\n if is_app_code(code):\n return True\n\n if code in constants.codes:\n return True\n\n return False","function_tokens":["def","is_valid_code","(","code",":","int",")","->","bool",":","if","is_app_code","(","code",")",":","return","True","if","code","in","constants",".","codes",":","return","True","return","False"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/multihash.py#L13-L26"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/multihash.py","language":"python","identifier":"is_app_code","parameters":"(code: int)","argument_list":"","return_statement":"return 0 < code < 0x10","docstring":"Checks whether a code is part of the app range.\n :param code: Code to check.\n :return: Boolean indicating whether `code` is within the app range.","docstring_summary":"Checks whether a code is part of the app 
range.\n :param code: Code to check.\n :return: Boolean indicating whether `code` is within the app range.","docstring_tokens":["Checks","whether","a","code","is","part","of","the","app","range",".",":","param","code",":","Code","to","check",".",":","return",":","Boolean","indicating","whether","code","is","within","the","app","range","."],"function":"def is_app_code(code: int) -> bool:\n \"\"\"\n Checks whether a code is part of the app range.\n :param code: Code to check.\n :return: Boolean indicating whether `code` is within the app range.\n \"\"\"\n return 0 < code < 0x10","function_tokens":["def","is_app_code","(","code",":","int",")","->","bool",":","return","0","<","code","<","0x10"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/multihash.py#L29-L35"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/multihash.py","language":"python","identifier":"coerce_code","parameters":"(name: Union[str, int])","argument_list":"","return_statement":"return code","docstring":"Converts a hash function name into the matching code.\n If passed a number it will return the number if it's a valid code.\n :param name: The input hash function code.\n :return: Output hash function number\/code.","docstring_summary":"Converts a hash function name into the matching code.\n If passed a number it will return the number if it's a valid code.\n :param name: The input hash function code.\n :return: Output hash function number\/code.","docstring_tokens":["Converts","a","hash","function","name","into","the","matching","code",".","If","passed","a","number","it","will","return","the","number","if","it","s","a","valid","code",".",":","param","name",":","The","input","hash","function","code",".",":","return",":","Output","hash","function","number","\/","code","."],"function":"def coerce_code(name: Union[str, int]) -> int:\n \"\"\"\n Converts a hash function name into the matching code.\n If passed a number it will return the number if it's a valid code.\n :param name: The input hash function code.\n :return: Output hash function number\/code.\n \"\"\"\n code = name\n if isinstance(name, str):\n if name not in constants.names:\n raise ValueError(f\"Unrecognized hash function name: {name}\")\n code = constants.names[name]\n\n if not isinstance(code, int):\n raise TypeError(f\"Hash function code should be a number. Got: {code}\")\n\n if code not in constants.codes and not is_app_code(code):\n raise ValueError(f\"Unrecognized function code: {code}\")\n\n return code","function_tokens":["def","coerce_code","(","name",":","Union","[","str",",","int","]",")","->","int",":","code","=","name","if","isinstance","(","name",",","str",")",":","if","name","not","in","constants",".","names",":","raise","ValueError","(","f\"Unrecognized hash function name: {name}\"",")","code","=","constants",".","names","[","name","]","if","not","isinstance","(","code",",","int",")",":","raise","TypeError","(","f\"Hash function code should be a number. 
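`is_app_code` reserves codes 0x01 through 0x0f for application-specific hash functions, and `is_valid_code` accepts those plus anything in the constants table. Boundary checks for the range test, with the body copied from the record above:

```python
def is_app_code(code: int) -> bool:
    # Application-specific multihash codes occupy the open range (0, 0x10).
    return 0 < code < 0x10

assert is_app_code(0x01) and is_app_code(0x0F)
assert not is_app_code(0x00) and not is_app_code(0x10)
```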
Got: {code}\"",")","if","code","not","in","constants",".","codes","and","not","is_app_code","(","code",")",":","raise","ValueError","(","f\"Unrecognized function code: {code}\"",")","return","code"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/multihash.py#L38-L57"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/multihash.py","language":"python","identifier":"encode","parameters":"(digest: bytes, code: Union[str, int], length: Optional[int] = None)","argument_list":"","return_statement":"return varint.to_varint(hash_function) + varint.to_varint(length) + digest","docstring":"Encode a hash digest along with the specified function code.\n :param digest: Input hash digest as an array of bytes.\n :param code: The hash function code as an int or string.\n :param length: The digest length. Defaults to None, in which case length is derived from the digest itself.\n :return: Output digest bytes array.","docstring_summary":"Encode a hash digest along with the specified function code.\n :param digest: Input hash digest as an array of bytes.\n :param code: The hash function code as an int or string.\n :param length: The digest length. Defaults to None, in which case length is derived from the digest itself.\n :return: Output digest bytes array.","docstring_tokens":["Encode","a","hash","digest","along","with","the","specified","function","code",".",":","param","digest",":","Input","hash","digest","as","an","array","of","bytes",".",":","param","code",":","The","hash","function","code","as","an","int","or","string",".",":","param","length",":","The","digest","length",".","Defaults","to","None","in","which","case","length","is","derived","from","the","digest","itself",".",":","return",":","Output","digest","bytes","array","."],"function":"def encode(digest: bytes, code: Union[str, int], length: Optional[int] = None) -> bytes:\n \"\"\"\n Encode a hash digest along with the specified function code.\n :param digest: Input hash digest as an array of bytes.\n :param code: The hash function code as an int or string.\n :param length: The digest length. 
Defaults to None, in which case length is derived from the digest itself.\n :return: Output digest bytes array.\n \"\"\"\n # Ensure it's a hash function code.\n hash_function = coerce_code(code)\n\n if not isinstance(digest, bytes):\n raise TypeError(\"digest should be `bytes` object\")\n\n length = len(digest) if length is None else length\n\n if length != len(digest):\n raise ValueError(\"digest length should be equal to specified length\")\n\n return varint.to_varint(hash_function) + varint.to_varint(length) + digest","function_tokens":["def","encode","(","digest",":","bytes",",","code",":","Union","[","str",",","int","]",",","length",":","Optional","[","int","]","=","None",")","->","bytes",":","# Ensure it's a hash function code.","hash_function","=","coerce_code","(","code",")","if","not","isinstance","(","digest",",","bytes",")",":","raise","TypeError","(","\"digest should be `bytes` object\"",")","length","=","len","(","digest",")","if","length","is","None","else","length","if","length","!=","len","(","digest",")",":","raise","ValueError","(","\"digest length should be equal to specified length\"",")","return","varint",".","to_varint","(","hash_function",")","+","varint",".","to_varint","(","length",")","+","digest"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/multihash.py#L60-L79"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/multihash.py","language":"python","identifier":"decode","parameters":"(buf: bytes)","argument_list":"","return_statement":"return dict(code=code, name=constants.codes[code], length=length, digest=buf)","docstring":"Decode a hash from the given multihash.\n :param buf: Input multihash as an array of bytes.\n :return: Dictionary of type {code: int, name: str, length: int, digest: bytes}","docstring_summary":"Decode a hash from the given multihash.\n :param buf: Input multihash as an array of bytes.\n :return: Dictionary of type {code: int, name: str, length: int, digest: bytes}","docstring_tokens":["Decode","a","hash","from","the","given","multihash",".",":","param","buf",":","Input","multihash","as","an","array","of","bytes",".",":","return",":","Dictionary","of","type","{","code",":","int","name",":","str","length",":","int","digest",":","bytes","}"],"function":"def decode(buf: bytes) -> Dict[str, Any]:\n \"\"\"\n Decode a hash from the given multihash.\n :param buf: Input multihash as an array of bytes.\n :return: Dictionary of type {code: int, name: str, length: int, digest: bytes}\n \"\"\"\n if not isinstance(buf, bytes):\n raise TypeError(\"multihash should be `bytes` object\")\n\n if len(buf) < 3:\n raise ValueError(\"multihash too short. 
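Putting `encode` together with a real digest shows the wire layout: one varint for the function code, one varint for the length, then the raw digest bytes. A minimal sketch, assuming the standard sha2-256 code of 0x12:

```python
import hashlib

from src.utils.multihash import encode  # path per the record

digest = hashlib.sha256(b"hello").digest()  # 32 raw bytes
mh = encode(digest, "sha2-256")

assert mh[0] == 0x12      # varint-encoded function code (one byte for codes < 0x80)
assert mh[1] == 0x20      # varint-encoded length: 32
assert mh[2:] == digest   # the digest is appended untouched
assert len(mh) == 34

encode(digest, "sha2-256", length=31)  # raises ValueError: length mismatch
```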
must be > 3 bytes.\")\n\n code, n = varint.from_varint(buf)\n if not is_valid_code(code):\n raise ValueError(f\"multihash unknown function code: {code}\")\n\n length, n = varint.from_varint(buf, n)\n if length < 1:\n raise ValueError(f\"multihash invalid length: {length}\")\n\n buf = buf[n:]\n\n if len(buf) != length:\n raise ValueError(f\"multihash length inconsistent: {len(buf)} != {length}\")\n\n return dict(code=code, name=constants.codes[code], length=length, digest=buf)","function_tokens":["def","decode","(","buf",":","bytes",")","->","Dict","[","str",",","Any","]",":","if","not","isinstance","(","buf",",","bytes",")",":","raise","TypeError","(","\"multihash should be `bytes` object\"",")","if","len","(","buf",")","<","3",":","raise","ValueError","(","\"multihash too short. must be > 3 bytes.\"",")","code",",","n","=","varint",".","from_varint","(","buf",")","if","not","is_valid_code","(","code",")",":","raise","ValueError","(","f\"multihash unknown function code: {code}\"",")","length",",","n","=","varint",".","from_varint","(","buf",",","n",")","if","length","<","1",":","raise","ValueError","(","f\"multihash invalid length: {length}\"",")","buf","=","buf","[","n",":","]","if","len","(","buf",")","!=","length",":","raise","ValueError","(","f\"multihash length inconsistent: {len(buf)} != {length}\"",")","return","dict","(","code","=","code",",","name","=","constants",".","codes","[","code","]",",","length","=","length",",","digest","=","buf",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/multihash.py#L82-L107"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/multihash.py","language":"python","identifier":"to_b58_string","parameters":"(multihash: bytes)","argument_list":"","return_statement":"return base58.b58encode(multihash).decode()","docstring":"Convert the given multihash to a base58 encoded string.\n :param multihash: The input multihash as an array of bytes.\n :return: Output Base58 encoded string representation of input hash.","docstring_summary":"Convert the given multihash to a base58 encoded string.\n :param multihash: The input multihash as an array of bytes.\n :return: Output Base58 encoded string representation of input hash.","docstring_tokens":["Convert","the","given","multihash","to","a","base58","encoded","string",".",":","param","multihash",":","The","input","multihash","as","an","array","of","bytes",".",":","return",":","Output","Base58","encoded","string","representation","of","input","hash","."],"function":"def to_b58_string(multihash: bytes) -> str:\n \"\"\"\n Convert the given multihash to a base58 encoded string.\n :param multihash: The input multihash as an array of bytes.\n :return: Output Base58 encoded string representation of input hash.\n \"\"\"\n if not isinstance(multihash, bytes):\n raise TypeError(\"input must be bytes array\")\n return base58.b58encode(multihash).decode()","function_tokens":["def","to_b58_string","(","multihash",":","bytes",")","->","str",":","if","not","isinstance","(","multihash",",","bytes",")",":","raise","TypeError","(","\"input must be bytes array\"",")","return","base58",".","b58encode","(","multihash",")",".","decode","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/multihash.py#L110-L118"} 
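`decode` is the exact inverse of `encode`, so a round trip recovers the code, name, length, and digest. (One small quirk worth noting: the guard rejects inputs shorter than 3 bytes, so exactly 3 bytes is accepted even though the error message says "> 3".) A sketch continuing the `encode` example above:

```python
from src.utils.multihash import decode  # path per the record

info = decode(mh)                  # mh from the encode() sketch above
assert info["code"] == 0x12
assert info["name"] == "sha2-256"  # assuming the standard codes table
assert info["length"] == 32
assert info["digest"] == digest

decode(b"\x12\x20")                # raises ValueError: multihash too short
```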
{"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/multihash.py","language":"python","identifier":"from_b58_string","parameters":"(multihash: str)","argument_list":"","return_statement":"return base58.b58decode(multihash)","docstring":"Convert the given base58 encoded string to a multi-hash.\n :param multihash: The input multihash as a base58 encoded string.\n :return: Output byte array representation of input hash.","docstring_summary":"Convert the given base58 encoded string to a multi-hash.\n :param multihash: The input multihash as a base58 encoded string.\n :return: Output byte array representation of input hash.","docstring_tokens":["Convert","the","given","base58","encoded","string","to","a","multi","-","hash",".",":","param","multihash",":","The","input","multihash","as","a","base58","encoded","string",".",":","return",":","Output","byte","array","representation","of","input","hash","."],"function":"def from_b58_string(multihash: str) -> bytes:\n \"\"\"\n Convert the given base58 encoded string to a multi-hash.\n :param multihash: The input multihash as a base58 encoded string.\n :return: Output byte array representation of input hash.\n \"\"\"\n if not isinstance(multihash, str):\n raise TypeError(\"input must be string\")\n return base58.b58decode(multihash)","function_tokens":["def","from_b58_string","(","multihash",":","str",")","->","bytes",":","if","not","isinstance","(","multihash",",","str",")",":","raise","TypeError","(","\"input must be string\"",")","return","base58",".","b58decode","(","multihash",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/multihash.py#L121-L129"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/get_all_other_nodes.py","language":"python","identifier":"get_node_endpoint","parameters":"()","argument_list":"","return_statement":"return endpoint","docstring":"Get endpoint for this discovery node\n At each node, get the service info which includes the endpoint\n return node endpoint of node with matching delegate_owner_wallet","docstring_summary":"Get endpoint for this discovery node\n At each node, get the service info which includes the endpoint\n return node endpoint of node with matching delegate_owner_wallet","docstring_tokens":["Get","endpoint","for","this","discovery","node","At","each","node","get","the","service","info","which","includes","the","endpoint","return","node","endpoint","of","node","with","matching","delegate_owner_wallet"],"function":"def get_node_endpoint() -> Optional[str]:\n \"\"\"\n Get endpoint for this discovery node\n At each node, get the service info which includes the endpoint\n return node endpoint of node with matching delegate_owner_wallet\n \"\"\"\n eth_web3 = web3_provider.get_eth_web3()\n\n eth_registry_address = eth_web3.toChecksumAddress(\n shared_config[\"eth_contracts\"][\"registry\"]\n )\n eth_registry_instance = eth_web3.eth.contract(\n address=eth_registry_address, abi=eth_abi_values[\"Registry\"][\"abi\"]\n )\n sp_factory_address = eth_registry_instance.functions.getContract(\n SP_FACTORY_REGISTRY_KEY\n ).call()\n sp_factory_inst = eth_web3.eth.contract(\n address=sp_factory_address, abi=eth_abi_values[\"ServiceProviderFactory\"][\"abi\"]\n )\n num_discovery_nodes = sp_factory_inst.functions.getTotalServiceTypeProviders(\n DISCOVERY_NODE_SERVICE_TYPE\n 
).call()\n logger.info(f\"number of discovery nodes: {num_discovery_nodes}\")\n\n ids_list = list(range(1, num_discovery_nodes + 1))\n\n endpoint: Optional[str] = None\n\n # fetch all discovery nodes info in parallel\n with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:\n discovery_node_futures = {\n executor.submit(fetch_discovery_node_info, i, sp_factory_inst): i\n for i in ids_list\n }\n for future in concurrent.futures.as_completed(discovery_node_futures):\n node_op = discovery_node_futures[future]\n try:\n node_info = future.result()\n wallet = node_info[3]\n if wallet == shared_config[\"delegate\"][\"owner_wallet\"]:\n endpoint = node_info[1]\n break\n except Exception as e:\n logger.error(\n f\"get_all_other_nodes.py | ERROR in discovery_node_futures {node_op} generated {e}\"\n )\n\n logger.info(f\"this node's endpoint: {endpoint}\")\n\n return endpoint","function_tokens":["def","get_node_endpoint","(",")","->","Optional","[","str","]",":","eth_web3","=","web3_provider",".","get_eth_web3","(",")","eth_registry_address","=","eth_web3",".","toChecksumAddress","(","shared_config","[","\"eth_contracts\"","]","[","\"registry\"","]",")","eth_registry_instance","=","eth_web3",".","eth",".","contract","(","address","=","eth_registry_address",",","abi","=","eth_abi_values","[","\"Registry\"","]","[","\"abi\"","]",")","sp_factory_address","=","eth_registry_instance",".","functions",".","getContract","(","SP_FACTORY_REGISTRY_KEY",")",".","call","(",")","sp_factory_inst","=","eth_web3",".","eth",".","contract","(","address","=","sp_factory_address",",","abi","=","eth_abi_values","[","\"ServiceProviderFactory\"","]","[","\"abi\"","]",")","num_discovery_nodes","=","sp_factory_inst",".","functions",".","getTotalServiceTypeProviders","(","DISCOVERY_NODE_SERVICE_TYPE",")",".","call","(",")","logger",".","info","(","f\"number of discovery nodes: {num_discovery_nodes}\"",")","ids_list","=","list","(","range","(","1",",","num_discovery_nodes","+","1",")",")","endpoint",":","Optional","[","str","]","=","None","# fetch all discovery nodes info in parallel","with","concurrent",".","futures",".","ThreadPoolExecutor","(","max_workers","=","5",")","as","executor",":","discovery_node_futures","=","{","executor",".","submit","(","fetch_discovery_node_info",",","i",",","sp_factory_inst",")",":","i","for","i","in","ids_list","}","for","future","in","concurrent",".","futures",".","as_completed","(","discovery_node_futures",")",":","node_op","=","discovery_node_futures","[","future","]","try",":","node_info","=","future",".","result","(",")","wallet","=","node_info","[","3","]","if","wallet","==","shared_config","[","\"delegate\"","]","[","\"owner_wallet\"","]",":","endpoint","=","node_info","[","1","]","break","except","Exception","as","e",":","logger",".","error","(","f\"get_all_other_nodes.py | ERROR in discovery_node_futures {node_op} generated {e}\"",")","logger",".","info","(","f\"this node's endpoint: {endpoint}\"",")","return","endpoint"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/get_all_other_nodes.py#L24-L74"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/get_all_other_nodes.py","language":"python","identifier":"get_all_other_nodes","parameters":"()","argument_list":"","return_statement":"return all_other_nodes, all_other_wallets","docstring":"Get number of discovery nodes\n At each node, get the service 
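The heart of `get_node_endpoint` is a fan-out over 1-based service-provider IDs followed by a wallet match. The same shape, reduced to a hedged standalone sketch: `fetch_info` stands in for `fetch_discovery_node_info`, and the tuple indices 1 (endpoint) and 3 (delegate owner wallet) follow the source:

```python
import concurrent.futures

def find_own_endpoint(fetch_info, sp_factory_inst, num_nodes, own_wallet):
    """Return the endpoint whose delegate owner wallet matches own_wallet."""
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
        futures = {
            executor.submit(fetch_info, i, sp_factory_inst): i
            for i in range(1, num_nodes + 1)  # service provider IDs are 1-based
        }
        for future in concurrent.futures.as_completed(futures):
            node_info = future.result()
            if node_info[3] == own_wallet:  # index 3: delegate owner wallet
                return node_info[1]         # index 1: endpoint
    return None
```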
info which includes the endpoint\n Return all a tuple of node endpoints except that of this node and all wallets\n (endpoints, wallets)","docstring_summary":"Get number of discovery nodes\n At each node, get the service info which includes the endpoint\n Return all a tuple of node endpoints except that of this node and all wallets\n (endpoints, wallets)","docstring_tokens":["Get","number","of","discovery","nodes","At","each","node","get","the","service","info","which","includes","the","endpoint","Return","all","a","tuple","of","node","endpoints","except","that","of","this","node","and","all","wallets","(","endpoints","wallets",")"],"function":"def get_all_other_nodes() -> Tuple[List[str], List[str]]:\n \"\"\"\n Get number of discovery nodes\n At each node, get the service info which includes the endpoint\n Return all a tuple of node endpoints except that of this node and all wallets\n (endpoints, wallets)\n \"\"\"\n eth_web3 = web3_provider.get_eth_web3()\n\n eth_registry_address = eth_web3.toChecksumAddress(\n shared_config[\"eth_contracts\"][\"registry\"]\n )\n eth_registry_instance = eth_web3.eth.contract(\n address=eth_registry_address, abi=eth_abi_values[\"Registry\"][\"abi\"]\n )\n sp_factory_address = eth_registry_instance.functions.getContract(\n SP_FACTORY_REGISTRY_KEY\n ).call()\n sp_factory_inst = eth_web3.eth.contract(\n address=sp_factory_address, abi=eth_abi_values[\"ServiceProviderFactory\"][\"abi\"]\n )\n num_discovery_nodes = sp_factory_inst.functions.getTotalServiceTypeProviders(\n DISCOVERY_NODE_SERVICE_TYPE\n ).call()\n logger.info(f\"number of discovery nodes: {num_discovery_nodes}\")\n\n ids_list = list(range(1, num_discovery_nodes + 1))\n all_other_nodes = []\n all_other_wallets = []\n\n # fetch all discovery nodes info in parallel\n with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:\n discovery_node_futures = {\n executor.submit(fetch_discovery_node_info, i, sp_factory_inst): i\n for i in ids_list\n }\n for future in concurrent.futures.as_completed(discovery_node_futures):\n node_op = discovery_node_futures[future]\n try:\n node_info = future.result()\n wallet = node_info[3]\n if wallet != shared_config[\"delegate\"][\"owner_wallet\"]:\n endpoint = node_info[1]\n all_other_wallets.append(wallet)\n if is_fqdn(endpoint):\n all_other_nodes.append(endpoint)\n except Exception as e:\n logger.error(\n f\"index_metrics.py | ERROR in discovery_node_futures {node_op} generated {e}\"\n )\n\n logger.info(\n f\"this node's delegate owner wallet: {shared_config['delegate']['owner_wallet']}\"\n )\n logger.info(f\"all the other nodes: {all_other_nodes}\")\n return all_other_nodes, 
all_other_wallets","function_tokens":["def","get_all_other_nodes","(",")","->","Tuple","[","List","[","str","]",",","List","[","str","]","]",":","eth_web3","=","web3_provider",".","get_eth_web3","(",")","eth_registry_address","=","eth_web3",".","toChecksumAddress","(","shared_config","[","\"eth_contracts\"","]","[","\"registry\"","]",")","eth_registry_instance","=","eth_web3",".","eth",".","contract","(","address","=","eth_registry_address",",","abi","=","eth_abi_values","[","\"Registry\"","]","[","\"abi\"","]",")","sp_factory_address","=","eth_registry_instance",".","functions",".","getContract","(","SP_FACTORY_REGISTRY_KEY",")",".","call","(",")","sp_factory_inst","=","eth_web3",".","eth",".","contract","(","address","=","sp_factory_address",",","abi","=","eth_abi_values","[","\"ServiceProviderFactory\"","]","[","\"abi\"","]",")","num_discovery_nodes","=","sp_factory_inst",".","functions",".","getTotalServiceTypeProviders","(","DISCOVERY_NODE_SERVICE_TYPE",")",".","call","(",")","logger",".","info","(","f\"number of discovery nodes: {num_discovery_nodes}\"",")","ids_list","=","list","(","range","(","1",",","num_discovery_nodes","+","1",")",")","all_other_nodes","=","[","]","all_other_wallets","=","[","]","# fetch all discovery nodes info in parallel","with","concurrent",".","futures",".","ThreadPoolExecutor","(","max_workers","=","5",")","as","executor",":","discovery_node_futures","=","{","executor",".","submit","(","fetch_discovery_node_info",",","i",",","sp_factory_inst",")",":","i","for","i","in","ids_list","}","for","future","in","concurrent",".","futures",".","as_completed","(","discovery_node_futures",")",":","node_op","=","discovery_node_futures","[","future","]","try",":","node_info","=","future",".","result","(",")","wallet","=","node_info","[","3","]","if","wallet","!=","shared_config","[","\"delegate\"","]","[","\"owner_wallet\"","]",":","endpoint","=","node_info","[","1","]","all_other_wallets",".","append","(","wallet",")","if","is_fqdn","(","endpoint",")",":","all_other_nodes",".","append","(","endpoint",")","except","Exception","as","e",":","logger",".","error","(","f\"index_metrics.py | ERROR in discovery_node_futures {node_op} generated {e}\"",")","logger",".","info","(","f\"this node's delegate owner wallet: {shared_config['delegate']['owner_wallet']}\"",")","logger",".","info","(","f\"all the other nodes: {all_other_nodes}\"",")","return","all_other_nodes",",","all_other_wallets"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/get_all_other_nodes.py#L77-L132"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/session_manager.py","language":"python","identifier":"SessionManager.comment_sql_calls","parameters":"(\n self, conn, cursor, statement, parameters, context, executemany\n )","argument_list":"","return_statement":"return statement, parameters","docstring":"Before the engine tries to execute a statement,\n try to comment the caller's function name.","docstring_summary":"Before the engine tries to execute a statement,\n try to comment the caller's function name.","docstring_tokens":["Before","the","engine","tries","to","execute","a","statement","try","to","comment","the","caller","s","function","name","."],"function":"def comment_sql_calls(\n self, conn, cursor, statement, parameters, context, executemany\n ):\n \"\"\"\n Before the engine tries to execute a statement,\n try to comment the 
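One subtlety worth noting in `get_all_other_nodes`: a wallet is appended unconditionally, but its endpoint is only appended when `is_fqdn` passes, so the two returned lists are neither index-aligned nor guaranteed to be the same length. A short illustration of the consequence:

```python
from src.utils.get_all_other_nodes import get_all_other_nodes  # path per the record

endpoints, wallets = get_all_other_nodes()

# Safe: treat the lists independently.
known_wallets = set(wallets)

# Unsafe: zip() would silently mispair entries whenever an endpoint
# failed the FQDN check, so do not rely on this alignment:
# pairs = list(zip(endpoints, wallets))
```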
caller's function name.\n \"\"\"\n if \"src\" in conn.info:\n statement = f\"-- {conn.info.pop('src')} \\n{statement}\"\n\n return statement, parameters","function_tokens":["def","comment_sql_calls","(","self",",","conn",",","cursor",",","statement",",","parameters",",","context",",","executemany",")",":","if","\"src\"","in","conn",".","info",":","statement","=","f\"-- {conn.info.pop('src')} \\n{statement}\"","return","statement",",","parameters"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/session_manager.py#L30-L40"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/session_manager.py","language":"python","identifier":"SessionManager.session_on_after_begin","parameters":"(self, session, transaction, connection)","argument_list":"","return_statement":"","docstring":"After a transaction has begun, try to add the caller's function\n name to the connection.\n\n This serves as a bridge between the ORM session object and the connection\n which will be used for statements.","docstring_summary":"After a transaction has begun, try to add the caller's function\n name to the connection.","docstring_tokens":["After","a","transaction","has","begun","try","to","add","the","caller","s","function","name","to","the","connection","."],"function":"def session_on_after_begin(self, session, transaction, connection):\n \"\"\"\n After a transaction has begun, try to add the caller's function\n name to the connection.\n\n This serves as a bridge between the ORM session object and the connection\n which will be used for statements.\n \"\"\"\n if \"src\" in session.info:\n connection.info[\"src\"] = session.info[\"src\"]","function_tokens":["def","session_on_after_begin","(","self",",","session",",","transaction",",","connection",")",":","if","\"src\"","in","session",".","info",":","connection",".","info","[","\"src\"","]","=","session",".","info","[","\"src\"","]"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/session_manager.py#L42-L51"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/session_manager.py","language":"python","identifier":"SessionManager.on_connect","parameters":"(self, dbapi_conn, connection_record)","argument_list":"","return_statement":"","docstring":"Callback invoked with a raw DBAPI connection every time the engine assigns a new\n connection to the session manager.\n\n Actions that should be fired on new connection should be performed here.\n For example, pg_trgm.similarity_threshold needs to be set once for each connection,\n but not if that connection is recycled and used in another session.","docstring_summary":"Callback invoked with a raw DBAPI connection every time the engine assigns a new\n connection to the session manager.","docstring_tokens":["Callback","invoked","with","a","raw","DBAPI","connection","every","time","the","engine","assigns","a","new","connection","to","the","session","manager","."],"function":"def on_connect(self, dbapi_conn, connection_record):\n \"\"\"\n Callback invoked with a raw DBAPI connection every time the engine assigns a new\n connection to the session manager.\n\n Actions that should be fired on new connection should be performed here.\n For example, pg_trgm.similarity_threshold needs to be set once for each 
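`comment_sql_calls` is a standard SQLAlchemy `before_cursor_execute` hook; here is a self-contained sketch of the same wiring (with `retval=True` so the rewritten statement is actually used) against a throwaway in-memory SQLite engine:

```python
from sqlalchemy import create_engine, event, text

engine = create_engine("sqlite://")  # in-memory engine, just for the demo

@event.listens_for(engine, "before_cursor_execute", retval=True)
def comment_sql_calls(conn, cursor, statement, parameters, context, executemany):
    # Same shape as SessionManager.comment_sql_calls: prepend the tagged caller.
    if "src" in conn.info:
        statement = f"-- {conn.info.pop('src')} \n{statement}"
    return statement, parameters

with engine.connect() as conn:
    conn.info["src"] = "my_caller"   # normally set by session_on_after_begin
    conn.execute(text("SELECT 1"))   # emitted as "-- my_caller \nSELECT 1"
```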
connection,\n but not if that connection is recycled and used in another session.\n \"\"\"\n logger.debug(\"Using new DBAPI connection\")\n cursor = dbapi_conn.cursor()\n set_search_similarity(cursor)\n cursor.close()","function_tokens":["def","on_connect","(","self",",","dbapi_conn",",","connection_record",")",":","logger",".","debug","(","\"Using new DBAPI connection\"",")","cursor","=","dbapi_conn",".","cursor","(",")","set_search_similarity","(","cursor",")","cursor",".","close","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/session_manager.py#L53-L65"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/session_manager.py","language":"python","identifier":"SessionManager.session","parameters":"(self)","argument_list":"","return_statement":"return self._session_factory()","docstring":"Get a session for direct management\/use. Use not recommended unless absolutely\n necessary.","docstring_summary":"Get a session for direct management\/use. Use not recommended unless absolutely\n necessary.","docstring_tokens":["Get","a","session","for","direct","management","\/","use",".","Use","not","recommended","unless","absolutely","necessary","."],"function":"def session(self):\n \"\"\"\n Get a session for direct management\/use. Use not recommended unless absolutely\n necessary.\n \"\"\"\n return self._session_factory()","function_tokens":["def","session","(","self",")",":","return","self",".","_session_factory","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/session_manager.py#L67-L72"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/session_manager.py","language":"python","identifier":"SessionManager.scoped_session","parameters":"(self, expire_on_commit=True)","argument_list":"","return_statement":"","docstring":"Usage:\n with scoped_session() as session:\n use the session ...\n\n Session commits when leaving the block normally, or rolls back if an exception\n is thrown.\n\n Taken from: http:\/\/docs.sqlalchemy.org\/en\/latest\/orm\/session_basics.html","docstring_summary":"Usage:\n with scoped_session() as session:\n use the session ...","docstring_tokens":["Usage",":","with","scoped_session","()","as","session",":","use","the","session","..."],"function":"def scoped_session(self, expire_on_commit=True):\n \"\"\"\n Usage:\n with scoped_session() as session:\n use the session ...\n\n Session commits when leaving the block normally, or rolls back if an exception\n is thrown.\n\n Taken from: http:\/\/docs.sqlalchemy.org\/en\/latest\/orm\/session_basics.html\n \"\"\"\n session = self._session_factory()\n session.expire_on_commit = expire_on_commit\n\n try:\n session.info[\"src\"] = inspect.stack()[2][3] # get caller's function name\n except Exception:\n pass\n\n try:\n yield session\n session.commit()\n except:\n session.rollback()\n raise\n finally:\n session.close()","function_tokens":["def","scoped_session","(","self",",","expire_on_commit","=","True",")",":","session","=","self",".","_session_factory","(",")","session",".","expire_on_commit","=","expire_on_commit","try",":","session",".","info","[","\"src\"","]","=","inspect",".","stack","(",")","[","2","]","[","3","]","# get caller's function 
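`on_connect` rides SQLAlchemy's `"connect"` event, which fires once per new DBAPI connection rather than once per pool checkout, making it the right place for per-connection settings. A hedged sketch of the pattern, assuming a PostgreSQL engine; the actual statement issued by `set_search_similarity` is not shown in the record, so the `SET` below is illustrative:

```python
from sqlalchemy import create_engine, event

engine = create_engine(DATABASE_URL)  # hypothetical Postgres URL; pg_trgm required

@event.listens_for(engine, "connect")
def on_connect(dbapi_conn, connection_record):
    # Fires once per new DBAPI connection, not per checkout, so a recycled
    # connection keeps the setting without paying for it again.
    cursor = dbapi_conn.cursor()
    # Illustrative stand-in for set_search_similarity(cursor).
    cursor.execute("SET pg_trgm.similarity_threshold = 0.4")
    cursor.close()
```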
name","except","Exception",":","pass","try",":","yield","session","session",".","commit","(",")","except",":","session",".","rollback","(",")","raise","finally",":","session",".","close","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/session_manager.py#L75-L101"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/helpers.py","language":"python","identifier":"get_ip","parameters":"(request_obj)","argument_list":"","return_statement":"return ip.split(\",\")[0].strip()","docstring":"Gets the IP address from a request using the X-Forwarded-For header if present","docstring_summary":"Gets the IP address from a request using the X-Forwarded-For header if present","docstring_tokens":["Gets","the","IP","address","from","a","request","using","the","X","-","Forwarded","-","For","header","if","present"],"function":"def get_ip(request_obj):\n \"\"\"Gets the IP address from a request using the X-Forwarded-For header if present\"\"\"\n ip = request_obj.headers.get(\"X-Forwarded-For\", request_obj.remote_addr)\n if not ip:\n return \"\"\n return ip.split(\",\")[0].strip()","function_tokens":["def","get_ip","(","request_obj",")",":","ip","=","request_obj",".","headers",".","get","(","\"X-Forwarded-For\"",",","request_obj",".","remote_addr",")","if","not","ip",":","return","\"\"","return","ip",".","split","(","\",\"",")","[","0","]",".","strip","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/helpers.py#L23-L28"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/helpers.py","language":"python","identifier":"get_openresty_public_key","parameters":"()","argument_list":"","return_statement":"","docstring":"Get public key for openresty if it is running","docstring_summary":"Get public key for openresty if it is running","docstring_tokens":["Get","public","key","for","openresty","if","it","is","running"],"function":"def get_openresty_public_key():\n \"\"\"Get public key for openresty if it is running\"\"\"\n try:\n resp = requests.get(\"http:\/\/localhost:5000\/openresty_pubkey\", timeout=1)\n resp.raise_for_status()\n return resp.text\n except requests.exceptions.RequestException:\n return None","function_tokens":["def","get_openresty_public_key","(",")",":","try",":","resp","=","requests",".","get","(","\"http:\/\/localhost:5000\/openresty_pubkey\"",",","timeout","=","1",")","resp",".","raise_for_status","(",")","return","resp",".","text","except","requests",".","exceptions",".","RequestException",":","return","None"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/helpers.py#L31-L38"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/helpers.py","language":"python","identifier":"cd","parameters":"(path)","argument_list":"","return_statement":"","docstring":"Context manager that changes to directory `path` and return to CWD\n when exited.","docstring_summary":"Context manager that changes to directory `path` and return to CWD\n when exited.","docstring_tokens":["Context","manager","that","changes","to","directory","path","and","return","to","CWD","when","exited","."],"function":"def 
cd(path):\n \"\"\"Context manager that changes to directory `path` and return to CWD\n when exited.\n \"\"\"\n old_path = os.getcwd()\n os.chdir(path)\n try:\n yield\n finally:\n os.chdir(old_path)","function_tokens":["def","cd","(","path",")",":","old_path","=","os",".","getcwd","(",")","os",".","chdir","(","path",")","try",":","yield","finally",":","os",".","chdir","(","old_path",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/helpers.py#L108-L117"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/helpers.py","language":"python","identifier":"model_to_dictionary","parameters":"(model, exclude_keys=None)","argument_list":"","return_statement":"return model_dict","docstring":"Converts the given SQLAlchemy model into a dictionary, primarily used\n for serialization to JSON.\n\n - Includes columns, relationships, and properties decorated with\n `@property` (useful for calculated values from relationships).\n - Excludes the keys in `exclude_keys` and the keys in the given model's\n `exclude_keys` property or attribute.\n - Excludes any property or attribute with a leading underscore.","docstring_summary":"Converts the given SQLAlchemy model into a dictionary, primarily used\n for serialization to JSON.","docstring_tokens":["Converts","the","given","SQLAlchemy","model","into","a","dictionary","primarily","used","for","serialization","to","JSON","."],"function":"def model_to_dictionary(model, exclude_keys=None):\n \"\"\"Converts the given SQLAlchemy model into a dictionary, primarily used\n for serialization to JSON.\n\n - Includes columns, relationships, and properties decorated with\n `@property` (useful for calculated values from relationships).\n - Excludes the keys in `exclude_keys` and the keys in the given model's\n `exclude_keys` property or attribute.\n - Excludes any property or attribute with a leading underscore.\n \"\"\"\n model_dict = {}\n\n columns = model.__table__.columns.keys()\n relationships = model.__mapper__.relationships.keys()\n properties = []\n for key in list(set(dir(model)) - set(columns) - set(relationships)):\n if hasattr(type(model), key):\n attr = getattr(type(model), key)\n if not callable(attr) and isinstance(attr, property):\n properties.append(key)\n\n if exclude_keys is None:\n exclude_keys = []\n if hasattr(model, \"exclude_keys\"):\n exclude_keys.extend(model.exclude_keys)\n\n assert set(exclude_keys).issubset(set(properties).union(columns))\n\n for key in columns:\n if key not in exclude_keys and not key.startswith(\"_\"):\n model_dict[key] = getattr(model, key)\n\n for key in properties:\n if key not in exclude_keys and not key.startswith(\"_\"):\n model_dict[key] = getattr(model, key)\n\n for key in relationships:\n if key not in exclude_keys and not key.startswith(\"_\"):\n attr = getattr(model, key)\n if isinstance(attr, list):\n model_dict[key] = query_result_to_list(attr)\n else:\n model_dict[key] = model_to_dictionary(attr)\n\n return 
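`cd` is the classic temporary-chdir context manager: the `finally` guarantees the original directory is restored even if the body raises (as with `scoped_session`, the `yield` implies a `contextlib.contextmanager` decoration not shown in the record). Usage sketch:

```python
import os

from src.utils.helpers import cd  # path per the record

start = os.getcwd()
with cd("/tmp"):
    print(os.getcwd())         # /tmp (or /private/tmp on macOS, via symlink)
assert os.getcwd() == start    # restored on exit, raised exception or not
```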
model_dict","function_tokens":["def","model_to_dictionary","(","model",",","exclude_keys","=","None",")",":","model_dict","=","{","}","columns","=","model",".","__table__",".","columns",".","keys","(",")","relationships","=","model",".","__mapper__",".","relationships",".","keys","(",")","properties","=","[","]","for","key","in","list","(","set","(","dir","(","model",")",")","-","set","(","columns",")","-","set","(","relationships",")",")",":","if","hasattr","(","type","(","model",")",",","key",")",":","attr","=","getattr","(","type","(","model",")",",","key",")","if","not","callable","(","attr",")","and","isinstance","(","attr",",","property",")",":","properties",".","append","(","key",")","if","exclude_keys","is","None",":","exclude_keys","=","[","]","if","hasattr","(","model",",","\"exclude_keys\"",")",":","exclude_keys",".","extend","(","model",".","exclude_keys",")","assert","set","(","exclude_keys",")",".","issubset","(","set","(","properties",")",".","union","(","columns",")",")","for","key","in","columns",":","if","key","not","in","exclude_keys","and","not","key",".","startswith","(","\"_\"",")",":","model_dict","[","key","]","=","getattr","(","model",",","key",")","for","key","in","properties",":","if","key","not","in","exclude_keys","and","not","key",".","startswith","(","\"_\"",")",":","model_dict","[","key","]","=","getattr","(","model",",","key",")","for","key","in","relationships",":","if","key","not","in","exclude_keys","and","not","key",".","startswith","(","\"_\"",")",":","attr","=","getattr","(","model",",","key",")","if","isinstance","(","attr",",","list",")",":","model_dict","[","key","]","=","query_result_to_list","(","attr",")","else",":","model_dict","[","key","]","=","model_to_dictionary","(","attr",")","return","model_dict"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/helpers.py#L147-L191"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/helpers.py","language":"python","identifier":"tuple_to_model_dictionary","parameters":"(t, model)","argument_list":"","return_statement":"return dict(zip(keys, t))","docstring":"Converts the given tuple into the proper SQLAlchemy model object in dictionary form.","docstring_summary":"Converts the given tuple into the proper SQLAlchemy model object in dictionary form.","docstring_tokens":["Converts","the","given","tuple","into","the","proper","SQLAlchemy","model","object","in","dictionary","form","."],"function":"def tuple_to_model_dictionary(t, model):\n \"\"\"Converts the given tuple into the proper SQLAlchemy model object in dictionary form.\"\"\"\n keys = model.__table__.columns.keys()\n assert len(t) == len(keys)\n\n return dict(zip(keys, t))","function_tokens":["def","tuple_to_model_dictionary","(","t",",","model",")",":","keys","=","model",".","__table__",".","columns",".","keys","(",")","assert","len","(","t",")","==","len","(","keys",")","return","dict","(","zip","(","keys",",","t",")",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/helpers.py#L200-L205"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/helpers.py","language":"python","identifier":"remove_test_file","parameters":"(filepath)","argument_list":"","return_statement":"","docstring":"Try and remove a file, no-op 
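The two serializers differ in what they skip: `model_to_dictionary` walks columns, relationships, and `@property` attributes while dropping anything underscore-prefixed or listed in `exclude_keys`, whereas `tuple_to_model_dictionary` blindly zips a raw row against the column order. A hedged sketch with a throwaway SQLAlchemy 1.4-style declarative model (expected outputs shown as comments):

```python
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import declarative_base

from src.utils.helpers import model_to_dictionary, tuple_to_model_dictionary

Base = declarative_base()

class Track(Base):                  # minimal illustrative model
    __tablename__ = "tracks"
    track_id = Column(Integer, primary_key=True)
    title = Column(String)
    _blockhash = Column(String)     # underscore-prefixed: serializer skips it

t = Track(track_id=7, title="My Song", _blockhash="0xabc")
print(model_to_dictionary(t))       # {'track_id': 7, 'title': 'My Song'}

print(tuple_to_model_dictionary((7, "My Song", "0xabc"), Track))
# {'track_id': 7, 'title': 'My Song', '_blockhash': '0xabc'}  -- no filtering
```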
if not present","docstring_summary":"Try and remove a file, no-op if not present","docstring_tokens":["Try","and","remove","a","file","no","-","op","if","not","present"],"function":"def remove_test_file(filepath):\n \"\"\"Try and remove a file, no-op if not present\"\"\"\n try:\n os.remove(filepath)\n except OSError:\n pass","function_tokens":["def","remove_test_file","(","filepath",")",":","try",":","os",".","remove","(","filepath",")","except","OSError",":","pass"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/helpers.py#L316-L321"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/helpers.py","language":"python","identifier":"create_track_route_id","parameters":"(title, handle)","argument_list":"","return_statement":"return f\"{sanitized_handle}\/{sanitized_title}\"","docstring":"Constructs a track's route_id from an unsanitized title and handle.\n Resulting route_ids are of the shape `\/`.","docstring_summary":"Constructs a track's route_id from an unsanitized title and handle.\n Resulting route_ids are of the shape `\/`.","docstring_tokens":["Constructs","a","track","s","route_id","from","an","unsanitized","title","and","handle",".","Resulting","route_ids","are","of","the","shape","","\/","","."],"function":"def create_track_route_id(title, handle):\n \"\"\"\n Constructs a track's route_id from an unsanitized title and handle.\n Resulting route_ids are of the shape `\/`.\n \"\"\"\n sanitized_title = title.encode(\"utf-8\", \"ignore\").decode(\"utf-8\", \"ignore\")\n # Strip out invalid character\n sanitized_title = re.sub(\n r\"!|%|#|\\$|&|\\'|\\(|\\)|&|\\*|\\+|,|\\\/|:|;|=|\\?|@|\\[|\\]|\\x00\", \"\", sanitized_title\n )\n\n # Convert whitespaces to dashes\n sanitized_title = re.sub(r\"\\s+\", \"-\", sanitized_title)\n\n # Convert multiple dashes to single dashes\n sanitized_title = re.sub(r\"-+\", \"-\", sanitized_title)\n\n # Lowercase it\n sanitized_title = sanitized_title.lower()\n\n # Lowercase the handle\n sanitized_handle = handle.lower()\n\n return f\"{sanitized_handle}\/{sanitized_title}\"","function_tokens":["def","create_track_route_id","(","title",",","handle",")",":","sanitized_title","=","title",".","encode","(","\"utf-8\"",",","\"ignore\"",")",".","decode","(","\"utf-8\"",",","\"ignore\"",")","# Strip out invalid character","sanitized_title","=","re",".","sub","(","r\"!|%|#|\\$|&|\\'|\\(|\\)|&|\\*|\\+|,|\\\/|:|;|=|\\?|@|\\[|\\]|\\x00\"",",","\"\"",",","sanitized_title",")","# Convert whitespaces to dashes","sanitized_title","=","re",".","sub","(","r\"\\s+\"",",","\"-\"",",","sanitized_title",")","# Convert multiple dashes to single dashes","sanitized_title","=","re",".","sub","(","r\"-+\"",",","\"-\"",",","sanitized_title",")","# Lowercase it","sanitized_title","=","sanitized_title",".","lower","(",")","# Lowercase the handle","sanitized_handle","=","handle",".","lower","(",")","return","f\"{sanitized_handle}\/{sanitized_title}\""],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/helpers.py#L416-L439"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/helpers.py","language":"python","identifier":"create_track_slug","parameters":"(title, track_id, collision_id=0)","argument_list":"","return_statement":"return 
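Concretely, `create_track_route_id` strips punctuation, dash-joins whitespace, and lowercases both parts. Two traced examples of the resulting shape:

```python
from src.utils.helpers import create_track_route_id  # path per the record

create_track_route_id("My Awesome Track!", "DJHandle")
# -> 'djhandle/my-awesome-track'

create_track_route_id("Beats  &  Pieces, Vol. 1", "producer_1")
# -> 'producer_1/beats-pieces-vol.-1'   (note: '.' survives this sanitizer)
```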
sanitized_title","docstring":"Converts the title of a track into a URL-friendly 'slug'\n\n Strips special characters, replaces spaces with dashes, converts to\n lowercase, and appends a collision_id if non-zero.\n\n If the sanitized title is entirely escaped (empty string), use the\n hashed track_id.\n\n Example:\n (Title=\"My Awesome Track!\", collision_id=2) => \"my-awesome-track-2\"","docstring_summary":"Converts the title of a track into a URL-friendly 'slug'","docstring_tokens":["Converts","the","title","of","a","track","into","a","URL","-","friendly","slug"],"function":"def create_track_slug(title, track_id, collision_id=0):\n \"\"\"Converts the title of a track into a URL-friendly 'slug'\n\n Strips special characters, replaces spaces with dashes, converts to\n lowercase, and appends a collision_id if non-zero.\n\n If the sanitized title is entirely escaped (empty string), use the\n hashed track_id.\n\n Example:\n (Title=\"My Awesome Track!\", collision_id=2) => \"my-awesome-track-2\"\n \"\"\"\n sanitized_title = title.encode(\"utf-8\", \"ignore\").decode(\"utf-8\", \"ignore\")\n # Strip out invalid character\n sanitized_title = re.sub(\n r\"!|%|#|\\$|&|\\'|\\(|\\)|&|\\*|\\+|,|\\\/|:|;|=|\\?|@|\\[|\\]|\\x00|\\^|\\.|\\{|\\}|\\\"\",\n \"\",\n sanitized_title,\n )\n\n # Convert whitespaces to dashes\n sanitized_title = re.sub(r\"\\s+\", \"-\", sanitized_title.strip())\n sanitized_title = re.sub(r\"-+\", \"-\", sanitized_title)\n\n sanitized_title = sanitized_title.lower()\n # This means that the entire title was sanitized away, use the id\n # for the slug.\n if not sanitized_title:\n sanitized_title = encode_int_id(track_id)\n\n if collision_id > 0:\n sanitized_title = f\"{sanitized_title}-{collision_id}\"\n\n return sanitized_title","function_tokens":["def","create_track_slug","(","title",",","track_id",",","collision_id","=","0",")",":","sanitized_title","=","title",".","encode","(","\"utf-8\"",",","\"ignore\"",")",".","decode","(","\"utf-8\"",",","\"ignore\"",")","# Strip out invalid character","sanitized_title","=","re",".","sub","(","r\"!|%|#|\\$|&|\\'|\\(|\\)|&|\\*|\\+|,|\\\/|:|;|=|\\?|@|\\[|\\]|\\x00|\\^|\\.|\\{|\\}|\\\"\"",",","\"\"",",","sanitized_title",",",")","# Convert whitespaces to dashes","sanitized_title","=","re",".","sub","(","r\"\\s+\"",",","\"-\"",",","sanitized_title",".","strip","(",")",")","sanitized_title","=","re",".","sub","(","r\"-+\"",",","\"-\"",",","sanitized_title",")","sanitized_title","=","sanitized_title",".","lower","(",")","# This means that the entire title was sanitized away, use the id","# for the slug.","if","not","sanitized_title",":","sanitized_title","=","encode_int_id","(","track_id",")","if","collision_id",">","0",":","sanitized_title","=","f\"{sanitized_title}-{collision_id}\"","return","sanitized_title"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/helpers.py#L442-L475"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/redis_metrics.py","language":"python","identifier":"parse_metrics_key","parameters":"(key)","argument_list":"","return_statement":"return source, ip, date_time","docstring":"Validates that a key is correctly formatted and returns\n the source: (routes|applications), ip address, and date of key","docstring_summary":"Validates that a key is correctly formatted and returns\n the source: (routes|applications), ip address, and date of 
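`create_track_slug` mirrors the route-id sanitizer but with a wider strip list (it also drops `^`, `.`, `{`, `}`, and `"`), an id-based fallback, and a collision suffix. Examples following the docstring:

```python
from src.utils.helpers import create_track_slug  # path per the record

create_track_slug("My Awesome Track!", 42)      # -> 'my-awesome-track'
create_track_slug("My Awesome Track!", 42, 2)   # -> 'my-awesome-track-2'
create_track_slug("!!!", 42)                    # -> encode_int_id(42): the title
                                                #    sanitized away entirely, so
                                                #    the hashed track id is used
```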
key","docstring_tokens":["Validates","that","a","key","is","correctly","formatted","and","returns","the","source",":","(","routes|applications",")","ip","address","and","date","of","key"],"function":"def parse_metrics_key(key):\n \"\"\"\n Validates that a key is correctly formatted and returns\n the source: (routes|applications), ip address, and date of key\n \"\"\"\n if not key.startswith(metrics_prefix):\n logger.warning(f\"Bad redis key inserted w\/out metrics prefix {key}\")\n return None\n\n fragments = key.split(\":\")\n if len(fragments) != 5:\n logger.warning(f\"Bad redis key inserted: must have 5 parts {key}\")\n return None\n\n _, source, ip, date, time = fragments\n # Replace the ipv6 _ delimiter back to :\n ip = ip.replace(\"_\", \":\")\n if source not in (metrics_routes, metrics_applications):\n logger.warning(f\"Bad redis key inserted: must be routes or application {key}\")\n return None\n date_time = datetime.strptime(f\"{date}:{time}\", datetime_format)\n\n return source, ip, date_time","function_tokens":["def","parse_metrics_key","(","key",")",":","if","not","key",".","startswith","(","metrics_prefix",")",":","logger",".","warning","(","f\"Bad redis key inserted w\/out metrics prefix {key}\"",")","return","None","fragments","=","key",".","split","(","\":\"",")","if","len","(","fragments",")","!=","5",":","logger",".","warning","(","f\"Bad redis key inserted: must have 5 parts {key}\"",")","return","None","_",",","source",",","ip",",","date",",","time","=","fragments","# Replace the ipv6 _ delimiter back to :","ip","=","ip",".","replace","(","\"_\"",",","\":\"",")","if","source","not","in","(","metrics_routes",",","metrics_applications",")",":","logger",".","warning","(","f\"Bad redis key inserted: must be routes or application {key}\"",")","return","None","date_time","=","datetime",".","strptime","(","f\"{date}:{time}\"",",","datetime_format",")","return","source",",","ip",",","date_time"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/redis_metrics.py#L73-L95"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/redis_metrics.py","language":"python","identifier":"merge_metrics","parameters":"(metrics, end_time, metric_type, db)","argument_list":"","return_statement":"","docstring":"Merge this node's metrics to those received from other discovery nodes:\n Update unique and total, daily and monthly metrics for routes and apps\n\n Dump the cached metrics so that if this node temporarily goes down,\n we can recover the IPs and app names to perform the calculation and deduplication\n when the node comes back up\n\n Clean up old metrics from cache\n\n Persist metrics in the database","docstring_summary":"Merge this node's metrics to those received from other discovery nodes:\n Update unique and total, daily and monthly metrics for routes and apps","docstring_tokens":["Merge","this","node","s","metrics","to","those","received","from","other","discovery","nodes",":","Update","unique","and","total","daily","and","monthly","metrics","for","routes","and","apps"],"function":"def merge_metrics(metrics, end_time, metric_type, db):\n \"\"\"\n Merge this node's metrics to those received from other discovery nodes:\n Update unique and total, daily and monthly metrics for routes and apps\n\n Dump the cached metrics so that if this node temporarily goes down,\n we can recover the IPs and app names to perform the 
calculation and deduplication\n when the node comes back up\n\n Clean up old metrics from cache\n\n Persist metrics in the database\n \"\"\"\n logger.info(f\"about to merge {metric_type} metrics: {len(metrics)} new entries\")\n day = end_time.split(\":\")[0]\n month = f\"{day[:7]}\/01\"\n\n daily_key = daily_route_metrics if metric_type == \"route\" else daily_app_metrics\n daily_metrics_str = redis_get_or_restore(REDIS, daily_key)\n daily_metrics = json.loads(daily_metrics_str) if daily_metrics_str else {}\n\n monthly_key = (\n monthly_route_metrics if metric_type == \"route\" else monthly_app_metrics\n )\n monthly_metrics_str = redis_get_or_restore(REDIS, monthly_key)\n monthly_metrics = json.loads(monthly_metrics_str) if monthly_metrics_str else {}\n\n if day not in daily_metrics:\n daily_metrics[day] = {}\n if month not in monthly_metrics:\n monthly_metrics[month] = {}\n\n # only relevant for unique users metrics\n unique_daily_count = 0\n unique_monthly_count = 0\n\n # only relevant for app metrics\n app_count = {}\n\n # update daily and monthly metrics, which could be route metrics or app metrics\n # if route metrics, new_value and new_count would be an IP and the number of requests from it\n # otherwise, new_value and new_count would be an app and the number of requests from it\n for new_value, new_count in metrics.items():\n if metric_type == \"route\" and new_value not in daily_metrics[day]:\n unique_daily_count += 1\n if metric_type == \"route\" and new_value not in monthly_metrics[month]:\n unique_monthly_count += 1\n if metric_type == \"app\":\n app_count[new_value] = new_count\n daily_metrics[day][new_value] = (\n daily_metrics[day][new_value] + new_count\n if new_value in daily_metrics[day]\n else new_count\n )\n monthly_metrics[month][new_value] = (\n monthly_metrics[month][new_value] + new_count\n if new_value in monthly_metrics[month]\n else new_count\n )\n\n # clean up metrics METRICS_INTERVAL after the end of the day from daily_metrics\n yesterday_str = (datetime.utcnow() - timedelta(days=1)).strftime(\n datetime_format_secondary\n )\n daily_metrics = {\n timestamp: metrics\n for timestamp, metrics in daily_metrics.items()\n if timestamp > yesterday_str\n }\n if daily_metrics:\n redis_set_and_dump(REDIS, daily_key, json.dumps(daily_metrics))\n logger.info(f\"updated cached daily {metric_type} metrics\")\n\n # clean up metrics METRICS_INTERVAL after the end of the month from monthly_metrics\n thirty_one_days_ago = (datetime.utcnow() - timedelta(days=31)).strftime(\n datetime_format_secondary\n )\n monthly_metrics = {\n timestamp: metrics\n for timestamp, metrics in monthly_metrics.items()\n if timestamp > thirty_one_days_ago\n }\n if monthly_metrics:\n redis_set_and_dump(REDIS, monthly_key, json.dumps(monthly_metrics))\n logger.info(f\"updated cached monthly {metric_type} metrics\")\n\n # persist aggregated metrics from other nodes\n day_obj = datetime.strptime(day, day_format).date()\n month_obj = datetime.strptime(month, day_format).date()\n if metric_type == \"route\":\n persist_route_metrics(\n db,\n day_obj,\n month_obj,\n sum(metrics.values()),\n unique_daily_count,\n unique_monthly_count,\n )\n else:\n persist_app_metrics(db, day_obj, month_obj, app_count)","function_tokens":["def","merge_metrics","(","metrics",",","end_time",",","metric_type",",","db",")",":","logger",".","info","(","f\"about to merge {metric_type} metrics: {len(metrics)} new 
entries\"",")","day","=","end_time",".","split","(","\":\"",")","[","0","]","month","=","f\"{day[:7]}\/01\"","daily_key","=","daily_route_metrics","if","metric_type","==","\"route\"","else","daily_app_metrics","daily_metrics_str","=","redis_get_or_restore","(","REDIS",",","daily_key",")","daily_metrics","=","json",".","loads","(","daily_metrics_str",")","if","daily_metrics_str","else","{","}","monthly_key","=","(","monthly_route_metrics","if","metric_type","==","\"route\"","else","monthly_app_metrics",")","monthly_metrics_str","=","redis_get_or_restore","(","REDIS",",","monthly_key",")","monthly_metrics","=","json",".","loads","(","monthly_metrics_str",")","if","monthly_metrics_str","else","{","}","if","day","not","in","daily_metrics",":","daily_metrics","[","day","]","=","{","}","if","month","not","in","monthly_metrics",":","monthly_metrics","[","month","]","=","{","}","# only relevant for unique users metrics","unique_daily_count","=","0","unique_monthly_count","=","0","# only relevant for app metrics","app_count","=","{","}","# update daily and monthly metrics, which could be route metrics or app metrics","# if route metrics, new_value and new_count would be an IP and the number of requests from it","# otherwise, new_value and new_count would be an app and the number of requests from it","for","new_value",",","new_count","in","metrics",".","items","(",")",":","if","metric_type","==","\"route\"","and","new_value","not","in","daily_metrics","[","day","]",":","unique_daily_count","+=","1","if","metric_type","==","\"route\"","and","new_value","not","in","monthly_metrics","[","month","]",":","unique_monthly_count","+=","1","if","metric_type","==","\"app\"",":","app_count","[","new_value","]","=","new_count","daily_metrics","[","day","]","[","new_value","]","=","(","daily_metrics","[","day","]","[","new_value","]","+","new_count","if","new_value","in","daily_metrics","[","day","]","else","new_count",")","monthly_metrics","[","month","]","[","new_value","]","=","(","monthly_metrics","[","month","]","[","new_value","]","+","new_count","if","new_value","in","monthly_metrics","[","month","]","else","new_count",")","# clean up metrics METRICS_INTERVAL after the end of the day from daily_metrics","yesterday_str","=","(","datetime",".","utcnow","(",")","-","timedelta","(","days","=","1",")",")",".","strftime","(","datetime_format_secondary",")","daily_metrics","=","{","timestamp",":","metrics","for","timestamp",",","metrics","in","daily_metrics",".","items","(",")","if","timestamp",">","yesterday_str","}","if","daily_metrics",":","redis_set_and_dump","(","REDIS",",","daily_key",",","json",".","dumps","(","daily_metrics",")",")","logger",".","info","(","f\"updated cached daily {metric_type} metrics\"",")","# clean up metrics METRICS_INTERVAL after the end of the month from monthly_metrics","thirty_one_days_ago","=","(","datetime",".","utcnow","(",")","-","timedelta","(","days","=","31",")",")",".","strftime","(","datetime_format_secondary",")","monthly_metrics","=","{","timestamp",":","metrics","for","timestamp",",","metrics","in","monthly_metrics",".","items","(",")","if","timestamp",">","thirty_one_days_ago","}","if","monthly_metrics",":","redis_set_and_dump","(","REDIS",",","monthly_key",",","json",".","dumps","(","monthly_metrics",")",")","logger",".","info","(","f\"updated cached monthly {metric_type} metrics\"",")","# persist aggregated metrics from other 
nodes","day_obj","=","datetime",".","strptime","(","day",",","day_format",")",".","date","(",")","month_obj","=","datetime",".","strptime","(","month",",","day_format",")",".","date","(",")","if","metric_type","==","\"route\"",":","persist_route_metrics","(","db",",","day_obj",",","month_obj",",","sum","(","metrics",".","values","(",")",")",",","unique_daily_count",",","unique_monthly_count",",",")","else",":","persist_app_metrics","(","db",",","day_obj",",","month_obj",",","app_count",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/redis_metrics.py#L305-L404"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/redis_metrics.py","language":"python","identifier":"extract_app_name_key","parameters":"()","argument_list":"","return_statement":"return (application_key, application_name)","docstring":"Extracts the application name redis key and hash from the request\n The key should be of format:\n :::\n ie: \"API_METRICS:applications:192.168.0.1:2020\/08\/04:14\"\n The hash should be of format:\n \n ie: \"audius_dapp\"","docstring_summary":"Extracts the application name redis key and hash from the request\n The key should be of format:\n :::\n ie: \"API_METRICS:applications:192.168.0.1:2020\/08\/04:14\"\n The hash should be of format:\n \n ie: \"audius_dapp\"","docstring_tokens":["Extracts","the","application","name","redis","key","and","hash","from","the","request","The","key","should","be","of","format",":","",":","",":","",":","","ie",":","API_METRICS",":","applications",":","192",".","168",".","0",".","1",":","2020","\/","08","\/","04",":","14","The","hash","should","be","of","format",":","","ie",":","audius_dapp"],"function":"def extract_app_name_key():\n \"\"\"\n Extracts the application name redis key and hash from the request\n The key should be of format:\n :::\n ie: \"API_METRICS:applications:192.168.0.1:2020\/08\/04:14\"\n The hash should be of format:\n \n ie: \"audius_dapp\"\n \"\"\"\n application_name = request.args.get(app_name_param, type=str, default=None)\n ip = get_request_ip(request)\n date_time = get_rounded_date_time().strftime(datetime_format)\n\n application_key = f\"{metrics_prefix}:{metrics_applications}:{ip}:{date_time}\"\n return (application_key, application_name)","function_tokens":["def","extract_app_name_key","(",")",":","application_name","=","request",".","args",".","get","(","app_name_param",",","type","=","str",",","default","=","None",")","ip","=","get_request_ip","(","request",")","date_time","=","get_rounded_date_time","(",")",".","strftime","(","datetime_format",")","application_key","=","f\"{metrics_prefix}:{metrics_applications}:{ip}:{date_time}\"","return","(","application_key",",","application_name",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/redis_metrics.py#L481-L496"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/redis_metrics.py","language":"python","identifier":"extract_route_key","parameters":"()","argument_list":"","return_statement":"return (route_key, route)","docstring":"Extracts the route redis key and hash from the request\n The key should be of format:\n :::\n ie: \"API_METRICS:routes:192.168.0.1:2020\/08\/04:14\"\n The hash should be of format:\n \n ie: 
\"\/v1\/tracks\/search?genre=rap&query=best\"","docstring_summary":"Extracts the route redis key and hash from the request\n The key should be of format:\n :::\n ie: \"API_METRICS:routes:192.168.0.1:2020\/08\/04:14\"\n The hash should be of format:\n \n ie: \"\/v1\/tracks\/search?genre=rap&query=best\"","docstring_tokens":["Extracts","the","route","redis","key","and","hash","from","the","request","The","key","should","be","of","format",":","",":","",":","",":","","ie",":","API_METRICS",":","routes",":","192",".","168",".","0",".","1",":","2020","\/","08","\/","04",":","14","The","hash","should","be","of","format",":","","","ie",":","\/","v1","\/","tracks","\/","search?genre","=","rap&query","=","best"],"function":"def extract_route_key():\n \"\"\"\n Extracts the route redis key and hash from the request\n The key should be of format:\n :::\n ie: \"API_METRICS:routes:192.168.0.1:2020\/08\/04:14\"\n The hash should be of format:\n \n ie: \"\/v1\/tracks\/search?genre=rap&query=best\"\n \"\"\"\n path = request.path\n req_args = request.args.items()\n req_args = stringify_query_params(req_args)\n route = f\"{path}?{req_args}\" if req_args else path\n ip = get_request_ip(request)\n date_time = get_rounded_date_time().strftime(datetime_format)\n\n route_key = f\"{metrics_prefix}:{metrics_routes}:{ip}:{date_time}\"\n return (route_key, route)","function_tokens":["def","extract_route_key","(",")",":","path","=","request",".","path","req_args","=","request",".","args",".","items","(",")","req_args","=","stringify_query_params","(","req_args",")","route","=","f\"{path}?{req_args}\"","if","req_args","else","path","ip","=","get_request_ip","(","request",")","date_time","=","get_rounded_date_time","(",")",".","strftime","(","datetime_format",")","route_key","=","f\"{metrics_prefix}:{metrics_routes}:{ip}:{date_time}\"","return","(","route_key",",","route",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/redis_metrics.py#L499-L517"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/redis_metrics.py","language":"python","identifier":"record_metrics","parameters":"(func)","argument_list":"","return_statement":"return wrap","docstring":"The metrics decorator records each time a route is hit in redis\n The number of times a route is hit and an app_name query param are used are recorded.\n A redis a redis hash map is used to store each of these values.\n\n NOTE: This must be placed before the cache decorator in order for the redis incr to occur","docstring_summary":"The metrics decorator records each time a route is hit in redis\n The number of times a route is hit and an app_name query param are used are recorded.\n A redis a redis hash map is used to store each of these values.","docstring_tokens":["The","metrics","decorator","records","each","time","a","route","is","hit","in","redis","The","number","of","times","a","route","is","hit","and","an","app_name","query","param","are","used","are","recorded",".","A","redis","a","redis","hash","map","is","used","to","store","each","of","these","values","."],"function":"def record_metrics(func):\n \"\"\"\n The metrics decorator records each time a route is hit in redis\n The number of times a route is hit and an app_name query param are used are recorded.\n A redis a redis hash map is used to store each of these values.\n\n NOTE: This must be placed before the cache decorator in order for the 
redis incr to occur\n \"\"\"\n\n @functools.wraps(func)\n def wrap(*args, **kwargs):\n try:\n application_key, application_name = extract_app_name_key()\n route_key, route = extract_route_key()\n REDIS.hincrby(route_key, route, 1)\n if application_name:\n REDIS.hincrby(application_key, application_name, 1)\n\n record_aggregate_metrics()\n except Exception as e:\n logger.error(\"Error while recording metrics: %s\", e.message)\n\n return func(*args, **kwargs)\n\n return wrap","function_tokens":["def","record_metrics","(","func",")",":","@","functools",".","wraps","(","func",")","def","wrap","(","*","args",",","*","*","kwargs",")",":","try",":","application_key",",","application_name","=","extract_app_name_key","(",")","route_key",",","route","=","extract_route_key","(",")","REDIS",".","hincrby","(","route_key",",","route",",","1",")","if","application_name",":","REDIS",".","hincrby","(","application_key",",","application_name",",","1",")","record_aggregate_metrics","(",")","except","Exception","as","e",":","logger",".","error","(","\"Error while recording metrics: %s\"",",","e",".","message",")","return","func","(","*","args",",","*","*","kwargs",")","return","wrap"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/redis_metrics.py#L620-L644"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/varint.py","language":"python","identifier":"to_varint","parameters":"(number: int)","argument_list":"","return_statement":"return buf","docstring":"Pack `number` into varint bytes.\n\n :param number: Integer value to pack into varint bytes.\n :return: Varint bytes array.","docstring_summary":"Pack `number` into varint bytes.","docstring_tokens":["Pack","number","into","varint","bytes","."],"function":"def to_varint(number: int) -> bytes:\n \"\"\"\n Pack `number` into varint bytes.\n\n :param number: Integer value to pack into varint bytes.\n :return: Varint bytes array.\n \"\"\"\n buf = b\"\"\n while True:\n towrite = number & 0x7F\n number >>= 7\n if number:\n buf += _byte(towrite | 0x80)\n else:\n buf += _byte(towrite)\n break\n return buf","function_tokens":["def","to_varint","(","number",":","int",")","->","bytes",":","buf","=","b\"\"","while","True",":","towrite","=","number","&","0x7F","number",">>=","7","if","number",":","buf","+=","_byte","(","towrite","|","0x80",")","else",":","buf","+=","_byte","(","towrite",")","break","return","buf"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/varint.py#L14-L30"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/varint.py","language":"python","identifier":"from_stream","parameters":"(stream: BytesIO, offset: int = 0)","argument_list":"","return_statement":"","docstring":"Read a varint from `stream`.\n\n :param stream: Input bytes stream.\n :param offset: Offset in bytes from which to begin decoding.\n :return: Tuple containing decoded integer and offset of the first byte after encoded integer in source stream.","docstring_summary":"Read a varint from `stream`.","docstring_tokens":["Read","a","varint","from","stream","."],"function":"def from_stream(stream: BytesIO, offset: int = 0) -> Tuple[int, int]:\n \"\"\"\n Read a varint from `stream`.\n\n :param stream: Input bytes stream.\n :param offset: Offset in bytes from 
which to begin decoding.\n :return: Tuple containing decoded integer and offset of the first byte after encoded integer in source stream.\n \"\"\"\n shift = 0\n result = 0\n stream.seek(offset)\n while True:\n i = _read_one(stream)\n result |= (i & 0x7F) << shift\n shift += 7\n if not i & 0x80:\n return result, offset + shift \/\/ 7","function_tokens":["def","from_stream","(","stream",":","BytesIO",",","offset",":","int","=","0",")","->","Tuple","[","int",",","int","]",":","shift","=","0","result","=","0","stream",".","seek","(","offset",")","while","True",":","i","=","_read_one","(","stream",")","result","|=","(","i","&","0x7F",")","<<","shift","shift","+=","7","if","not","i","&","0x80",":","return","result",",","offset","+","shift","\/\/","7"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/varint.py#L33-L49"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/varint.py","language":"python","identifier":"from_varint","parameters":"(data: bytes, offset: int = 0)","argument_list":"","return_statement":"return from_stream(BytesIO(data), offset)","docstring":"Return an integer value obtained by decoding varint data.\n\n :param data: Input varint bytes array.\n :param offset: Offset in bytes from which to begin decoding.\n :return: Tuple containing decoded integer and offset of the first byte after encoded integer in source bytes array.","docstring_summary":"Return an integer value obtained by decoding varint data.","docstring_tokens":["Return","an","integer","value","obtained","by","decoding","varint","data","."],"function":"def from_varint(data: bytes, offset: int = 0) -> Tuple[int, int]:\n \"\"\"\n Return an integer value obtained by decoding varint data.\n\n :param data: Input varint bytes array.\n :param offset: Offset in bytes from which to begin decoding.\n :return: Tuple containing decoded integer and offset of the first byte after encoded integer in source bytes array.\n \"\"\"\n return from_stream(BytesIO(data), offset)","function_tokens":["def","from_varint","(","data",":","bytes",",","offset",":","int","=","0",")","->","Tuple","[","int",",","int","]",":","return","from_stream","(","BytesIO","(","data",")",",","offset",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/varint.py#L52-L60"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/varint.py","language":"python","identifier":"_read_one","parameters":"(stream: BytesIO)","argument_list":"","return_statement":"return ord(c)","docstring":"Read a byte from a file-like object (as an integer)\n\n :param stream: Input bytes stream.\n :return: Decoded integer value\n :raises: EOFError if the stream ends while reading bytes.","docstring_summary":"Read a byte from a file-like object (as an integer)","docstring_tokens":["Read","a","byte","from","a","file","-","like","object","(","as","an","integer",")"],"function":"def _read_one(stream: BytesIO) -> int:\n \"\"\"\n Read a byte from a file-like object (as an integer)\n\n :param stream: Input bytes stream.\n :return: Decoded integer value\n :raises: EOFError if the stream ends while reading bytes.\n \"\"\"\n c = stream.read(1)\n if c == b\"\":\n raise EOFError(\"Unexpected EOF while reading bytes\")\n return 
ord(c)","function_tokens":["def","_read_one","(","stream",":","BytesIO",")","->","int",":","c","=","stream",".","read","(","1",")","if","c","==","b\"\"",":","raise","EOFError","(","\"Unexpected EOF while reading bytes\"",")","return","ord","(","c",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/varint.py#L68-L79"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/auth_middleware.py","language":"python","identifier":"auth_middleware","parameters":"(**kwargs)","argument_list":"","return_statement":"return outer_wrap","docstring":"Auth middleware decorator.\n\n Should decorate a route and be used to supply an authed user to\n the query behind a route.\n\n Example:\n\n @auth_middleware\n def get(self):\n args = track_slug_parser.parse_args()\n slug, handle = (args.get(\"slug\"), args.get(\"handle\"))\n routes = args.get(\"route\")\n\n @functools.wraps simply ensures that if Python introspects `inner_wrap`, it refers to\n `func` rather than `inner_wrap`.","docstring_summary":"Auth middleware decorator.","docstring_tokens":["Auth","middleware","decorator","."],"function":"def auth_middleware(**kwargs):\n \"\"\"\n Auth middleware decorator.\n\n Should decorate a route and be used to supply an authed user to\n the query behind a route.\n\n Example:\n\n @auth_middleware\n def get(self):\n args = track_slug_parser.parse_args()\n slug, handle = (args.get(\"slug\"), args.get(\"handle\"))\n routes = args.get(\"route\")\n\n @functools.wraps simply ensures that if Python introspects `inner_wrap`, it refers to\n `func` rather than `inner_wrap`.\n \"\"\"\n\n def outer_wrap(func):\n @functools.wraps(func)\n def inner_wrap(*args, **kwargs):\n message = request.headers.get(MESSAGE_HEADER)\n signature = request.headers.get(SIGNATURE_HEADER)\n\n authed_user_id = None\n if message and signature:\n web3 = web3_provider.get_web3()\n encoded_to_recover = encode_defunct(text=message)\n wallet = web3.eth.account.recover_message(\n encoded_to_recover, signature=signature\n )\n db = db_session.get_db_read_replica()\n with db.scoped_session() as session:\n user = (\n session.query(User.user_id)\n .filter(\n # Convert checksum wallet to lowercase\n User.wallet == wallet.lower(),\n User.is_current == True,\n )\n .first()\n )\n if user:\n authed_user_id = user.user_id\n return func(*args, **kwargs, authed_user_id=authed_user_id)\n\n return inner_wrap\n\n return outer_wrap","function_tokens":["def","auth_middleware","(","*","*","kwargs",")",":","def","outer_wrap","(","func",")",":","@","functools",".","wraps","(","func",")","def","inner_wrap","(","*","args",",","*","*","kwargs",")",":","message","=","request",".","headers",".","get","(","MESSAGE_HEADER",")","signature","=","request",".","headers",".","get","(","SIGNATURE_HEADER",")","authed_user_id","=","None","if","message","and","signature",":","web3","=","web3_provider",".","get_web3","(",")","encoded_to_recover","=","encode_defunct","(","text","=","message",")","wallet","=","web3",".","eth",".","account",".","recover_message","(","encoded_to_recover",",","signature","=","signature",")","db","=","db_session",".","get_db_read_replica","(",")","with","db",".","scoped_session","(",")","as","session",":","user","=","(","session",".","query","(","User",".","user_id",")",".","filter","(","# Convert checksum wallet to 
lowercase","User",".","wallet","==","wallet",".","lower","(",")",",","User",".","is_current","==","True",",",")",".","first","(",")",")","if","user",":","authed_user_id","=","user",".","user_id","return","func","(","*","args",",","*","*","kwargs",",","authed_user_id","=","authed_user_id",")","return","inner_wrap","return","outer_wrap"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/auth_middleware.py#L12-L61"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/db_session.py","language":"python","identifier":"get_db","parameters":"()","argument_list":"","return_statement":"return current_app.db_session_manager","docstring":"Connect to the configured database. The connection\n is unique for each request and will be reused if this is called\n again.","docstring_summary":"Connect to the configured database. The connection\n is unique for each request and will be reused if this is called\n again.","docstring_tokens":["Connect","to","the","configured","database",".","The","connection","is","unique","for","each","request","and","will","be","reused","if","this","is","called","again","."],"function":"def get_db():\n \"\"\"Connect to the configured database. The connection\n is unique for each request and will be reused if this is called\n again.\n \"\"\"\n return current_app.db_session_manager","function_tokens":["def","get_db","(",")",":","return","current_app",".","db_session_manager"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/db_session.py#L4-L9"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/db_session.py","language":"python","identifier":"get_db_read_replica","parameters":"()","argument_list":"","return_statement":"return current_app.db_read_replica_session_manager","docstring":"Connect to the configured database. The connection\n is unique for each request and will be reused if this is called\n again.","docstring_summary":"Connect to the configured database. The connection\n is unique for each request and will be reused if this is called\n again.","docstring_tokens":["Connect","to","the","configured","database",".","The","connection","is","unique","for","each","request","and","will","be","reused","if","this","is","called","again","."],"function":"def get_db_read_replica():\n \"\"\"Connect to the configured database. 
The connection\n is unique for each request and will be reused if this is called\n again.\n \"\"\"\n return current_app.db_read_replica_session_manager","function_tokens":["def","get_db_read_replica","(",")",":","return","current_app",".","db_read_replica_session_manager"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/db_session.py#L12-L17"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/redis_cache.py","language":"python","identifier":"use_redis_cache","parameters":"(key, ttl_sec, work_func)","argument_list":"","return_statement":"return to_cache","docstring":"Attempts to return value by key, otherwise caches and returns `work_func`","docstring_summary":"Attempts to return value by key, otherwise caches and returns `work_func`","docstring_tokens":["Attempts","to","return","value","by","key","otherwise","caches","and","returns","work_func"],"function":"def use_redis_cache(key, ttl_sec, work_func):\n \"\"\"Attempts to return value by key, otherwise caches and returns `work_func`\"\"\"\n redis = redis_connection.get_redis()\n cached_value = get_pickled_key(redis, key)\n if cached_value:\n return cached_value\n to_cache = work_func()\n pickle_and_set(redis, key, to_cache, ttl_sec)\n return to_cache","function_tokens":["def","use_redis_cache","(","key",",","ttl_sec",",","work_func",")",":","redis","=","redis_connection",".","get_redis","(",")","cached_value","=","get_pickled_key","(","redis",",","key",")","if","cached_value",":","return","cached_value","to_cache","=","work_func","(",")","pickle_and_set","(","redis",",","key",",","to_cache",",","ttl_sec",")","return","to_cache"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/redis_cache.py#L52-L60"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/redis_cache.py","language":"python","identifier":"cache","parameters":"(**kwargs)","argument_list":"","return_statement":"return outer_wrap","docstring":"Cache decorator.\n Should be called with `@cache(ttl_sec=123, transform=transform_response, cache_prefix_override='some-prefix')`\n\n Arguments:\n ttl_sec: optional,number The time in seconds to cache the response if\n status code < 400\n transform: optional,func The transform function of the wrapped function\n to convert the function response to request response\n cache_prefix_override: optional,the prefix for the cache key to use\n currently the cache decorator function has a default prefix for public API routes\n this param allows us to override the prefix for the internal API routes and avoid confusion\n\n Usage Notes:\n If the wrapped function returns a tuple, the transform function will not\n be run on the response. The first item of the tuple must be serializable.\n\n If the wrapped function returns a single response, the transform function\n must be passed to the decorator. 
The wrapper function response must be\n serializable.\n\n Decorators in Python are just higher-order-functions that accept a function\n as a single parameter, and return a function that wraps the input function.\n\n In this case, because we need to pass kwargs into our decorator function,\n we need an additional layer of wrapping; the outermost function accepts the kwargs,\n and when called, returns the decorating function `outer_wrap`, which in turn returns\n the wrapped input function, `inner_wrap`.\n\n @functools.wraps simply ensures that if Python introspects `inner_wrap`, it refers to\n `func` rather than `inner_wrap`.","docstring_summary":"Cache decorator.\n Should be called with `@cache(ttl_sec=123, transform=transform_response, cache_prefix_override='some-prefix')`","docstring_tokens":["Cache","decorator",".","Should","be","called","with","@cache","(","ttl_sec","=","123","transform","=","transform_response","cache_prefix_override","=","some","-","prefix",")"],"function":"def cache(**kwargs):\n \"\"\"\n Cache decorator.\n Should be called with `@cache(ttl_sec=123, transform=transform_response, cache_prefix_override='some-prefix')`\n\n Arguments:\n ttl_sec: optional,number The time in seconds to cache the response if\n status code < 400\n transform: optional,func The transform function of the wrapped function\n to convert the function response to request response\n cache_prefix_override: optional,the prefix for the cache key to use\n currently the cache decorator function has a default prefix for public API routes\n this param allows us to override the prefix for the internal API routes and avoid confusion\n\n Usage Notes:\n If the wrapped function returns a tuple, the transform function will not\n be run on the response. The first item of the tuple must be serializable.\n\n If the wrapped function returns a single response, the transform function\n must be passed to the decorator. 
The wrapper function response must be\n serializable.\n\n Decorators in Python are just higher-order-functions that accept a function\n as a single parameter, and return a function that wraps the input function.\n\n In this case, because we need to pass kwargs into our decorator function,\n we need an additional layer of wrapping; the outermost function accepts the kwargs,\n and when called, returns the decorating function `outer_wrap`, which in turn returns\n the wrapped input function, `inner_wrap`.\n\n @functools.wraps simply ensures that if Python introspects `inner_wrap`, it refers to\n `func` rather than `inner_wrap`.\n \"\"\"\n ttl_sec = kwargs[\"ttl_sec\"] if \"ttl_sec\" in kwargs else default_ttl_sec\n transform = kwargs[\"transform\"] if \"transform\" in kwargs else None\n cache_prefix_override = (\n kwargs[\"cache_prefix_override\"] if \"cache_prefix_override\" in kwargs else None\n )\n redis = redis_connection.get_redis()\n\n def outer_wrap(func):\n @functools.wraps(func)\n def inner_wrap(*args, **kwargs):\n has_user_id = (\n \"user_id\" in request.args and request.args[\"user_id\"] is not None\n )\n key = extract_key(request.path, request.args.items(), cache_prefix_override)\n if not has_user_id:\n cached_resp = redis.get(key)\n\n if cached_resp:\n logger.debug(f\"Redis Cache - hit {key}\")\n try:\n deserialized = pickle.loads(cached_resp)\n if transform is not None:\n return transform(deserialized) # pylint: disable=E1102\n return deserialized, 200\n except Exception as e:\n logger.warning(f\"Unable to deserialize cached response: {e}\")\n\n logger.debug(f\"Redis Cache - miss {key}\")\n response = func(*args, **kwargs)\n\n if len(response) == 2:\n resp, status_code = response\n if status_code < 400:\n serialized = pickle.dumps(resp)\n redis.set(key, serialized, ttl_sec)\n return resp, status_code\n serialized = pickle.dumps(response)\n redis.set(key, serialized, ttl_sec)\n return transform(response) # pylint: disable=E1102\n\n return inner_wrap\n\n return outer_wrap","function_tokens":["def","cache","(","*","*","kwargs",")",":","ttl_sec","=","kwargs","[","\"ttl_sec\"","]","if","\"ttl_sec\"","in","kwargs","else","default_ttl_sec","transform","=","kwargs","[","\"transform\"","]","if","\"transform\"","in","kwargs","else","None","cache_prefix_override","=","(","kwargs","[","\"cache_prefix_override\"","]","if","\"cache_prefix_override\"","in","kwargs","else","None",")","redis","=","redis_connection",".","get_redis","(",")","def","outer_wrap","(","func",")",":","@","functools",".","wraps","(","func",")","def","inner_wrap","(","*","args",",","*","*","kwargs",")",":","has_user_id","=","(","\"user_id\"","in","request",".","args","and","request",".","args","[","\"user_id\"","]","is","not","None",")","key","=","extract_key","(","request",".","path",",","request",".","args",".","items","(",")",",","cache_prefix_override",")","if","not","has_user_id",":","cached_resp","=","redis",".","get","(","key",")","if","cached_resp",":","logger",".","debug","(","f\"Redis Cache - hit {key}\"",")","try",":","deserialized","=","pickle",".","loads","(","cached_resp",")","if","transform","is","not","None",":","return","transform","(","deserialized",")","# pylint: disable=E1102","return","deserialized",",","200","except","Exception","as","e",":","logger",".","warning","(","f\"Unable to deserialize cached response: {e}\"",")","logger",".","debug","(","f\"Redis Cache - miss 
{key}\"",")","response","=","func","(","*","args",",","*","*","kwargs",")","if","len","(","response",")","==","2",":","resp",",","status_code","=","response","if","status_code","<","400",":","serialized","=","pickle",".","dumps","(","resp",")","redis",".","set","(","key",",","serialized",",","ttl_sec",")","return","resp",",","status_code","serialized","=","pickle",".","dumps","(","response",")","redis",".","set","(","key",",","serialized",",","ttl_sec",")","return","transform","(","response",")","# pylint: disable=E1102","return","inner_wrap","return","outer_wrap"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/redis_cache.py#L82-L157"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/config.py","language":"python","identifier":"ConfigIni.read","parameters":"(self, filenames, encoding=None)","argument_list":"","return_statement":"return ret","docstring":"Overridden read() method to call parse_flask_section() at the end","docstring_summary":"Overridden read() method to call parse_flask_section() at the end","docstring_tokens":["Overridden","read","()","method","to","call","parse_flask_section","()","at","the","end"],"function":"def read(self, filenames, encoding=None):\n \"\"\"Overridden read() method to call parse_flask_section() at the end\"\"\"\n ret = configparser.ConfigParser.read(self, filenames, encoding)\n self.parse_flask_section()\n return ret","function_tokens":["def","read","(","self",",","filenames",",","encoding","=","None",")",":","ret","=","configparser",".","ConfigParser",".","read","(","self",",","filenames",",","encoding",")","self",".","parse_flask_section","(",")","return","ret"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/config.py#L51-L55"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/config.py","language":"python","identifier":"ConfigIni.parse_flask_section","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Parse the [flask] section of your config and hand off the config\n to the app in context.\n\n Config vars should have the same name as their flask equivalent except\n in all lower-case.","docstring_summary":"Parse the [flask] section of your config and hand off the config\n to the app in context.","docstring_tokens":["Parse","the","[","flask","]","section","of","your","config","and","hand","off","the","config","to","the","app","in","context","."],"function":"def parse_flask_section(self):\n \"\"\"Parse the [flask] section of your config and hand off the config\n to the app in context.\n\n Config vars should have the same name as their flask equivalent except\n in all lower-case.\"\"\"\n for section_name in self.sections():\n current_app.config[section_name] = {}\n\n for section_name in self.sections():\n for item in self.items(section_name):\n self._load_item(section_name, item[0])\n\n # Set db_read_replica url to same as db url if none provided\n if (\"url_read_replica\" not in current_app.config[\"db\"]) or (\n not current_app.config[\"db\"][\"url_read_replica\"]\n ):\n current_app.config[\"db\"][\"url_read_replica\"] = current_app.config[\"db\"][\n \"url\"\n ]\n\n # Always disable (not included in app.default_config)\n # See 
https:\/\/flask-restx.readthedocs.io\/en\/latest\/mask.html#usage\n current_app.config[\"RESTX_MASK_SWAGGER\"] = False","function_tokens":["def","parse_flask_section","(","self",")",":","for","section_name","in","self",".","sections","(",")",":","current_app",".","config","[","section_name","]","=","{","}","for","section_name","in","self",".","sections","(",")",":","for","item","in","self",".","items","(","section_name",")",":","self",".","_load_item","(","section_name",",","item","[","0","]",")","# Set db_read_replica url to same as db url if none provided","if","(","\"url_read_replica\"","not","in","current_app",".","config","[","\"db\"","]",")","or","(","not","current_app",".","config","[","\"db\"","]","[","\"url_read_replica\"","]",")",":","current_app",".","config","[","\"db\"","]","[","\"url_read_replica\"","]","=","current_app",".","config","[","\"db\"","]","[","\"url\"","]","# Always disable (not included in app.default_config)","# See https:\/\/flask-restx.readthedocs.io\/en\/latest\/mask.html#usage","current_app",".","config","[","\"RESTX_MASK_SWAGGER\"","]","=","False"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/config.py#L57-L80"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/utils\/config.py","language":"python","identifier":"ConfigIni._load_item","parameters":"(self, section_name, key)","argument_list":"","return_statement":"","docstring":"Load the specified item from the [flask] section. Type is\n determined by the type of the equivalent value in app.default_config\n or string if unknown.","docstring_summary":"Load the specified item from the [flask] section. Type is\n determined by the type of the equivalent value in app.default_config\n or string if unknown.","docstring_tokens":["Load","the","specified","item","from","the","[","flask","]","section",".","Type","is","determined","by","the","type","of","the","equivalent","value","in","app",".","default_config","or","string","if","unknown","."],"function":"def _load_item(self, section_name, key):\n \"\"\"Load the specified item from the [flask] section. 
Type is\n determined by the type of the equivalent value in app.default_config\n or string if unknown.\"\"\"\n default = current_app.default_config.get(key)\n if isinstance(default, datetime.timedelta):\n # One of the default config vars is a timedelta - interpret it\n # as an int and construct using it\n current_app.config[section_name][key] = datetime.timedelta(\n self.getint(section_name, key)\n )\n elif isinstance(default, bool):\n current_app.config[section_name][key] = self.getboolean(section_name, key)\n elif isinstance(default, float):\n current_app.config[section_name][key] = self.getfloat(section_name, key)\n elif isinstance(default, int):\n current_app.config[section_name][key] = self.getint(section_name, key)\n else:\n # All the string keys need to be coerced into str()\n # because Flask expects some of them not to be unicode\n current_app.config[section_name][key] = str(self.get(section_name, key))\n env_config_update(current_app.config, section_name, key)","function_tokens":["def","_load_item","(","self",",","section_name",",","key",")",":","default","=","current_app",".","default_config",".","get","(","key",")","if","isinstance","(","default",",","datetime",".","timedelta",")",":","# One of the default config vars is a timedelta - interpret it","# as an int and construct using it","current_app",".","config","[","section_name","]","[","key","]","=","datetime",".","timedelta","(","self",".","getint","(","section_name",",","key",")",")","elif","isinstance","(","default",",","bool",")",":","current_app",".","config","[","section_name","]","[","key","]","=","self",".","getboolean","(","section_name",",","key",")","elif","isinstance","(","default",",","float",")",":","current_app",".","config","[","section_name","]","[","key","]","=","self",".","getfloat","(","section_name",",","key",")","elif","isinstance","(","default",",","int",")",":","current_app",".","config","[","section_name","]","[","key","]","=","self",".","getint","(","section_name",",","key",")","else",":","# All the string keys need to be coerced into str()","# because Flask expects some of them not to be unicode","current_app",".","config","[","section_name","]","[","key","]","=","str","(","self",".","get","(","section_name",",","key",")",")","env_config_update","(","current_app",".","config",",","section_name",",","key",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/utils\/config.py#L82-L103"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/eth_indexing\/event_scanner.py","language":"python","identifier":"_retry_web3_call","parameters":"( # type: ignore\n func,\n start_block,\n end_block,\n retries=MAX_REQUEST_RETRIES,\n delay=REQUEST_RETRY_SECONDS,\n)","argument_list":"","return_statement":"","docstring":"A custom retry loop to throttle down block range.\n\n If our JSON-RPC server cannot serve all incoming `eth_get_logs` in a single request,\n we retry and throttle down block range for every retry.\n\n For example, Go Ethereum does not indicate what is an acceptable response size.\n It just fails on the server-side with a \"context was cancelled\" warning.\n\n :param func: A callable that triggers Ethereum JSON-RPC, as func(start_block, end_block)\n :param start_block: The initial start block of the block range\n :param end_block: The initial start block of the block range\n :param retries: How many times we retry\n :param delay: Time to sleep between 
retries","docstring_summary":"A custom retry loop to throttle down block range.","docstring_tokens":["A","custom","retry","loop","to","throttle","down","block","range","."],"function":"def _retry_web3_call( # type: ignore\n func,\n start_block,\n end_block,\n retries=MAX_REQUEST_RETRIES,\n delay=REQUEST_RETRY_SECONDS,\n) -> Tuple[int, list]: # type: ignore\n \"\"\"A custom retry loop to throttle down block range.\n\n If our JSON-RPC server cannot serve all incoming `eth_get_logs` in a single request,\n we retry and throttle down block range for every retry.\n\n For example, Go Ethereum does not indicate what is an acceptable response size.\n It just fails on the server-side with a \"context was cancelled\" warning.\n\n :param func: A callable that triggers Ethereum JSON-RPC, as func(start_block, end_block)\n :param start_block: The initial start block of the block range\n :param end_block: The initial start block of the block range\n :param retries: How many times we retry\n :param delay: Time to sleep between retries\n \"\"\"\n for i in range(retries):\n try:\n return end_block, func(start_block, end_block)\n except Exception as e:\n # Assume this is HTTPConnectionPool(host='localhost', port=8545): Read timed out. (read timeout=10)\n # from Go Ethereum. This translates to the error \"context was cancelled\" on the server side:\n # https:\/\/github.com\/ethereum\/go-ethereum\/issues\/20426\n if i < retries - 1:\n # Give some more verbose info than the default middleware\n logger.warning(\n \"event_scanner.py | Retrying events for block range %d - %d (%d) failed with %s, retrying in %s seconds\",\n start_block,\n end_block,\n end_block - start_block,\n e,\n delay,\n )\n # Decrease the `eth_get_blocks` range\n end_block = start_block + ((end_block - start_block) \/\/ 2)\n # Let the JSON-RPC to recover e.g. from restart\n time.sleep(delay)\n continue\n logger.warning(\"event_scanner.py | Out of retries\")\n raise","function_tokens":["def","_retry_web3_call","(","# type: ignore","func",",","start_block",",","end_block",",","retries","=","MAX_REQUEST_RETRIES",",","delay","=","REQUEST_RETRY_SECONDS",",",")","->","Tuple","[","int",",","list","]",":","# type: ignore","for","i","in","range","(","retries",")",":","try",":","return","end_block",",","func","(","start_block",",","end_block",")","except","Exception","as","e",":","# Assume this is HTTPConnectionPool(host='localhost', port=8545): Read timed out. (read timeout=10)","# from Go Ethereum. This translates to the error \"context was cancelled\" on the server side:","# https:\/\/github.com\/ethereum\/go-ethereum\/issues\/20426","if","i","<","retries","-","1",":","# Give some more verbose info than the default middleware","logger",".","warning","(","\"event_scanner.py | Retrying events for block range %d - %d (%d) failed with %s, retrying in %s seconds\"",",","start_block",",","end_block",",","end_block","-","start_block",",","e",",","delay",",",")","# Decrease the `eth_get_blocks` range","end_block","=","start_block","+","(","(","end_block","-","start_block",")","\/\/","2",")","# Let the JSON-RPC to recover e.g. 
from restart","time",".","sleep","(","delay",")","continue","logger",".","warning","(","\"event_scanner.py | Out of retries\"",")","raise"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/eth_indexing\/event_scanner.py#L351-L395"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/eth_indexing\/event_scanner.py","language":"python","identifier":"_fetch_events_for_all_contracts","parameters":"(\n web3,\n event_type,\n argument_filters: dict,\n from_block: BlockIdentifier,\n to_block: BlockIdentifier,\n)","argument_list":"","return_statement":"return all_events","docstring":"Get events using eth_get_logs API.\n\n This method is detached from any contract instance.\n\n This is a stateless method, as opposed to createFilter.\n It can be safely called against nodes which do not provide `eth_newFilter` API, like Infura.","docstring_summary":"Get events using eth_get_logs API.","docstring_tokens":["Get","events","using","eth_get_logs","API","."],"function":"def _fetch_events_for_all_contracts(\n web3,\n event_type,\n argument_filters: dict,\n from_block: BlockIdentifier,\n to_block: BlockIdentifier,\n) -> Iterable:\n \"\"\"Get events using eth_get_logs API.\n\n This method is detached from any contract instance.\n\n This is a stateless method, as opposed to createFilter.\n It can be safely called against nodes which do not provide `eth_newFilter` API, like Infura.\n \"\"\"\n\n if from_block is None:\n raise TypeError(\"Missing mandatory keyword argument to get_logs: fromBlock\")\n\n # Currently no way to poke this using a public Web3.py API.\n # This will return raw underlying ABI JSON object for the event\n abi = event_type._get_event_abi()\n\n # Depending on the Solidity version used to compile\n # the contract that uses the ABI,\n # it might have Solidity ABI encoding v1 or v2.\n # We just assume the default that you set on Web3 object here.\n # More information here https:\/\/eth-abi.readthedocs.io\/en\/latest\/index.html\n codec: ABICodec = web3.codec\n\n # Here we need to poke a bit into Web3 internals, as this\n # functionality is not exposed by default.\n # Construct JSON-RPC raw filter presentation based on human readable Python descriptions\n # Namely, convert event names to their keccak signatures\n # More information here:\n # https:\/\/github.com\/ethereum\/web3.py\/blob\/e176ce0793dafdd0573acc8d4b76425b6eb604ca\/web3\/_utils\/filters.py#L71\n _, event_filter_params = construct_event_filter_params(\n abi,\n codec,\n address=argument_filters.get(\"address\"),\n argument_filters=argument_filters,\n fromBlock=from_block,\n toBlock=to_block,\n )\n\n logger.debug(\n \"event_scanner.py | Querying eth_get_logs with the following parameters: %s\",\n event_filter_params,\n )\n\n # Call JSON-RPC API on your Ethereum node.\n # get_logs() returns raw AttributedDict entries\n logs = web3.eth.getLogs(event_filter_params)\n\n # Convert raw binary data to Python proxy objects as described by ABI\n all_events = []\n for log in logs:\n # Convert raw JSON-RPC log result to human readable event by using ABI data\n # More information how processLog works here\n # https:\/\/github.com\/ethereum\/web3.py\/blob\/fbaf1ad11b0c7fac09ba34baff2c256cffe0a148\/web3\/_utils\/events.py#L200\n event = get_event_data(codec, abi, log)\n all_events.append(event)\n return 
all_events","function_tokens":["def","_fetch_events_for_all_contracts","(","web3",",","event_type",",","argument_filters",":","dict",",","from_block",":","BlockIdentifier",",","to_block",":","BlockIdentifier",",",")","->","Iterable",":","if","from_block","is","None",":","raise","TypeError","(","\"Missing mandatory keyword argument to get_logs: fromBlock\"",")","# Currently no way to poke this using a public Web3.py API.","# This will return raw underlying ABI JSON object for the event","abi","=","event_type",".","_get_event_abi","(",")","# Depending on the Solidity version used to compile","# the contract that uses the ABI,","# it might have Solidity ABI encoding v1 or v2.","# We just assume the default that you set on Web3 object here.","# More information here https:\/\/eth-abi.readthedocs.io\/en\/latest\/index.html","codec",":","ABICodec","=","web3",".","codec","# Here we need to poke a bit into Web3 internals, as this","# functionality is not exposed by default.","# Construct JSON-RPC raw filter presentation based on human readable Python descriptions","# Namely, convert event names to their keccak signatures","# More information here:","# https:\/\/github.com\/ethereum\/web3.py\/blob\/e176ce0793dafdd0573acc8d4b76425b6eb604ca\/web3\/_utils\/filters.py#L71","_",",","event_filter_params","=","construct_event_filter_params","(","abi",",","codec",",","address","=","argument_filters",".","get","(","\"address\"",")",",","argument_filters","=","argument_filters",",","fromBlock","=","from_block",",","toBlock","=","to_block",",",")","logger",".","debug","(","\"event_scanner.py | Querying eth_get_logs with the following parameters: %s\"",",","event_filter_params",",",")","# Call JSON-RPC API on your Ethereum node.","# get_logs() returns raw AttributedDict entries","logs","=","web3",".","eth",".","getLogs","(","event_filter_params",")","# Convert raw binary data to Python proxy objects as described by ABI","all_events","=","[","]","for","log","in","logs",":","# Convert raw JSON-RPC log result to human readable event by using ABI data","# More information how processLog works here","# https:\/\/github.com\/ethereum\/web3.py\/blob\/fbaf1ad11b0c7fac09ba34baff2c256cffe0a148\/web3\/_utils\/events.py#L200","event","=","get_event_data","(","codec",",","abi",",","log",")","all_events",".","append","(","event",")","return","all_events"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/eth_indexing\/event_scanner.py#L398-L459"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/eth_indexing\/event_scanner.py","language":"python","identifier":"EventScanner.__init__","parameters":"(\n self,\n db,\n redis,\n web3: Web3,\n contract: Type[Contract],\n event_type: Type[ContractEvent],\n filters: dict,\n )","argument_list":"","return_statement":"","docstring":":param db: database handle\n :param redis: redis handle\n :param web3: Web3 instantiated with provider url\n :param contract: Contract\n :param state: state manager to keep tracks of last scanned block and persisting events to db\n :param event_type: web3 Event we scan\n :param filters: Filters passed to get_logs e.g. 
{ \"address\": }","docstring_summary":":param db: database handle\n :param redis: redis handle\n :param web3: Web3 instantiated with provider url\n :param contract: Contract\n :param state: state manager to keep tracks of last scanned block and persisting events to db\n :param event_type: web3 Event we scan\n :param filters: Filters passed to get_logs e.g. { \"address\": }","docstring_tokens":[":","param","db",":","database","handle",":","param","redis",":","redis","handle",":","param","web3",":","Web3","instantiated","with","provider","url",":","param","contract",":","Contract",":","param","state",":","state","manager","to","keep","tracks","of","last","scanned","block","and","persisting","events","to","db",":","param","event_type",":","web3","Event","we","scan",":","param","filters",":","Filters","passed","to","get_logs","e",".","g",".","{","address",":","","}"],"function":"def __init__(\n self,\n db,\n redis,\n web3: Web3,\n contract: Type[Contract],\n event_type: Type[ContractEvent],\n filters: dict,\n ):\n \"\"\"\n :param db: database handle\n :param redis: redis handle\n :param web3: Web3 instantiated with provider url\n :param contract: Contract\n :param state: state manager to keep tracks of last scanned block and persisting events to db\n :param event_type: web3 Event we scan\n :param filters: Filters passed to get_logs e.g. { \"address\": }\n \"\"\"\n\n self.logger = logger\n self.db = db\n self.redis = redis\n self.contract = contract\n self.web3 = web3\n self.event_type = event_type\n self.filters = filters\n self.last_scanned_block = MIN_SCAN_START_BLOCK\n self.latest_chain_block = self.web3.eth.blockNumber","function_tokens":["def","__init__","(","self",",","db",",","redis",",","web3",":","Web3",",","contract",":","Type","[","Contract","]",",","event_type",":","Type","[","ContractEvent","]",",","filters",":","dict",",",")",":","self",".","logger","=","logger","self",".","db","=","db","self",".","redis","=","redis","self",".","contract","=","contract","self",".","web3","=","web3","self",".","event_type","=","event_type","self",".","filters","=","filters","self",".","last_scanned_block","=","MIN_SCAN_START_BLOCK","self",".","latest_chain_block","=","self",".","web3",".","eth",".","blockNumber"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/eth_indexing\/event_scanner.py#L62-L89"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/eth_indexing\/event_scanner.py","language":"python","identifier":"EventScanner.restore","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Restore the last scan state from redis.\n If value not found in redis, restore from database.","docstring_summary":"Restore the last scan state from redis.\n If value not found in redis, restore from database.","docstring_tokens":["Restore","the","last","scan","state","from","redis",".","If","value","not","found","in","redis","restore","from","database","."],"function":"def restore(self):\n \"\"\"Restore the last scan state from redis.\n If value not found in redis, restore from database.\"\"\"\n restored = self.redis.get(eth_indexing_last_scanned_block_key)\n if not restored:\n with self.db.scoped_session() as session:\n result = session.query(EthBlock.last_scanned_block).first()\n restored = result[0] if result else restored\n self.last_scanned_block = int(restored) if restored else MIN_SCAN_START_BLOCK\n logger.info(\n 
f\"event_scanner.py | Restored last scanned block ({self.last_scanned_block})\"\n )","function_tokens":["def","restore","(","self",")",":","restored","=","self",".","redis",".","get","(","eth_indexing_last_scanned_block_key",")","if","not","restored",":","with","self",".","db",".","scoped_session","(",")","as","session",":","result","=","session",".","query","(","EthBlock",".","last_scanned_block",")",".","first","(",")","restored","=","result","[","0","]","if","result","else","restored","self",".","last_scanned_block","=","int","(","restored",")","if","restored","else","MIN_SCAN_START_BLOCK","logger",".","info","(","f\"event_scanner.py | Restored last scanned block ({self.last_scanned_block})\"",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/eth_indexing\/event_scanner.py#L91-L102"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/eth_indexing\/event_scanner.py","language":"python","identifier":"EventScanner.save","parameters":"(self, block_number: int)","argument_list":"","return_statement":"","docstring":"Save at the end of each chunk of blocks, so we can resume in the case of a crash or CTRL+C\n Next time the scanner is started we will resume from this block","docstring_summary":"Save at the end of each chunk of blocks, so we can resume in the case of a crash or CTRL+C\n Next time the scanner is started we will resume from this block","docstring_tokens":["Save","at","the","end","of","each","chunk","of","blocks","so","we","can","resume","in","the","case","of","a","crash","or","CTRL","+","C","Next","time","the","scanner","is","started","we","will","resume","from","this","block"],"function":"def save(self, block_number: int):\n \"\"\"Save at the end of each chunk of blocks, so we can resume in the case of a crash or CTRL+C\n Next time the scanner is started we will resume from this block\n \"\"\"\n self.last_scanned_block = block_number\n logger.info(\n f\"event_scanner.py | Saving last scanned block ({self.last_scanned_block}) to redis\"\n )\n self.redis.set(\n eth_indexing_last_scanned_block_key,\n str(self.last_scanned_block),\n )\n with self.db.scoped_session() as session:\n record = session.query(EthBlock).first()\n if record:\n record.last_scanned_block = self.last_scanned_block\n else:\n record = EthBlock(last_scanned_block=self.last_scanned_block)\n session.add(record)","function_tokens":["def","save","(","self",",","block_number",":","int",")",":","self",".","last_scanned_block","=","block_number","logger",".","info","(","f\"event_scanner.py | Saving last scanned block ({self.last_scanned_block}) to redis\"",")","self",".","redis",".","set","(","eth_indexing_last_scanned_block_key",",","str","(","self",".","last_scanned_block",")",",",")","with","self",".","db",".","scoped_session","(",")","as","session",":","record","=","session",".","query","(","EthBlock",")",".","first","(",")","if","record",":","record",".","last_scanned_block","=","self",".","last_scanned_block","else",":","record","=","EthBlock","(","last_scanned_block","=","self",".","last_scanned_block",")","session",".","add","(","record",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/eth_indexing\/event_scanner.py#L104-L122"} 
{"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/eth_indexing\/event_scanner.py","language":"python","identifier":"EventScanner.get_block_timestamp","parameters":"(self, block_num)","argument_list":"","return_statement":"return datetime.datetime.utcfromtimestamp(last_time)","docstring":"Get Ethereum block timestamp","docstring_summary":"Get Ethereum block timestamp","docstring_tokens":["Get","Ethereum","block","timestamp"],"function":"def get_block_timestamp(self, block_num) -> Union[datetime.datetime, None]:\n \"\"\"Get Ethereum block timestamp\"\"\"\n try:\n block_info = self.web3.eth.getBlock(block_num)\n except BlockNotFound:\n # Block was not mined yet,\n # minor chain reorganisation?\n return None\n last_time = block_info[\"timestamp\"]\n return datetime.datetime.utcfromtimestamp(last_time)","function_tokens":["def","get_block_timestamp","(","self",",","block_num",")","->","Union","[","datetime",".","datetime",",","None","]",":","try",":","block_info","=","self",".","web3",".","eth",".","getBlock","(","block_num",")","except","BlockNotFound",":","# Block was not mined yet,","# minor chain reorganisation?","return","None","last_time","=","block_info","[","\"timestamp\"","]","return","datetime",".","datetime",".","utcfromtimestamp","(","last_time",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/eth_indexing\/event_scanner.py#L124-L133"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/eth_indexing\/event_scanner.py","language":"python","identifier":"EventScanner.get_suggested_scan_end_block","parameters":"(self)","argument_list":"","return_statement":"return self.latest_chain_block - ETH_BLOCK_TAIL_OFFSET","docstring":"Get the last mined block on Ethereum chain we are following.","docstring_summary":"Get the last mined block on Ethereum chain we are following.","docstring_tokens":["Get","the","last","mined","block","on","Ethereum","chain","we","are","following","."],"function":"def get_suggested_scan_end_block(self):\n \"\"\"Get the last mined block on Ethereum chain we are following.\"\"\"\n\n # Do not scan all the way to the final block, as this\n # block might not be mined yet\n return self.latest_chain_block - ETH_BLOCK_TAIL_OFFSET","function_tokens":["def","get_suggested_scan_end_block","(","self",")",":","# Do not scan all the way to the final block, as this","# block might not be mined yet","return","self",".","latest_chain_block","-","ETH_BLOCK_TAIL_OFFSET"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/eth_indexing\/event_scanner.py#L135-L140"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/eth_indexing\/event_scanner.py","language":"python","identifier":"EventScanner.get_last_scanned_block","parameters":"(self)","argument_list":"","return_statement":"return self.last_scanned_block","docstring":"The number of the last block we have stored.","docstring_summary":"The number of the last block we have stored.","docstring_tokens":["The","number","of","the","last","block","we","have","stored","."],"function":"def get_last_scanned_block(self) -> int:\n \"\"\"The number of the last block we have stored.\"\"\"\n return 
self.last_scanned_block","function_tokens":["def","get_last_scanned_block","(","self",")","->","int",":","return","self",".","last_scanned_block"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/eth_indexing\/event_scanner.py#L142-L144"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/eth_indexing\/event_scanner.py","language":"python","identifier":"EventScanner.process_event","parameters":"(\n self, block_timestamp: datetime.datetime, event: TransferEvent\n )","argument_list":"","return_statement":"return f\"{block_number}-{txhash}-{log_index}\"","docstring":"Record a ERC-20 transfer in our database.","docstring_summary":"Record a ERC-20 transfer in our database.","docstring_tokens":["Record","a","ERC","-","20","transfer","in","our","database","."],"function":"def process_event(\n self, block_timestamp: datetime.datetime, event: TransferEvent\n ) -> str:\n \"\"\"Record a ERC-20 transfer in our database.\"\"\"\n # Events are keyed by their transaction hash and log index\n # One transaction may contain multiple events\n # and each one of those gets their own log index\n\n log_index = event[\"logIndex\"] # Log index within the block\n # transaction_index = event.transactionIndex # Transaction index within the block\n txhash = event[\"transactionHash\"].hex() # Transaction hash\n block_number = event[\"blockNumber\"]\n\n # Convert ERC-20 Transfer event to our internal format\n args = event[\"args\"]\n transfer = {\n \"from\": args[\"from\"],\n \"to\": args[\"to\"],\n \"value\": args[\"value\"],\n \"timestamp\": block_timestamp,\n }\n\n # Add user ids from the transfer event into the balance refresh queue.\n # Depending on the wallet connection, we may have the address stored as\n # lower cased, so to be safe, we refresh check-summed and lower-cased adddresses.\n transfer_event_wallets = [\n transfer[\"from\"],\n transfer[\"to\"],\n transfer[\"from\"].lower(),\n transfer[\"to\"].lower(),\n ]\n with self.db.scoped_session() as session:\n user_result = (\n session.query(User.user_id)\n .filter(User.is_current == True)\n .filter(User.wallet.in_(transfer_event_wallets))\n ).all()\n user_set = {user_id for [user_id] in user_result}\n\n associated_wallet_result = (\n session.query(AssociatedWallet.user_id)\n .filter(AssociatedWallet.is_current == True)\n .filter(AssociatedWallet.is_delete == False)\n .filter(AssociatedWallet.wallet.in_(transfer_event_wallets))\n ).all()\n associated_wallet_set = {user_id for [user_id] in associated_wallet_result}\n\n user_ids = list(user_set.union(associated_wallet_set))\n if user_ids:\n logger.info(\n f\"event_scanner.py | Enqueueing user ids {user_ids} to immediate balance refresh queue\"\n )\n enqueue_immediate_balance_refresh(self.redis, user_ids)\n\n # Return a pointer that allows us to look up this event later if needed\n return f\"{block_number}-{txhash}-{log_index}\"","function_tokens":["def","process_event","(","self",",","block_timestamp",":","datetime",".","datetime",",","event",":","TransferEvent",")","->","str",":","# Events are keyed by their transaction hash and log index","# One transaction may contain multiple events","# and each one of those gets their own log index","log_index","=","event","[","\"logIndex\"","]","# Log index within the block","# transaction_index = event.transactionIndex # Transaction index within the 
block","txhash","=","event","[","\"transactionHash\"","]",".","hex","(",")","# Transaction hash","block_number","=","event","[","\"blockNumber\"","]","# Convert ERC-20 Transfer event to our internal format","args","=","event","[","\"args\"","]","transfer","=","{","\"from\"",":","args","[","\"from\"","]",",","\"to\"",":","args","[","\"to\"","]",",","\"value\"",":","args","[","\"value\"","]",",","\"timestamp\"",":","block_timestamp",",","}","# Add user ids from the transfer event into the balance refresh queue.","# Depending on the wallet connection, we may have the address stored as","# lower cased, so to be safe, we refresh check-summed and lower-cased adddresses.","transfer_event_wallets","=","[","transfer","[","\"from\"","]",",","transfer","[","\"to\"","]",",","transfer","[","\"from\"","]",".","lower","(",")",",","transfer","[","\"to\"","]",".","lower","(",")",",","]","with","self",".","db",".","scoped_session","(",")","as","session",":","user_result","=","(","session",".","query","(","User",".","user_id",")",".","filter","(","User",".","is_current","==","True",")",".","filter","(","User",".","wallet",".","in_","(","transfer_event_wallets",")",")",")",".","all","(",")","user_set","=","{","user_id","for","[","user_id","]","in","user_result","}","associated_wallet_result","=","(","session",".","query","(","AssociatedWallet",".","user_id",")",".","filter","(","AssociatedWallet",".","is_current","==","True",")",".","filter","(","AssociatedWallet",".","is_delete","==","False",")",".","filter","(","AssociatedWallet",".","wallet",".","in_","(","transfer_event_wallets",")",")",")",".","all","(",")","associated_wallet_set","=","{","user_id","for","[","user_id","]","in","associated_wallet_result","}","user_ids","=","list","(","user_set",".","union","(","associated_wallet_set",")",")","if","user_ids",":","logger",".","info","(","f\"event_scanner.py | Enqueueing user ids {user_ids} to immediate balance refresh queue\"",")","enqueue_immediate_balance_refresh","(","self",".","redis",",","user_ids",")","# Return a pointer that allows us to look up this event later if needed","return","f\"{block_number}-{txhash}-{log_index}\""],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/eth_indexing\/event_scanner.py#L146-L201"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/eth_indexing\/event_scanner.py","language":"python","identifier":"EventScanner.scan_chunk","parameters":"(self, start_block, end_block)","argument_list":"","return_statement":"return end_block, all_processed","docstring":"Read and process events between to block numbers.\n\n Dynamically decrease the size of the chunk in case the JSON-RPC server pukes out.\n\n :return: tuple(actual end block number, when this block was mined, processed events)","docstring_summary":"Read and process events between to block numbers.","docstring_tokens":["Read","and","process","events","between","to","block","numbers","."],"function":"def scan_chunk(self, start_block, end_block) -> Tuple[int, list]:\n \"\"\"Read and process events between to block numbers.\n\n Dynamically decrease the size of the chunk in case the JSON-RPC server pukes out.\n\n :return: tuple(actual end block number, when this block was mined, processed events)\n \"\"\"\n\n block_timestamps = {}\n get_block_timestamp = self.get_block_timestamp\n\n # Cache block timestamps to reduce some RPC overhead\n # Real solution might include smarter 
models around block\n def get_block_mined_timestamp(block_num):\n if block_num not in block_timestamps:\n block_timestamps[block_num] = get_block_timestamp(block_num)\n return block_timestamps[block_num]\n\n all_processed = []\n\n # Callable that takes care of the underlying web3 call\n def _fetch_events(from_block, to_block):\n return _fetch_events_for_all_contracts(\n self.web3,\n self.event_type,\n self.filters,\n from_block=from_block,\n to_block=to_block,\n )\n\n # Do `n` retries on `eth_get_logs`,\n # throttle down block range if needed\n end_block, events = _retry_web3_call(\n _fetch_events, start_block=start_block, end_block=end_block\n )\n\n for evt in events:\n idx = evt[\n \"logIndex\"\n ] # Integer of the log index position in the block, null when its pending\n\n # We cannot avoid minor chain reorganisations, but\n # at least we must avoid blocks that are not mined yet\n assert idx is not None, \"Somehow tried to scan a pending block\"\n\n block_number = evt[\"blockNumber\"]\n\n # Get UTC time when this event happened (block mined timestamp)\n # from our in-memory cache\n block_timestamp = get_block_mined_timestamp(block_number)\n\n logger.debug(\n f'event_scanner.py | Processing event {evt[\"event\"]}, block:{evt[\"blockNumber\"]}'\n )\n processed = self.process_event(block_timestamp, evt)\n all_processed.append(processed)\n\n return end_block, all_processed","function_tokens":["def","scan_chunk","(","self",",","start_block",",","end_block",")","->","Tuple","[","int",",","list","]",":","block_timestamps","=","{","}","get_block_timestamp","=","self",".","get_block_timestamp","# Cache block timestamps to reduce some RPC overhead","# Real solution might include smarter models around block","def","get_block_mined_timestamp","(","block_num",")",":","if","block_num","not","in","block_timestamps",":","block_timestamps","[","block_num","]","=","get_block_timestamp","(","block_num",")","return","block_timestamps","[","block_num","]","all_processed","=","[","]","# Callable that takes care of the underlying web3 call","def","_fetch_events","(","from_block",",","to_block",")",":","return","_fetch_events_for_all_contracts","(","self",".","web3",",","self",".","event_type",",","self",".","filters",",","from_block","=","from_block",",","to_block","=","to_block",",",")","# Do `n` retries on `eth_get_logs`,","# throttle down block range if needed","end_block",",","events","=","_retry_web3_call","(","_fetch_events",",","start_block","=","start_block",",","end_block","=","end_block",")","for","evt","in","events",":","idx","=","evt","[","\"logIndex\"","]","# Integer of the log index position in the block, null when its pending","# We cannot avoid minor chain reorganisations, but","# at least we must avoid blocks that are not mined yet","assert","idx","is","not","None",",","\"Somehow tried to scan a pending block\"","block_number","=","evt","[","\"blockNumber\"","]","# Get UTC time when this event happened (block mined timestamp)","# from our in-memory cache","block_timestamp","=","get_block_mined_timestamp","(","block_number",")","logger",".","debug","(","f'event_scanner.py | Processing event {evt[\"event\"]}, block:{evt[\"blockNumber\"]}'",")","processed","=","self",".","process_event","(","block_timestamp",",","evt",")","all_processed",".","append","(","processed",")","return","end_block",",","all_processed"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/eth_indexing\/event_scanner.py#L203-L260"} 
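The scan_chunk record above delegates its RPC retries to a _retry_web3_call helper whose body falls outside this extract; per the inline comments it retries eth_get_logs and throttles the block range down when the node rejects a query. A minimal sketch of that retry-and-throttle pattern, assuming hypothetical NUM_RETRIES and RETRY_DELAY_SECONDS constants in place of whatever the real module defines:

import time
import logging

logger = logging.getLogger(__name__)

NUM_RETRIES = 3  # assumed for illustration; not the module's real constant
RETRY_DELAY_SECONDS = 3.0  # assumed for illustration

def retry_web3_call(fetch_events, start_block, end_block):
    """Sketch: call fetch_events(start_block, end_block), halving the block
    range on failure so an oversized eth_getLogs query eventually fits the
    node's limits. Returns (actual_end_block, events)."""
    for attempt in range(NUM_RETRIES):
        try:
            return end_block, fetch_events(start_block, end_block)
        except Exception as e:  # the real helper may catch narrower errors
            if attempt == NUM_RETRIES - 1:
                raise
            logger.warning("eth_getLogs failed (%s); halving block range", e)
            # Shrink the scanned window toward start_block and try again
            end_block = start_block + (end_block - start_block) // 2
            time.sleep(RETRY_DELAY_SECONDS)

scan_chunk then receives the possibly reduced end_block back as actual_end_block, which is why EventScanner.scan advances current_block from the returned value rather than from its own estimate.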
{"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/eth_indexing\/event_scanner.py","language":"python","identifier":"EventScanner.estimate_next_chunk_size","parameters":"(self, current_chuck_size: int, event_found_count: int)","argument_list":"","return_statement":"return int(current_chuck_size)","docstring":"Try to figure out optimal chunk size\n\n Our scanner might need to scan the whole blockchain for all events\n\n * We want to minimize API calls over empty blocks\n * We want to make sure that one scan chunk does not try to process too many entries once, as we try to control commit buffer size and potentially asynchronous busy loop\n * Do not overload node serving JSON-RPC API by asking data for too many events at a time\n\n Currently Ethereum JSON-API does not have an API to tell when a first event occured in a blockchain\n and our heuristics try to accelerate block fetching (chunk size) until we see the first event.\n\n These heurestics exponentially increase the scan chunk size depending on if we are seeing events or not.\n When any transfers are encountered, we are back to scanning only a few blocks at a time.\n It does not make sense to do a full chain scan starting from block 1, doing one JSON-RPC call per 20 blocks.","docstring_summary":"Try to figure out optimal chunk size","docstring_tokens":["Try","to","figure","out","optimal","chunk","size"],"function":"def estimate_next_chunk_size(self, current_chuck_size: int, event_found_count: int):\n \"\"\"Try to figure out optimal chunk size\n\n Our scanner might need to scan the whole blockchain for all events\n\n * We want to minimize API calls over empty blocks\n * We want to make sure that one scan chunk does not try to process too many entries once, as we try to control commit buffer size and potentially asynchronous busy loop\n * Do not overload node serving JSON-RPC API by asking data for too many events at a time\n\n Currently Ethereum JSON-API does not have an API to tell when a first event occured in a blockchain\n and our heuristics try to accelerate block fetching (chunk size) until we see the first event.\n\n These heurestics exponentially increase the scan chunk size depending on if we are seeing events or not.\n When any transfers are encountered, we are back to scanning only a few blocks at a time.\n It does not make sense to do a full chain scan starting from block 1, doing one JSON-RPC call per 20 blocks.\n \"\"\"\n\n if event_found_count > 0:\n # When we encounter first events, reset the chunk size window\n current_chuck_size = MIN_SCAN_CHUNK_SIZE\n else:\n current_chuck_size *= CHUNK_SIZE_INCREASE\n\n current_chuck_size = max(MIN_SCAN_CHUNK_SIZE, current_chuck_size)\n current_chuck_size = min(MAX_CHUNK_SCAN_SIZE, current_chuck_size)\n return int(current_chuck_size)","function_tokens":["def","estimate_next_chunk_size","(","self",",","current_chuck_size",":","int",",","event_found_count",":","int",")",":","if","event_found_count",">","0",":","# When we encounter first events, reset the chunk size 
window","current_chuck_size","=","MIN_SCAN_CHUNK_SIZE","else",":","current_chuck_size","*=","CHUNK_SIZE_INCREASE","current_chuck_size","=","max","(","MIN_SCAN_CHUNK_SIZE",",","current_chuck_size",")","current_chuck_size","=","min","(","MAX_CHUNK_SCAN_SIZE",",","current_chuck_size",")","return","int","(","current_chuck_size",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/eth_indexing\/event_scanner.py#L262-L287"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/eth_indexing\/event_scanner.py","language":"python","identifier":"EventScanner.scan","parameters":"(\n self,\n start_block,\n end_block,\n start_chunk_size=START_CHUNK_SIZE,\n )","argument_list":"","return_statement":"return all_processed, total_chunks_scanned","docstring":"Perform a token events scan.\n\n :param start_block: The first block included in the scan\n :param end_block: The last block included in the scan\n :param start_chunk_size: How many blocks we try to fetch over JSON-RPC on the first attempt\n\n :return: [All processed events, number of chunks used]","docstring_summary":"Perform a token events scan.","docstring_tokens":["Perform","a","token","events","scan","."],"function":"def scan(\n self,\n start_block,\n end_block,\n start_chunk_size=START_CHUNK_SIZE,\n ) -> Tuple[list, int]:\n \"\"\"Perform a token events scan.\n\n :param start_block: The first block included in the scan\n :param end_block: The last block included in the scan\n :param start_chunk_size: How many blocks we try to fetch over JSON-RPC on the first attempt\n\n :return: [All processed events, number of chunks used]\n \"\"\"\n\n current_block = start_block\n\n # Scan in chunks, commit between\n chunk_size = start_chunk_size\n last_scan_duration = last_logs_found = 0\n total_chunks_scanned = 0\n\n # All processed entries we got on this scan cycle\n all_processed = []\n\n while current_block <= end_block:\n\n # Print some diagnostics to logs to try to fiddle with real world JSON-RPC API performance\n estimated_end_block = min(\n current_block + chunk_size, self.get_suggested_scan_end_block()\n )\n logger.debug(\n \"event_scanner.py | Scanning token transfers for blocks: %d - %d, chunk size %d, last chunk scan took %f, last logs found %d\",\n current_block,\n estimated_end_block,\n chunk_size,\n last_scan_duration,\n last_logs_found,\n )\n\n start = time.time()\n actual_end_block, new_entries = self.scan_chunk(\n current_block, estimated_end_block\n )\n\n # Where does our current chunk scan ends - are we out of chain yet?\n current_end = actual_end_block\n\n last_scan_duration = int(time.time() - start)\n all_processed += new_entries\n\n # Try to guess how many blocks to fetch over `eth_get_logs` API next time\n chunk_size = self.estimate_next_chunk_size(chunk_size, len(new_entries))\n\n # Set where the next chunk starts\n current_block = current_end + 1\n total_chunks_scanned += 1\n self.save(min(current_end, self.get_suggested_scan_end_block()))\n\n return all_processed, total_chunks_scanned","function_tokens":["def","scan","(","self",",","start_block",",","end_block",",","start_chunk_size","=","START_CHUNK_SIZE",",",")","->","Tuple","[","list",",","int","]",":","current_block","=","start_block","# Scan in chunks, commit between","chunk_size","=","start_chunk_size","last_scan_duration","=","last_logs_found","=","0","total_chunks_scanned","=","0","# All processed entries we got on this scan 
cycle","all_processed","=","[","]","while","current_block","<=","end_block",":","# Print some diagnostics to logs to try to fiddle with real world JSON-RPC API performance","estimated_end_block","=","min","(","current_block","+","chunk_size",",","self",".","get_suggested_scan_end_block","(",")",")","logger",".","debug","(","\"event_scanner.py | Scanning token transfers for blocks: %d - %d, chunk size %d, last chunk scan took %f, last logs found %d\"",",","current_block",",","estimated_end_block",",","chunk_size",",","last_scan_duration",",","last_logs_found",",",")","start","=","time",".","time","(",")","actual_end_block",",","new_entries","=","self",".","scan_chunk","(","current_block",",","estimated_end_block",")","# Where does our current chunk scan ends - are we out of chain yet?","current_end","=","actual_end_block","last_scan_duration","=","int","(","time",".","time","(",")","-","start",")","all_processed","+=","new_entries","# Try to guess how many blocks to fetch over `eth_get_logs` API next time","chunk_size","=","self",".","estimate_next_chunk_size","(","chunk_size",",","len","(","new_entries",")",")","# Set where the next chunk starts","current_block","=","current_end","+","1","total_chunks_scanned","+=","1","self",".","save","(","min","(","current_end",",","self",".","get_suggested_scan_end_block","(",")",")",")","return","all_processed",",","total_chunks_scanned"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/eth_indexing\/event_scanner.py#L289-L348"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge_event_bus.py","language":"python","identifier":"ChallengeEventBus.register_listener","parameters":"(self, event: ChallengeEvent, listener: ChallengeManager)","argument_list":"","return_statement":"","docstring":"Registers a listener (`ChallengeManager`) to listen for a particular event type.","docstring_summary":"Registers a listener (`ChallengeManager`) to listen for a particular event type.","docstring_tokens":["Registers","a","listener","(","ChallengeManager",")","to","listen","for","a","particular","event","type","."],"function":"def register_listener(self, event: ChallengeEvent, listener: ChallengeManager):\n \"\"\"Registers a listener (`ChallengeManager`) to listen for a particular event type.\"\"\"\n self._listeners[event].append(listener)\n if listener.challenge_id not in self._managers:\n self._managers[listener.challenge_id] = listener","function_tokens":["def","register_listener","(","self",",","event",":","ChallengeEvent",",","listener",":","ChallengeManager",")",":","self",".","_listeners","[","event","]",".","append","(","listener",")","if","listener",".","challenge_id","not","in","self",".","_managers",":","self",".","_managers","[","listener",".","challenge_id","]","=","listener"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge_event_bus.py#L57-L61"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge_event_bus.py","language":"python","identifier":"ChallengeEventBus.get_manager","parameters":"(self, challenge_id: str)","argument_list":"","return_statement":"return self._managers[challenge_id]","docstring":"Gets a manager for a given challenge_id","docstring_summary":"Gets a manager for a 
given challenge_id","docstring_tokens":["Gets","a","manager","for","a","given","challenge_id"],"function":"def get_manager(self, challenge_id: str) -> ChallengeManager:\n \"\"\"Gets a manager for a given challenge_id\"\"\"\n return self._managers[challenge_id]","function_tokens":["def","get_manager","(","self",",","challenge_id",":","str",")","->","ChallengeManager",":","return","self",".","_managers","[","challenge_id","]"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge_event_bus.py#L63-L65"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge_event_bus.py","language":"python","identifier":"ChallengeEventBus.does_manager_exist","parameters":"(self, challenge_id: str)","argument_list":"","return_statement":"return challenge_id in self._managers","docstring":"Returns whether or not a manager exists for a given challenge_id","docstring_summary":"Returns whether or not a manager exists for a given challenge_id","docstring_tokens":["Returns","whether","or","not","a","manager","exists","for","a","given","challenge_id"],"function":"def does_manager_exist(self, challenge_id: str) -> bool:\n \"\"\"Returns whether or not a manager exists for a given challenge_id\"\"\"\n return challenge_id in self._managers","function_tokens":["def","does_manager_exist","(","self",",","challenge_id",":","str",")","->","bool",":","return","challenge_id","in","self",".","_managers"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge_event_bus.py#L67-L69"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge_event_bus.py","language":"python","identifier":"ChallengeEventBus.use_scoped_dispatch_queue","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Makes the bus only dispatch the events once out of the new scope created with 'with","docstring_summary":"Makes the bus only dispatch the events once out of the new scope created with 'with","docstring_tokens":["Makes","the","bus","only","dispatch","the","events","once","out","of","the","new","scope","created","with","with"],"function":"def use_scoped_dispatch_queue(self):\n \"\"\"Makes the bus only dispatch the events once out of the new scope created with 'with'\"\"\"\n if len(self._in_memory_queue) > 0:\n logger.warning(\"ChallengeEventBus: Already using in-memory queue\")\n try:\n yield self._in_memory_queue\n finally:\n self.flush()","function_tokens":["def","use_scoped_dispatch_queue","(","self",")",":","if","len","(","self",".","_in_memory_queue",")",">","0",":","logger",".","warning","(","\"ChallengeEventBus: Already using in-memory queue\"",")","try",":","yield","self",".","_in_memory_queue","finally",":","self",".","flush","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge_event_bus.py#L72-L79"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge_event_bus.py","language":"python","identifier":"ChallengeEventBus.dispatch","parameters":"(\n self,\n event: ChallengeEvent,\n block_number: int,\n user_id: int,\n extra: Dict = None,\n 
)","argument_list":"","return_statement":"","docstring":"Dispatches an event + block_number + user_id to an in memory queue.\n\n Does not dispatch to Redis until flush is called or a scoped dispatch queue goes out of scope","docstring_summary":"Dispatches an event + block_number + user_id to an in memory queue.","docstring_tokens":["Dispatches","an","event","+","block_number","+","user_id","to","an","in","memory","queue","."],"function":"def dispatch(\n self,\n event: ChallengeEvent,\n block_number: int,\n user_id: int,\n extra: Dict = None,\n ):\n \"\"\"Dispatches an event + block_number + user_id to an in memory queue.\n\n Does not dispatch to Redis until flush is called or a scoped dispatch queue goes out of scope\n \"\"\"\n if extra is None:\n extra = {}\n # Sanitize input, drop the event if it's malformed\n valid_event = event is not None and isinstance(event, str)\n valid_block = block_number is not None and isinstance(block_number, int)\n valid_user = user_id is not None and isinstance(user_id, int)\n valid_extra = extra is not None and isinstance(extra, dict)\n if not (valid_event and valid_block and valid_user and valid_extra):\n logger.warning(\n f\"ChallengeEventBus: ignoring invalid event: {(event, block_number, user_id, extra)}\"\n )\n return\n\n self._in_memory_queue.append(\n {\n \"event\": event,\n \"block_number\": block_number,\n \"user_id\": user_id,\n \"extra\": extra,\n }\n )","function_tokens":["def","dispatch","(","self",",","event",":","ChallengeEvent",",","block_number",":","int",",","user_id",":","int",",","extra",":","Dict","=","None",",",")",":","if","extra","is","None",":","extra","=","{","}","# Sanitize input, drop the event if it's malformed","valid_event","=","event","is","not","None","and","isinstance","(","event",",","str",")","valid_block","=","block_number","is","not","None","and","isinstance","(","block_number",",","int",")","valid_user","=","user_id","is","not","None","and","isinstance","(","user_id",",","int",")","valid_extra","=","extra","is","not","None","and","isinstance","(","extra",",","dict",")","if","not","(","valid_event","and","valid_block","and","valid_user","and","valid_extra",")",":","logger",".","warning","(","f\"ChallengeEventBus: ignoring invalid event: {(event, block_number, user_id, extra)}\"",")","return","self",".","_in_memory_queue",".","append","(","{","\"event\"",":","event",",","\"block_number\"",":","block_number",",","\"user_id\"",":","user_id",",","\"extra\"",":","extra",",","}",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge_event_bus.py#L81-L112"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge_event_bus.py","language":"python","identifier":"ChallengeEventBus.flush","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Flushes the in-memory queue of events and enqueues them to Redis","docstring_summary":"Flushes the in-memory queue of events and enqueues them to Redis","docstring_tokens":["Flushes","the","in","-","memory","queue","of","events","and","enqueues","them","to","Redis"],"function":"def flush(self):\n \"\"\"Flushes the in-memory queue of events and enqueues them to Redis\"\"\"\n logger.info(\n f\"ChallengeEventBus: Flushing {len(self._in_memory_queue)} events from in-memory queue\"\n )\n for event in self._in_memory_queue:\n try:\n event_json = self._event_to_json(\n event[\"event\"],\n 
event[\"block_number\"],\n event[\"user_id\"],\n event.get(\"extra\", {}),\n )\n logger.info(f\"ChallengeEventBus: dispatch {event_json}\")\n self._redis.rpush(REDIS_QUEUE_PREFIX, event_json)\n except Exception as e:\n logger.warning(f\"ChallengeEventBus: error enqueuing to Redis: {e}\")\n self._in_memory_queue.clear()","function_tokens":["def","flush","(","self",")",":","logger",".","info","(","f\"ChallengeEventBus: Flushing {len(self._in_memory_queue)} events from in-memory queue\"",")","for","event","in","self",".","_in_memory_queue",":","try",":","event_json","=","self",".","_event_to_json","(","event","[","\"event\"","]",",","event","[","\"block_number\"","]",",","event","[","\"user_id\"","]",",","event",".","get","(","\"extra\"",",","{","}",")",",",")","logger",".","info","(","f\"ChallengeEventBus: dispatch {event_json}\"",")","self",".","_redis",".","rpush","(","REDIS_QUEUE_PREFIX",",","event_json",")","except","Exception","as","e",":","logger",".","warning","(","f\"ChallengeEventBus: error enqueuing to Redis: {e}\"",")","self",".","_in_memory_queue",".","clear","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge_event_bus.py#L114-L131"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge_event_bus.py","language":"python","identifier":"ChallengeEventBus.process_events","parameters":"(self, session: Session, max_events=1000)","argument_list":"","return_statement":"return (len(events_json), did_error)","docstring":"Dequeues `max_events` from Redis queue and processes them, forwarding to listening ChallengeManagers.\n Returns (num_processed_events, did_error).\n Will return -1 as num_processed_events if an error prevented any events from\n being processed (i.e. some error deserializing from Redis)","docstring_summary":"Dequeues `max_events` from Redis queue and processes them, forwarding to listening ChallengeManagers.\n Returns (num_processed_events, did_error).\n Will return -1 as num_processed_events if an error prevented any events from\n being processed (i.e. some error deserializing from Redis)","docstring_tokens":["Dequeues","max_events","from","Redis","queue","and","processes","them","forwarding","to","listening","ChallengeManagers",".","Returns","(","num_processed_events","did_error",")",".","Will","return","-","1","as","num_processed_events","if","an","error","prevented","any","events","from","being","processed","(","i",".","e",".","some","error","deserializing","from","Redis",")"],"function":"def process_events(self, session: Session, max_events=1000) -> Tuple[int, bool]:\n \"\"\"Dequeues `max_events` from Redis queue and processes them, forwarding to listening ChallengeManagers.\n Returns (num_processed_events, did_error).\n Will return -1 as num_processed_events if an error prevented any events from\n being processed (i.e. 
some error deserializing from Redis)\n        \"\"\"\n        try:\n            # get the first max_events elements.\n            events_json = self._redis.lrange(REDIS_QUEUE_PREFIX, 0, max_events)\n            logger.info(f\"ChallengeEventBus: dequeued {len(events_json)} events\")\n            # trim the first from the front of the list\n            self._redis.ltrim(REDIS_QUEUE_PREFIX, len(events_json), -1)\n            events_dicts = list(map(self._json_to_event, events_json))\n\n            # Consolidate event types for processing\n            # map of {\"event_type\": [{ user_id: number, block_number: number, extra: {} }]}}\n            event_user_dict: DefaultDict[\n                ChallengeEvent, List[EventMetadata]\n            ] = defaultdict(lambda: [])\n            for event_dict in events_dicts:\n                event_type = event_dict[\"event\"]\n                event_user_dict[event_type].append(\n                    {\n                        \"user_id\": event_dict[\"user_id\"],\n                        \"block_number\": event_dict[\"block_number\"],\n                        \"extra\": event_dict.get(  # use .get to be safe since prior versions didn't have `extra`\n                            \"extra\", {}\n                        ),\n                    }\n                )\n        except Exception as e:\n            logger.warning(f\"ChallengeEventBus: error processing from Redis: {e}\")\n            return (-1, True)\n\n        did_error = False\n        for (event_type, event_dicts) in event_user_dict.items():\n            listeners = self._listeners[event_type]\n            for listener in listeners:\n                try:\n                    listener.process(session, event_type, event_dicts)\n                except Exception as e:\n                    # We really shouldn't see errors from a ChallengeManager (they should handle on their own),\n                    # but in case we do, swallow it and continue on\n                    logger.warning(\n                        f\"ChallengeEventBus: manager [{listener.challenge_id}] unexpectedly propagated error: [{e}]\"\n                    )\n                    did_error = True\n\n        return (len(events_json), did_error)","function_tokens":["def","process_events","(","self",",","session",":","Session",",","max_events","=","1000",")","->","Tuple","[","int",",","bool","]",":","try",":","# get the first max_events elements.","events_json","=","self",".","_redis",".","lrange","(","REDIS_QUEUE_PREFIX",",","0",",","max_events",")","logger",".","info","(","f\"ChallengeEventBus: dequeued {len(events_json)} events\"",")","# trim the first from the front of the list","self",".","_redis",".","ltrim","(","REDIS_QUEUE_PREFIX",",","len","(","events_json",")",",","-","1",")","events_dicts","=","list","(","map","(","self",".","_json_to_event",",","events_json",")",")","# Consolidate event types for processing","# map of {\"event_type\": [{ user_id: number, block_number: number, extra: {} }]}}","event_user_dict",":","DefaultDict","[","ChallengeEvent",",","List","[","EventMetadata","]","]","=","defaultdict","(","lambda",":","[","]",")","for","event_dict","in","events_dicts",":","event_type","=","event_dict","[","\"event\"","]","event_user_dict","[","event_type","]",".","append","(","{","\"user_id\"",":","event_dict","[","\"user_id\"","]",",","\"block_number\"",":","event_dict","[","\"block_number\"","]",",","\"extra\"",":","event_dict",".","get","(","# use .get to be safe since prior versions didn't have `extra`","\"extra\"",",","{","}",")",",","}",")","except","Exception","as","e",":","logger",".","warning","(","f\"ChallengeEventBus: error processing from Redis: {e}\"",")","return","(","-","1",",","True",")","did_error","=","False","for","(","event_type",",","event_dicts",")","in","event_user_dict",".","items","(",")",":","listeners","=","self",".","_listeners","[","event_type","]","for","listener","in","listeners",":","try",":","listener",".","process","(","session",",","event_type",",","event_dicts",")","except","Exception","as","e",":","# We really shouldn't see errors from a ChallengeManager (they should handle
on their own),","# but in case we do, swallow it and continue on","logger",".","warning","(","f\"ChallengeEventBus: manager [{listener.challenge_id}] unexpectedly propagated error: [{e}]\"",")","did_error","=","True","return","(","len","(","events_json",")",",","did_error",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge_event_bus.py#L133-L181"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/trending_challenge.py","language":"python","identifier":"is_dst","parameters":"(zonename, dt)","argument_list":"","return_statement":"return localized.astimezone(tz).dst() != timedelta(0)","docstring":"Checks if it is daylight savings time\n    During daylight savings, the clock moves forward one hr","docstring_summary":"Checks if it is daylight savings time\n    During daylight savings, the clock moves forward one hr","docstring_tokens":["Checks","if","it","is","daylight","savings","time","During","daylight","savings","the","clock","moves","forward","one","hr"],"function":"def is_dst(zonename, dt):\n    \"\"\"Checks if it is daylight savings time\n    During daylight savings, the clock moves forward one hr\n    \"\"\"\n    tz = pytz.timezone(zonename)\n    localized = pytz.utc.localize(dt)\n    return localized.astimezone(tz).dst() != timedelta(0)","function_tokens":["def","is_dst","(","zonename",",","dt",")",":","tz","=","pytz",".","timezone","(","zonename",")","localized","=","pytz",".","utc",".","localize","(","dt",")","return","localized",".","astimezone","(","tz",")",".","dst","(",")","!=","timedelta","(","0",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/trending_challenge.py#L63-L69"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/trending_challenge.py","language":"python","identifier":"should_trending_challenge_update","parameters":"(\n    session: Session, timestamp: int\n)","argument_list":"","return_statement":"return (True, dt.date())","docstring":"Checks if the timestamp is after a week and there is no pending trending update\n    Returns a tuple of boolean if the challenge should be updated, and if it's set to true, the date","docstring_summary":"Checks if the timestamp is after a week and there is no pending trending update\n    Returns a tuple of boolean if the challenge should be updated, and if it's set to true, the date","docstring_tokens":["Checks","if","the","timestamp","is","after","a","week","and","there","is","no","pending","trending","update","Returns","a","tuple","of","boolean","if","the","challenge","should","be","updated","and","if","it","s","set","to","true","the","date"],"function":"def should_trending_challenge_update(\n    session: Session, timestamp: int\n) -> Tuple[bool, Optional[date]]:\n    \"\"\"Checks if the timestamp is after a week and there is no pending trending update\n    Returns a tuple of boolean if the challenge should be updated, and if it's set to true, the date\n    \"\"\"\n\n    dt = datetime.fromtimestamp(timestamp)\n    is_valid_timestamp = get_is_valid_timestamp(dt)\n    if not is_valid_timestamp:\n        return (False, None)\n\n    # DB query for most recent db row of trending's date\n    # using that, figure out new date threshold -> next friday at noon\n    most_recent_user_challenge = (\n
session.query(TrendingResult.week).order_by(desc(TrendingResult.week)).first()\n    )\n\n    if most_recent_user_challenge is None:\n        # do something\n        return (True, dt.date())\n    week = most_recent_user_challenge[0]\n\n    if week == dt.date():\n        return (False, None)\n\n    return (True, dt.date())","function_tokens":["def","should_trending_challenge_update","(","session",":","Session",",","timestamp",":","int",")","->","Tuple","[","bool",",","Optional","[","date","]","]",":","dt","=","datetime",".","fromtimestamp","(","timestamp",")","is_valid_timestamp","=","get_is_valid_timestamp","(","dt",")","if","not","is_valid_timestamp",":","return","(","False",",","None",")","# DB query for most recent db row of trending's date","# using that, figure out new date threshold -> next friday at noon","most_recent_user_challenge","=","(","session",".","query","(","TrendingResult",".","week",")",".","order_by","(","desc","(","TrendingResult",".","week",")",")",".","first","(",")",")","if","most_recent_user_challenge","is","None",":","# do something","return","(","True",",","dt",".","date","(",")",")","week","=","most_recent_user_challenge","[","0","]","if","week","==","dt",".","date","(",")",":","return","(","False",",","None",")","return","(","True",",","dt",".","date","(",")",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/trending_challenge.py#L86-L112"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge.py","language":"python","identifier":"ChallengeUpdater.update_user_challenges","parameters":"(\n        self,\n        session: Session,\n        event: str,\n        user_challenges: List[UserChallenge],\n        step_count: Optional[int],\n        event_metadatas: List[FullEventMetadata],\n        starting_block: Optional[int],\n    )","argument_list":"","return_statement":"","docstring":"This is usually the main required method to fill out when implementing a new challenge.\n        Given an event type, a list of existing user challenges, and the base challenge type,\n        update the given user_challenges.\n\n        In the case of aggregate challenges, where UserChallenges are created in an\n        already completed state, this method can be left as is.","docstring_summary":"This is usually the main required method to fill out when implementing a new challenge.\n        Given an event type, a list of existing user challenges, and the base challenge type,\n        update the given user_challenges.","docstring_tokens":["This","is","usually","the","main","required","method","to","fill","out","when","implementing","a","new","challenge",".","Given","an","event","type","a","list","of","existing","user","challenges","and","the","base","challenge","type","update","the","given","user_challenges","."],"function":"def update_user_challenges(\n        self,\n        session: Session,\n        event: str,\n        user_challenges: List[UserChallenge],\n        step_count: Optional[int],\n        event_metadatas: List[FullEventMetadata],\n        starting_block: Optional[int],\n    ):\n        \"\"\"This is usually the main required method to fill out when implementing a new challenge.\n        Given an event type, a list of existing user challenges, and the base challenge type,\n        update the given user_challenges.\n\n        In the case of aggregate challenges, where UserChallenges are created in an\n        already completed state, this method can be left as is.\n        
\"\"\"","function_tokens":["def","update_user_challenges","(","self",",","session",":","Session",",","event",":","str",",","user_challenges",":","List","[","UserChallenge","]",",","step_count",":","Optional","[","int","]",",","event_metadatas",":","List","[","FullEventMetadata","]",",","starting_block",":","Optional","[","int","]",",",")",":"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge.py#L49-L64"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge.py","language":"python","identifier":"ChallengeUpdater.on_after_challenge_creation","parameters":"(\n self, session: Session, metadatas: List[FullEventMetadata]\n )","argument_list":"","return_statement":"","docstring":"Optional method to do some work after the `ChallengeManager` creates new challenges.\n If a challenge is backed by it's own table, for instance, create those rows here.","docstring_summary":"Optional method to do some work after the `ChallengeManager` creates new challenges.\n If a challenge is backed by it's own table, for instance, create those rows here.","docstring_tokens":["Optional","method","to","do","some","work","after","the","ChallengeManager","creates","new","challenges",".","If","a","challenge","is","backed","by","it","s","own","table","for","instance","create","those","rows","here","."],"function":"def on_after_challenge_creation(\n self, session: Session, metadatas: List[FullEventMetadata]\n ):\n \"\"\"Optional method to do some work after the `ChallengeManager` creates new challenges.\n If a challenge is backed by it's own table, for instance, create those rows here.\n \"\"\"","function_tokens":["def","on_after_challenge_creation","(","self",",","session",":","Session",",","metadatas",":","List","[","FullEventMetadata","]",")",":"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge.py#L66-L71"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge.py","language":"python","identifier":"ChallengeUpdater.generate_specifier","parameters":"(self, user_id: int, extra: Dict)","argument_list":"","return_statement":"return str(user_id)","docstring":"Optional method to provide a custom specifier for a challenge, given a user_id","docstring_summary":"Optional method to provide a custom specifier for a challenge, given a user_id","docstring_tokens":["Optional","method","to","provide","a","custom","specifier","for","a","challenge","given","a","user_id"],"function":"def generate_specifier(self, user_id: int, extra: Dict) -> str:\n \"\"\"Optional method to provide a custom specifier for a challenge, given a user_id\"\"\"\n return str(user_id)","function_tokens":["def","generate_specifier","(","self",",","user_id",":","int",",","extra",":","Dict",")","->","str",":","return","str","(","user_id",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge.py#L73-L75"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge.py","language":"python","identifier":"ChallengeUpdater.should_create_new_challenge","parameters":"(\n self, session: Session, event: 
str, user_id: int, extra: Dict\n )","argument_list":"","return_statement":"return True","docstring":"Optional method called for aggregate challenges to allow for overriding default\n behavior of creating a new UserChallenge whenever 1) we see a relevant event and\n 2) the parent challenge is not yet complete.","docstring_summary":"Optional method called for aggregate challenges to allow for overriding default\n behavior of creating a new UserChallenge whenever 1) we see a relevant event and\n 2) the parent challenge is not yet complete.","docstring_tokens":["Optional","method","called","for","aggregate","challenges","to","allow","for","overriding","default","behavior","of","creating","a","new","UserChallenge","whenever","1",")","we","see","a","relevant","event","and","2",")","the","parent","challenge","is","not","yet","complete","."],"function":"def should_create_new_challenge(\n self, session: Session, event: str, user_id: int, extra: Dict\n ) -> bool:\n \"\"\"Optional method called for aggregate challenges to allow for overriding default\n behavior of creating a new UserChallenge whenever 1) we see a relevant event and\n 2) the parent challenge is not yet complete.\n \"\"\"\n return True","function_tokens":["def","should_create_new_challenge","(","self",",","session",":","Session",",","event",":","str",",","user_id",":","int",",","extra",":","Dict",")","->","bool",":","return","True"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge.py#L77-L84"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge.py","language":"python","identifier":"ChallengeUpdater.should_show_challenge_for_user","parameters":"(self, session: Session, user_id: int)","argument_list":"","return_statement":"return True","docstring":"Optional method to show\/hide a challenge for a particular user.","docstring_summary":"Optional method to show\/hide a challenge for a particular user.","docstring_tokens":["Optional","method","to","show","\/","hide","a","challenge","for","a","particular","user","."],"function":"def should_show_challenge_for_user(self, session: Session, user_id: int) -> bool:\n \"\"\"Optional method to show\/hide a challenge for a particular user.\"\"\"\n return True","function_tokens":["def","should_show_challenge_for_user","(","self",",","session",":","Session",",","user_id",":","int",")","->","bool",":","return","True"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge.py#L86-L88"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge.py","language":"python","identifier":"ChallengeUpdater.get_metadata","parameters":"(self, session: Session, specifiers: List[str])","argument_list":"","return_statement":"return [{} for s in specifiers]","docstring":"Optional method to provide any extra metadata required for client to properly display a challenge.","docstring_summary":"Optional method to provide any extra metadata required for client to properly display a challenge.","docstring_tokens":["Optional","method","to","provide","any","extra","metadata","required","for","client","to","properly","display","a","challenge","."],"function":"def get_metadata(self, session: Session, specifiers: List[str]) -> List[Dict]:\n \"\"\"Optional 
method to provide any extra metadata required for client to properly display a challenge.\"\"\"\n        return [{} for s in specifiers]","function_tokens":["def","get_metadata","(","self",",","session",":","Session",",","specifiers",":","List","[","str","]",")","->","List","[","Dict","]",":","return","[","{","}","for","s","in","specifiers","]"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge.py#L90-L92"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge.py","language":"python","identifier":"ChallengeUpdater.get_default_metadata","parameters":"(self)","argument_list":"","return_statement":"return {}","docstring":"Optional method to provide default metadata for a challenge with no progress.","docstring_summary":"Optional method to provide default metadata for a challenge with no progress.","docstring_tokens":["Optional","method","to","provide","default","metadata","for","a","challenge","with","no","progress","."],"function":"def get_default_metadata(self) -> Dict:\n        \"\"\"Optional method to provide default metadata for a challenge with no progress.\"\"\"\n        return {}","function_tokens":["def","get_default_metadata","(","self",")","->","Dict",":","return","{","}"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge.py#L94-L96"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge.py","language":"python","identifier":"ChallengeUpdater.get_override_challenge_step_count","parameters":"(\n        self, session: Session, user_id: int\n    )","argument_list":"","return_statement":"return None","docstring":"Optional method to override the step count value","docstring_summary":"Optional method to override the step count value","docstring_tokens":["Optional","method","to","override","the","step","count","value"],"function":"def get_override_challenge_step_count(\n        self, session: Session, user_id: int\n    ) -> Optional[int]:\n        \"\"\"Optional method to override the step count value\"\"\"\n        return None","function_tokens":["def","get_override_challenge_step_count","(","self",",","session",":","Session",",","user_id",":","int",")","->","Optional","[","int","]",":","return","None"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge.py#L98-L102"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge.py","language":"python","identifier":"ChallengeManager.process","parameters":"(self, session, event_type: str, event_metadatas: List[EventMetadata])","argument_list":"","return_statement":"","docstring":"Processes a number of events for a particular event type, updating\n        UserChallengeEvents as needed.","docstring_summary":"Processes a number of events for a particular event type, updating\n        UserChallengeEvents as needed.","docstring_tokens":["Processes","a","number","of","events","for","a","particular","event","type","updating","UserChallengeEvents","as","needed","."],"function":"def process(self, session, event_type: str, event_metadatas: List[EventMetadata]):\n        \"\"\"Processes a number of events for a particular event type, updating\n        
UserChallengeEvents as needed.\n \"\"\"\n logger.info(\n f\"ChallengeManager: processing event type [{event_type}] for challenge [{self.challenge_id}]\"\n )\n if not self._did_init: # lazy init\n self._init_challenge(session)\n\n # If inactive, do nothing\n if not self._is_active:\n return\n\n # filter out events that took place before the starting block, returning\n # early if need be\n if self._starting_block is not None:\n event_metadatas = list(\n filter(\n lambda x: x[\"block_number\"] >= cast(int, self._starting_block),\n event_metadatas,\n )\n )\n if not event_metadatas:\n return\n\n # Add specifiers\n events_with_specifiers: List[FullEventMetadata] = [\n {\n \"user_id\": event[\"user_id\"],\n \"block_number\": event[\"block_number\"],\n \"extra\": event[\"extra\"],\n \"specifier\": self._updater.generate_specifier(\n event[\"user_id\"], event[\"extra\"]\n ),\n }\n for event in event_metadatas\n ]\n\n # Drop any duplicate specifiers\n events_with_specifiers_map = {\n event[\"specifier\"]: event for event in events_with_specifiers\n }\n events_with_specifiers = list(events_with_specifiers_map.values())\n\n specifiers: List[str] = [e[\"specifier\"] for e in events_with_specifiers]\n\n # Because we reuse a single session between multiple\n # challenge managers, we have to be extra careful in the case\n # that we run into a Postgres level error, to rollback\n # the session so it remains usable - hence, all the sensitive\n # code belongs in a `try` block here.\n try:\n # Gets all user challenges,\n existing_user_challenges = fetch_user_challenges(\n session, self.challenge_id, specifiers\n )\n\n # Create users that need challenges still\n existing_specifiers = {\n challenge.specifier for challenge in existing_user_challenges\n }\n\n # Create new challenges\n\n new_challenge_metadata = [\n metadata\n for metadata in events_with_specifiers\n if metadata[\"specifier\"] not in existing_specifiers\n ]\n to_create_metadata: List[FullEventMetadata] = []\n if self._challenge_type == ChallengeType.aggregate:\n # For aggregate challenges, only create them\n # if we haven't maxed out completion yet, and\n # we haven't overriden this via should_create_new_challenge\n\n # Get *all* UserChallenges per user\n user_ids = list({e[\"user_id\"] for e in event_metadatas})\n all_user_challenges: List[Tuple[int, int]] = (\n session.query(\n UserChallenge.user_id, func.count(UserChallenge.specifier)\n )\n .filter(\n UserChallenge.challenge_id == self.challenge_id,\n UserChallenge.user_id.in_(user_ids),\n )\n .group_by(UserChallenge.user_id)\n ).all()\n challenges_per_user = dict(all_user_challenges)\n new_user_challenges_specifiers: Dict[int, Set[str]] = defaultdict(set)\n for new_metadata in new_challenge_metadata:\n user_id = new_metadata[\"user_id\"]\n completion_count = challenges_per_user.get(user_id, 0) + len(\n new_user_challenges_specifiers[user_id]\n )\n if self._step_count and completion_count >= self._step_count:\n continue\n if not self._updater.should_create_new_challenge(\n session,\n event_type,\n new_metadata[\"user_id\"],\n new_metadata[\"extra\"],\n ):\n continue\n new_user_challenges_specifiers[user_id].add(\n new_metadata[\"specifier\"]\n )\n to_create_metadata.append(new_metadata)\n else:\n to_create_metadata = new_challenge_metadata\n\n new_user_challenges = [\n self._create_new_user_challenge(\n metadata[\"user_id\"], metadata[\"specifier\"]\n )\n for metadata in to_create_metadata\n ]\n logger.warning(f\"new challenges ${new_user_challenges}\")\n\n # Get the other challenges to update 
(the ones in progress)\n in_progress_challenges = [\n challenge\n for challenge in existing_user_challenges\n if not challenge.is_complete\n ]\n to_update = in_progress_challenges + new_user_challenges\n\n # Filter out challenges for deactivated users\n to_update_user_ids = list({c.user_id for c in to_update})\n deactivated_user_ids = (\n session.query(User.user_id)\n .filter(\n User.user_id.in_(to_update_user_ids),\n User.is_deactivated == True,\n )\n .all()\n )\n to_create_metadata = list(\n filter(\n lambda c: c[\"user_id\"] not in deactivated_user_ids,\n to_create_metadata,\n )\n )\n to_update = list(\n filter(lambda c: c.user_id not in deactivated_user_ids, to_update)\n )\n\n # Do any other custom work needed after creating a challenge event\n self._updater.on_after_challenge_creation(session, to_create_metadata)\n\n # Update all the challenges\n self._updater.update_user_challenges(\n session,\n event_type,\n to_update,\n self._step_count,\n events_with_specifiers,\n self._starting_block,\n )\n\n # Add block # to newly completed challenges\n for challenge in to_update:\n if challenge.is_complete:\n block_number = events_with_specifiers_map[challenge.specifier][\n \"block_number\"\n ]\n challenge.completed_blocknumber = block_number\n\n logger.debug(\n f\"ChallengeManager: Updated challenges from event [{event_type}]: [{to_update}]\"\n )\n # Only add the new ones\n session.add_all(new_user_challenges)\n\n # Commit, so if there are DB errors\n # we encounter now and can roll back\n # to keep the session valid\n # for the next manager\n session.commit()\n except Exception as e:\n logger.warning(\n f\"ChallengeManager: caught error in manager [{self.challenge_id}]: [{e}]. Rolling back\"\n )\n session.rollback()","function_tokens":["def","process","(","self",",","session",",","event_type",":","str",",","event_metadatas",":","List","[","EventMetadata","]",")",":","logger",".","info","(","f\"ChallengeManager: processing event type [{event_type}] for challenge [{self.challenge_id}]\"",")","if","not","self",".","_did_init",":","# lazy init","self",".","_init_challenge","(","session",")","# If inactive, do nothing","if","not","self",".","_is_active",":","return","# filter out events that took place before the starting block, returning","# early if need be","if","self",".","_starting_block","is","not","None",":","event_metadatas","=","list","(","filter","(","lambda","x",":","x","[","\"block_number\"","]",">=","cast","(","int",",","self",".","_starting_block",")",",","event_metadatas",",",")",")","if","not","event_metadatas",":","return","# Add specifiers","events_with_specifiers",":","List","[","FullEventMetadata","]","=","[","{","\"user_id\"",":","event","[","\"user_id\"","]",",","\"block_number\"",":","event","[","\"block_number\"","]",",","\"extra\"",":","event","[","\"extra\"","]",",","\"specifier\"",":","self",".","_updater",".","generate_specifier","(","event","[","\"user_id\"","]",",","event","[","\"extra\"","]",")",",","}","for","event","in","event_metadatas","]","# Drop any duplicate specifiers","events_with_specifiers_map","=","{","event","[","\"specifier\"","]",":","event","for","event","in","events_with_specifiers","}","events_with_specifiers","=","list","(","events_with_specifiers_map",".","values","(",")",")","specifiers",":","List","[","str","]","=","[","e","[","\"specifier\"","]","for","e","in","events_with_specifiers","]","# Because we reuse a single session between multiple","# challenge managers, we have to be extra careful in the case","# that we run into a Postgres level 
error, to rollback","# the session so it remains usable - hence, all the sensitive","# code belongs in a `try` block here.","try",":","# Gets all user challenges,","existing_user_challenges","=","fetch_user_challenges","(","session",",","self",".","challenge_id",",","specifiers",")","# Create users that need challenges still","existing_specifiers","=","{","challenge",".","specifier","for","challenge","in","existing_user_challenges","}","# Create new challenges","new_challenge_metadata","=","[","metadata","for","metadata","in","events_with_specifiers","if","metadata","[","\"specifier\"","]","not","in","existing_specifiers","]","to_create_metadata",":","List","[","FullEventMetadata","]","=","[","]","if","self",".","_challenge_type","==","ChallengeType",".","aggregate",":","# For aggregate challenges, only create them","# if we haven't maxed out completion yet, and","# we haven't overriden this via should_create_new_challenge","# Get *all* UserChallenges per user","user_ids","=","list","(","{","e","[","\"user_id\"","]","for","e","in","event_metadatas","}",")","all_user_challenges",":","List","[","Tuple","[","int",",","int","]","]","=","(","session",".","query","(","UserChallenge",".","user_id",",","func",".","count","(","UserChallenge",".","specifier",")",")",".","filter","(","UserChallenge",".","challenge_id","==","self",".","challenge_id",",","UserChallenge",".","user_id",".","in_","(","user_ids",")",",",")",".","group_by","(","UserChallenge",".","user_id",")",")",".","all","(",")","challenges_per_user","=","dict","(","all_user_challenges",")","new_user_challenges_specifiers",":","Dict","[","int",",","Set","[","str","]","]","=","defaultdict","(","set",")","for","new_metadata","in","new_challenge_metadata",":","user_id","=","new_metadata","[","\"user_id\"","]","completion_count","=","challenges_per_user",".","get","(","user_id",",","0",")","+","len","(","new_user_challenges_specifiers","[","user_id","]",")","if","self",".","_step_count","and","completion_count",">=","self",".","_step_count",":","continue","if","not","self",".","_updater",".","should_create_new_challenge","(","session",",","event_type",",","new_metadata","[","\"user_id\"","]",",","new_metadata","[","\"extra\"","]",",",")",":","continue","new_user_challenges_specifiers","[","user_id","]",".","add","(","new_metadata","[","\"specifier\"","]",")","to_create_metadata",".","append","(","new_metadata",")","else",":","to_create_metadata","=","new_challenge_metadata","new_user_challenges","=","[","self",".","_create_new_user_challenge","(","metadata","[","\"user_id\"","]",",","metadata","[","\"specifier\"","]",")","for","metadata","in","to_create_metadata","]","logger",".","warning","(","f\"new challenges ${new_user_challenges}\"",")","# Get the other challenges to update (the ones in progress)","in_progress_challenges","=","[","challenge","for","challenge","in","existing_user_challenges","if","not","challenge",".","is_complete","]","to_update","=","in_progress_challenges","+","new_user_challenges","# Filter out challenges for deactivated 
users","to_update_user_ids","=","list","(","{","c",".","user_id","for","c","in","to_update","}",")","deactivated_user_ids","=","(","session",".","query","(","User",".","user_id",")",".","filter","(","User",".","user_id",".","in_","(","to_update_user_ids",")",",","User",".","is_deactivated","==","True",",",")",".","all","(",")",")","to_create_metadata","=","list","(","filter","(","lambda","c",":","c","[","\"user_id\"","]","not","in","deactivated_user_ids",",","to_create_metadata",",",")",")","to_update","=","list","(","filter","(","lambda","c",":","c",".","user_id","not","in","deactivated_user_ids",",","to_update",")",")","# Do any other custom work needed after creating a challenge event","self",".","_updater",".","on_after_challenge_creation","(","session",",","to_create_metadata",")","# Update all the challenges","self",".","_updater",".","update_user_challenges","(","session",",","event_type",",","to_update",",","self",".","_step_count",",","events_with_specifiers",",","self",".","_starting_block",",",")","# Add block # to newly completed challenges","for","challenge","in","to_update",":","if","challenge",".","is_complete",":","block_number","=","events_with_specifiers_map","[","challenge",".","specifier","]","[","\"block_number\"","]","challenge",".","completed_blocknumber","=","block_number","logger",".","debug","(","f\"ChallengeManager: Updated challenges from event [{event_type}]: [{to_update}]\"",")","# Only add the new ones","session",".","add_all","(","new_user_challenges",")","# Commit, so if there are DB errors","# we encounter now and can roll back","# to keep the session valid","# for the next manager","session",".","commit","(",")","except","Exception","as","e",":","logger",".","warning","(","f\"ChallengeManager: caught error in manager [{self.challenge_id}]: [{e}]. 
Rolling back\"",")","session",".","rollback","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge.py#L128-L311"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge.py","language":"python","identifier":"ChallengeManager.get_metadata","parameters":"(self, session: Session, specifiers: List[str])","argument_list":"","return_statement":"return self._updater.get_metadata(session, specifiers)","docstring":"Gets additional metadata to render the challenge if needed.","docstring_summary":"Gets additional metadata to render the challenge if needed.","docstring_tokens":["Gets","additional","metadata","to","render","the","challenge","if","needed","."],"function":"def get_metadata(self, session: Session, specifiers: List[str]) -> List[Dict]:\n \"\"\"Gets additional metadata to render the challenge if needed.\"\"\"\n return self._updater.get_metadata(session, specifiers)","function_tokens":["def","get_metadata","(","self",",","session",":","Session",",","specifiers",":","List","[","str","]",")","->","List","[","Dict","]",":","return","self",".","_updater",".","get_metadata","(","session",",","specifiers",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge.py#L318-L320"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge.py","language":"python","identifier":"ChallengeManager.get_default_metadata","parameters":"(self)","argument_list":"","return_statement":"return self._updater.get_default_metadata()","docstring":"Gets default metadata for an challenge with no progress.","docstring_summary":"Gets default metadata for an challenge with no progress.","docstring_tokens":["Gets","default","metadata","for","an","challenge","with","no","progress","."],"function":"def get_default_metadata(self):\n \"\"\"Gets default metadata for an challenge with no progress.\"\"\"\n return self._updater.get_default_metadata()","function_tokens":["def","get_default_metadata","(","self",")",":","return","self",".","_updater",".","get_default_metadata","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge.py#L322-L324"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge.py","language":"python","identifier":"ChallengeManager.should_show_challenge_for_user","parameters":"(self, session: Session, user_id: int)","argument_list":"","return_statement":"return self._updater.should_show_challenge_for_user(session, user_id)","docstring":"Optional method to show\/hide a challenge for a particular user.","docstring_summary":"Optional method to show\/hide a challenge for a particular user.","docstring_tokens":["Optional","method","to","show","\/","hide","a","challenge","for","a","particular","user","."],"function":"def should_show_challenge_for_user(self, session: Session, user_id: int) -> bool:\n \"\"\"Optional method to show\/hide a challenge for a particular user.\"\"\"\n return self._updater.should_show_challenge_for_user(session, 
user_id)","function_tokens":["def","should_show_challenge_for_user","(","self",",","session",":","Session",",","user_id",":","int",")","->","bool",":","return","self",".","_updater",".","should_show_challenge_for_user","(","session",",","user_id",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge.py#L326-L328"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/challenges\/challenge.py","language":"python","identifier":"ChallengeManager.get_override_challenge_step_count","parameters":"(\n self, session: Session, user_id: int\n )","argument_list":"","return_statement":"return self._updater.get_override_challenge_step_count(session, user_id)","docstring":"Optional method to override a challenge step count for a particular user.","docstring_summary":"Optional method to override a challenge step count for a particular user.","docstring_tokens":["Optional","method","to","override","a","challenge","step","count","for","a","particular","user","."],"function":"def get_override_challenge_step_count(\n self, session: Session, user_id: int\n ) -> Optional[int]:\n \"\"\"Optional method to override a challenge step count for a particular user.\"\"\"\n return self._updater.get_override_challenge_step_count(session, user_id)","function_tokens":["def","get_override_challenge_step_count","(","self",",","session",":","Session",",","user_id",":","int",")","->","Optional","[","int","]",":","return","self",".","_updater",".","get_override_challenge_step_count","(","session",",","user_id",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/challenges\/challenge.py#L330-L334"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/api\/v1\/playlists.py","language":"python","identifier":"get_playlist","parameters":"(playlist_id, current_user_id)","argument_list":"","return_statement":"return None","docstring":"Returns a single playlist, or None","docstring_summary":"Returns a single playlist, or None","docstring_tokens":["Returns","a","single","playlist","or","None"],"function":"def get_playlist(playlist_id, current_user_id):\n \"\"\"Returns a single playlist, or None\"\"\"\n args = {\n \"playlist_id\": [playlist_id],\n \"with_users\": True,\n \"current_user_id\": current_user_id,\n }\n playlists = get_playlists(args)\n if playlists:\n return extend_playlist(playlists[0])\n return None","function_tokens":["def","get_playlist","(","playlist_id",",","current_user_id",")",":","args","=","{","\"playlist_id\"",":","[","playlist_id","]",",","\"with_users\"",":","True",",","\"current_user_id\"",":","current_user_id",",","}","playlists","=","get_playlists","(","args",")","if","playlists",":","return","extend_playlist","(","playlists","[","0","]",")","return","None"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/api\/v1\/playlists.py#L59-L69"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/api\/v1\/api.py","language":"python","identifier":"ApiWithHTTPS.specs_url","parameters":"(self)","argument_list":"","return_statement":"return url_for(self.endpoint(\"specs\"), _external=True, _scheme=scheme)","docstring":"Monkey patch for HTTPS or 
else swagger docs do not serve over HTTPS\n https:\/\/stackoverflow.com\/questions\/47508257\/serving-flask-restplus-on-https-server","docstring_summary":"Monkey patch for HTTPS or else swagger docs do not serve over HTTPS\n https:\/\/stackoverflow.com\/questions\/47508257\/serving-flask-restplus-on-https-server","docstring_tokens":["Monkey","patch","for","HTTPS","or","else","swagger","docs","do","not","serve","over","HTTPS","https",":","\/\/","stackoverflow",".","com","\/","questions","\/","47508257","\/","serving","-","flask","-","restplus","-","on","-","https","-","server"],"function":"def specs_url(self):\n \"\"\"\n Monkey patch for HTTPS or else swagger docs do not serve over HTTPS\n https:\/\/stackoverflow.com\/questions\/47508257\/serving-flask-restplus-on-https-server\n \"\"\"\n scheme = \"https\" if \"https\" in self.base_url else \"http\"\n return url_for(self.endpoint(\"specs\"), _external=True, _scheme=scheme)","function_tokens":["def","specs_url","(","self",")",":","scheme","=","\"https\"","if","\"https\"","in","self",".","base_url","else","\"http\"","return","url_for","(","self",".","endpoint","(","\"specs\"",")",",","_external","=","True",",","_scheme","=","scheme",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/api\/v1\/api.py#L19-L25"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/api\/v1\/helpers.py","language":"python","identifier":"to_dict","parameters":"(multi_dict)","argument_list":"","return_statement":"return {\n k: v if len(v) > 1 else v[0]\n for (k, v) in multi_dict.to_dict(flat=False).items()\n }","docstring":"Converts a multi dict into a dict where only list entries are not flat","docstring_summary":"Converts a multi dict into a dict where only list entries are not flat","docstring_tokens":["Converts","a","multi","dict","into","a","dict","where","only","list","entries","are","not","flat"],"function":"def to_dict(multi_dict):\n \"\"\"Converts a multi dict into a dict where only list entries are not flat\"\"\"\n return {\n k: v if len(v) > 1 else v[0]\n for (k, v) in multi_dict.to_dict(flat=False).items()\n }","function_tokens":["def","to_dict","(","multi_dict",")",":","return","{","k",":","v","if","len","(","v",")",">","1","else","v","[","0","]","for","(","k",",","v",")","in","multi_dict",".","to_dict","(","flat","=","False",")",".","items","(",")","}"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/api\/v1\/helpers.py#L321-L326"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/api\/v1\/helpers.py","language":"python","identifier":"get_current_user_id","parameters":"(args)","argument_list":"","return_statement":"return None","docstring":"Gets current_user_id from args featuring a \"user_id\" key","docstring_summary":"Gets current_user_id from args featuring a \"user_id\" key","docstring_tokens":["Gets","current_user_id","from","args","featuring","a","user_id","key"],"function":"def get_current_user_id(args):\n \"\"\"Gets current_user_id from args featuring a \"user_id\" key\"\"\"\n if args.get(\"user_id\"):\n return decode_string_id(args[\"user_id\"])\n return 
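The ApiWithHTTPS.specs_url record is the stock workaround for serving swagger docs behind a TLS-terminating proxy. A self-contained sketch of the same override, assuming flask-restx (the older flask-restplus used by some deployments exposes the same Api.specs_url property):

    from flask import url_for
    from flask_restx import Api

    class ApiWithHTTPS(Api):
        @property
        def specs_url(self):
            # Pin the scheme on the generated swagger.json URL so the docs
            # page loads when the app itself only sees plain HTTP behind
            # an HTTPS-terminating proxy.
            scheme = "https" if "https" in self.base_url else "http"
            return url_for(self.endpoint("specs"), _external=True, _scheme=scheme)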
None","function_tokens":["def","get_current_user_id","(","args",")",":","if","args",".","get","(","\"user_id\"",")",":","return","decode_string_id","(","args","[","\"user_id\"","]",")","return","None"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/api\/v1\/helpers.py#L329-L333"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/api\/v1\/utils\/resolve_url.py","language":"python","identifier":"resolve_url","parameters":"(session, url)","argument_list":"","return_statement":"return None","docstring":"Resolves an Audius URL into the cannonical API route.\n Accepts fully formed urls as well as just url paths.","docstring_summary":"Resolves an Audius URL into the cannonical API route.\n Accepts fully formed urls as well as just url paths.","docstring_tokens":["Resolves","an","Audius","URL","into","the","cannonical","API","route",".","Accepts","fully","formed","urls","as","well","as","just","url","paths","."],"function":"def resolve_url(session, url):\n \"\"\"\n Resolves an Audius URL into the cannonical API route.\n Accepts fully formed urls as well as just url paths.\n \"\"\"\n parsed = urlparse(url)\n # Will strip out any preceding protocol & domain (e.g. https:\/\/audius.co)\n path = parsed.path\n\n match = track_url_regex.match(path)\n if match:\n slug = match.group(\"slug\")\n handle = match.group(\"handle\")\n return ns_url_for(tracks_ns, \"track_by_route\", slug=slug, handle=handle)\n\n match = playlist_url_regex.match(path)\n if match:\n playlist_id = match.group(\"id\")\n hashed_id = encode_int_id(int(playlist_id))\n return ns_url_for(playlists_ns, \"playlist\", playlist_id=hashed_id)\n\n match = user_url_regex.match(path)\n if match:\n handle = match.group(\"handle\")\n user = (\n session.query(User)\n .filter(User.handle_lc == handle.lower(), User.is_current == True)\n .one()\n )\n hashed_id = encode_int_id(user.user_id)\n return ns_url_for(users_ns, \"user\", user_id=hashed_id)\n\n return None","function_tokens":["def","resolve_url","(","session",",","url",")",":","parsed","=","urlparse","(","url",")","# Will strip out any preceding protocol & domain (e.g. 
https:\/\/audius.co)","path","=","parsed",".","path","match","=","track_url_regex",".","match","(","path",")","if","match",":","slug","=","match",".","group","(","\"slug\"",")","handle","=","match",".","group","(","\"handle\"",")","return","ns_url_for","(","tracks_ns",",","\"track_by_route\"",",","slug","=","slug",",","handle","=","handle",")","match","=","playlist_url_regex",".","match","(","path",")","if","match",":","playlist_id","=","match",".","group","(","\"id\"",")","hashed_id","=","encode_int_id","(","int","(","playlist_id",")",")","return","ns_url_for","(","playlists_ns",",","\"playlist\"",",","playlist_id","=","hashed_id",")","match","=","user_url_regex",".","match","(","path",")","if","match",":","handle","=","match",".","group","(","\"handle\"",")","user","=","(","session",".","query","(","User",")",".","filter","(","User",".","handle_lc","==","handle",".","lower","(",")",",","User",".","is_current","==","True",")",".","one","(",")",")","hashed_id","=","encode_int_id","(","user",".","user_id",")","return","ns_url_for","(","users_ns",",","\"user\"",",","user_id","=","hashed_id",")","return","None"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/api\/v1\/utils\/resolve_url.py#L23-L55"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/filesystem.py","language":"python","identifier":"get_filesystem_size","parameters":"(**kwargs)","argument_list":"","return_statement":"return disk.total","docstring":"Gets the size of the entire filesystem (bytes)\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets the size of the entire filesystem (bytes)","docstring_tokens":["Gets","the","size","of","the","entire","filesystem","(","bytes",")"],"function":"def get_filesystem_size(**kwargs):\n \"\"\"\n Gets the size of the entire filesystem (bytes)\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n disk = None\n try:\n disk = psutil.disk_usage(\"\/var\/k8s\")\n except:\n disk = psutil.disk_usage(\"\/\")\n\n return disk.total","function_tokens":["def","get_filesystem_size","(","*","*","kwargs",")",":","disk","=","None","try",":","disk","=","psutil",".","disk_usage","(","\"\/var\/k8s\"",")","except",":","disk","=","psutil",".","disk_usage","(","\"\/\"",")","return","disk",".","total"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/filesystem.py#L7-L21"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/filesystem.py","language":"python","identifier":"get_filesystem_used","parameters":"(**kwargs)","argument_list":"","return_statement":"return disk.used","docstring":"Gets the used portion of the filesystem (bytes)\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets the used portion of the filesystem (bytes)","docstring_tokens":["Gets","the","used","portion","of","the","filesystem","(","bytes",")"],"function":"def get_filesystem_used(**kwargs):\n \"\"\"\n Gets the used portion of the filesystem (bytes)\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n disk = None\n try:\n disk = psutil.disk_usage(\"\/var\/k8s\")\n except:\n disk = psutil.disk_usage(\"\/\")\n\n return 
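resolve_url above works by matching the URL path against three route regexes and rebuilding the canonical API route, hashing integer IDs on the way out. The real track_url_regex / playlist_url_regex / user_url_regex are defined elsewhere in resolve_url.py, so the pattern below is an illustrative assumption, not the repo's actual regex:

    import re
    from urllib.parse import urlparse

    # Hypothetical pattern standing in for the repo's track_url_regex.
    track_url_regex = re.compile(r"^/(?P<handle>[^/]+)/(?P<slug>[^/]+)$")

    def classify(url):
        # urlparse().path strips any protocol & domain, so full URLs and
        # bare paths resolve identically, as the docstring promises.
        path = urlparse(url).path
        match = track_url_regex.match(path)
        if match:
            return ("track", match.group("handle"), match.group("slug"))
        return None

    assert classify("https://audius.co/artist/my-song") == ("track", "artist", "my-song")
    assert classify("/artist/my-song") == ("track", "artist", "my-song")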
disk.used","function_tokens":["def","get_filesystem_used","(","*","*","kwargs",")",":","disk","=","None","try",":","disk","=","psutil",".","disk_usage","(","\"\/var\/k8s\"",")","except",":","disk","=","psutil",".","disk_usage","(","\"\/\"",")","return","disk",".","used"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/filesystem.py#L24-L38"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/network.py","language":"python","identifier":"get_received_bytes_per_sec","parameters":"(**kwargs)","argument_list":"","return_statement":"return rx_sec","docstring":"Gets the rate of received bytes per sec over the network\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets the rate of received bytes per sec over the network","docstring_tokens":["Gets","the","rate","of","received","bytes","per","sec","over","the","network"],"function":"def get_received_bytes_per_sec(**kwargs):\n \"\"\"\n Gets the rate of received bytes per sec over the network\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n # pylint: disable=W0603\n global bytes_recv, bytes_recv_time\n\n net_io = psutil.net_io_counters()\n recv = net_io.bytes_recv\n t = time.time()\n\n rx_sec = 0\n if bytes_recv and bytes_recv_time:\n rx_sec = (recv - bytes_recv) \/ float(t - bytes_recv_time)\n\n bytes_recv = recv\n bytes_recv_time = t\n\n return rx_sec","function_tokens":["def","get_received_bytes_per_sec","(","*","*","kwargs",")",":","# pylint: disable=W0603","global","bytes_recv",",","bytes_recv_time","net_io","=","psutil",".","net_io_counters","(",")","recv","=","net_io",".","bytes_recv","t","=","time",".","time","(",")","rx_sec","=","0","if","bytes_recv","and","bytes_recv_time",":","rx_sec","=","(","recv","-","bytes_recv",")","\/","float","(","t","-","bytes_recv_time",")","bytes_recv","=","recv","bytes_recv_time","=","t","return","rx_sec"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/network.py#L12-L34"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/network.py","language":"python","identifier":"get_transferred_bytes_per_sec","parameters":"(**kwargs)","argument_list":"","return_statement":"return tx_sec","docstring":"Gets the rate of transferred bytes per sec over the network\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets the rate of transferred bytes per sec over the network","docstring_tokens":["Gets","the","rate","of","transferred","bytes","per","sec","over","the","network"],"function":"def get_transferred_bytes_per_sec(**kwargs):\n \"\"\"\n Gets the rate of transferred bytes per sec over the network\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n # pylint: disable=W0603\n global bytes_sent, bytes_sent_time\n\n net_io = psutil.net_io_counters()\n sent = net_io.bytes_sent\n t = time.time()\n\n tx_sec = 0\n if bytes_sent and bytes_sent_time:\n tx_sec = (sent - bytes_sent) \/ float(t - bytes_sent_time)\n\n bytes_sent = sent\n bytes_sent_time = t\n return tx_sec","function_tokens":["def","get_transferred_bytes_per_sec","(","*","*","kwargs",")",":","# pylint: 
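Two patterns recur in the monitor records above: the filesystem monitors probe the k8s volume mount /var/k8s and fall back to / (via a bare except in the source; catching OSError is the narrower equivalent), and the network monitors derive a rate by diffing counters stashed in module-level globals between calls. A combined sketch, assuming psutil is available:

    import time
    import psutil

    def filesystem_total_bytes():
        # Prefer the k8s volume mount when present, else the root fs.
        try:
            disk = psutil.disk_usage("/var/k8s")
        except OSError:
            disk = psutil.disk_usage("/")
        return disk.total

    # Module-level state, as in network.py; the first call yields 0 by design.
    _last_recv = None
    _last_time = None

    def received_bytes_per_sec():
        global _last_recv, _last_time
        recv = psutil.net_io_counters().bytes_recv
        now = time.time()
        rate = 0.0
        if _last_recv is not None and _last_time is not None:
            rate = (recv - _last_recv) / (now - _last_time)
        _last_recv, _last_time = recv, now
        return rate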
disable=W0603","global","bytes_sent",",","bytes_sent_time","net_io","=","psutil",".","net_io_counters","(",")","sent","=","net_io",".","bytes_sent","t","=","time",".","time","(",")","tx_sec","=","0","if","bytes_sent","and","bytes_sent_time",":","tx_sec","=","(","sent","-","bytes_sent",")","\/","float","(","t","-","bytes_sent_time",")","bytes_sent","=","sent","bytes_sent_time","=","t","return","tx_sec"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/network.py#L37-L58"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/monitors.py","language":"python","identifier":"parse_value","parameters":"(monitor, value)","argument_list":"","return_statement":"","docstring":"Parses a string value into the corresponding type\n\n Args:\n monitor: dict The monitor dictionary with name, func, ttl, and type\n value: string The value to parse","docstring_summary":"Parses a string value into the corresponding type","docstring_tokens":["Parses","a","string","value","into","the","corresponding","type"],"function":"def parse_value(monitor, value):\n \"\"\"\n Parses a string value into the corresponding type\n\n Args:\n monitor: dict The monitor dictionary with name, func, ttl, and type\n value: string The value to parse\n \"\"\"\n try:\n if str(value) == \"None\":\n return None\n # pylint: disable=R1705\n if monitor[monitor_names.type] == \"bool\":\n return value == \"True\"\n elif monitor[monitor_names.type] == \"int\":\n return int(value)\n elif monitor[monitor_names.type] == \"float\":\n return float(value)\n elif monitor[monitor_names.type] == \"json\":\n return json.loads(value)\n else: # string\n return str(value)\n except Exception:\n return str(value)","function_tokens":["def","parse_value","(","monitor",",","value",")",":","try",":","if","str","(","value",")","==","\"None\"",":","return","None","# pylint: disable=R1705","if","monitor","[","monitor_names",".","type","]","==","\"bool\"",":","return","value","==","\"True\"","elif","monitor","[","monitor_names",".","type","]","==","\"int\"",":","return","int","(","value",")","elif","monitor","[","monitor_names",".","type","]","==","\"float\"",":","return","float","(","value",")","elif","monitor","[","monitor_names",".","type","]","==","\"json\"",":","return","json",".","loads","(","value",")","else",":","# string","return","str","(","value",")","except","Exception",":","return","str","(","value",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/monitors.py#L183-L206"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/monitors.py","language":"python","identifier":"get_monitors","parameters":"(monitors)","argument_list":"","return_statement":"return ret","docstring":"Gets monitor values\n\n Args:\n monitors: list(dict) The list of monitors to get values of","docstring_summary":"Gets monitor values","docstring_tokens":["Gets","monitor","values"],"function":"def get_monitors(monitors):\n \"\"\"\n Gets monitor values\n\n Args:\n monitors: list(dict) The list of monitors to get values of\n \"\"\"\n pipe = redis.pipeline()\n for monitor in monitors:\n key = get_monitor_redis_key(monitor)\n pipe.get(key)\n ret = {}\n results = pipe.execute()\n for i, result in enumerate(results):\n
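parse_value is the read-side half of the monitor cache: redis hands back strings (or None), the monitor's declared type drives the decode, and any failure degrades to the raw string. A runnable sketch, with a minimal stand-in for the repo's monitor_names constants module:

    import json

    class monitor_names:  # minimal stand-in for src.monitors.monitor_names
        type = "type"

    def parse_value(monitor, value):
        try:
            if str(value) == "None":
                return None
            if monitor[monitor_names.type] == "bool":
                return value == "True"
            if monitor[monitor_names.type] == "int":
                return int(value)
            if monitor[monitor_names.type] == "float":
                return float(value)
            if monitor[monitor_names.type] == "json":
                return json.loads(value)
            return str(value)  # default: string
        except Exception:
            # Malformed cache values degrade to the raw string.
            return str(value)

    assert parse_value({"type": "int"}, "42") == 42
    assert parse_value({"type": "bool"}, "True") is True
    assert parse_value({"type": "int"}, "not-a-number") == "not-a-number"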
ret[monitors[i][monitor_names.name]] = parse_value(monitors[i], result)\n return ret","function_tokens":["def","get_monitors","(","monitors",")",":","pipe","=","redis",".","pipeline","(",")","for","monitor","in","monitors",":","key","=","get_monitor_redis_key","(","monitor",")","pipe",".","get","(","key",")","ret","=","{","}","results","=","pipe",".","execute","(",")","for","i",",","result","in","enumerate","(","results",")",":","ret","[","monitors","[","i","]","[","monitor_names",".","name","]","]","=","parse_value","(","monitors","[","i","]",",","result",")","return","ret"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/monitors.py#L209-L224"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/memory.py","language":"python","identifier":"get_total_memory","parameters":"(**kwargs)","argument_list":"","return_statement":"return mem.total","docstring":"Gets the total virtual memory of the system\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets the total virtual memory of the system","docstring_tokens":["Gets","the","total","virtual","memory","of","the","system"],"function":"def get_total_memory(**kwargs):\n \"\"\"\n Gets the total virtual memory of the system\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n mem = psutil.virtual_memory()\n return mem.total","function_tokens":["def","get_total_memory","(","*","*","kwargs",")",":","mem","=","psutil",".","virtual_memory","(",")","return","mem",".","total"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/memory.py#L4-L13"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/memory.py","language":"python","identifier":"get_used_memory","parameters":"(**kwargs)","argument_list":"","return_statement":"return mem.used","docstring":"Gets the used virtual memory of the system\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets the used virtual memory of the system","docstring_tokens":["Gets","the","used","virtual","memory","of","the","system"],"function":"def get_used_memory(**kwargs):\n \"\"\"\n Gets the used virtual memory of the system\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n mem = psutil.virtual_memory()\n return mem.used","function_tokens":["def","get_used_memory","(","*","*","kwargs",")",":","mem","=","psutil",".","virtual_memory","(",")","return","mem",".","used"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/memory.py#L16-L25"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/monitoring_queue.py","language":"python","identifier":"refresh","parameters":"(redis, db, monitor)","argument_list":"","return_statement":"","docstring":"Refreshes the cached value for a
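get_monitors reads every cached value in a single redis round trip by queueing one GET per monitor on a pipeline; execute() returns results in queue order, which is what lets the index-based mapping back to monitor names work. A sketch against a plain redis-py client:

    import redis as redis_lib

    redis = redis_lib.Redis()  # stand-in for the repo's shared client

    def get_values(keys):
        # One network round trip for N keys instead of N round trips.
        pipe = redis.pipeline()
        for key in keys:
            pipe.get(key)
        results = pipe.execute()  # ordered to match the queued GETs
        return dict(zip(keys, results))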
monitor","docstring_tokens":["Refreshes","the","cached","value","for","a","monitor"],"function":"def refresh(redis, db, monitor):\n \"\"\"\n Refreshes the cached value for a monitor\n\n Args:\n redis: Singleton redis instance\n db: Singleton database instance\n monitor: dict The monitor dictionary qwith name, func, ttl, and type\n \"\"\"\n key = get_monitor_redis_key(monitor)\n ttl_key = f\"{key}:ttl\"\n\n is_fresh = redis.get(ttl_key)\n if is_fresh:\n return\n\n # Invoke the monitor function with kwargs for db and redis.\n # This allows any monitor to access the db and\/or redis connection.\n value = monitor[monitor_names.func](db=db, redis=redis)\n logger.info(\n f\"monitoring_queue.py | Computed value for {monitor[monitor_names.name]}\"\n )\n\n redis.set(key, value)\n\n if \"ttl\" in monitor:\n # Set a TTL (in seconds) key to track when this value needs refreshing.\n # We store a separate TTL key rather than expiring the value itself\n # so that in the case of an error, the current value can still be read\n redis.set(ttl_key, 1, monitor[\"ttl\"])","function_tokens":["def","refresh","(","redis",",","db",",","monitor",")",":","key","=","get_monitor_redis_key","(","monitor",")","ttl_key","=","f\"{key}:ttl\"","is_fresh","=","redis",".","get","(","ttl_key",")","if","is_fresh",":","return","# Invoke the monitor function with kwargs for db and redis.","# This allows any monitor to access the db and\/or redis connection.","value","=","monitor","[","monitor_names",".","func","]","(","db","=","db",",","redis","=","redis",")","logger",".","info","(","f\"monitoring_queue.py | Computed value for {monitor[monitor_names.name]}\"",")","redis",".","set","(","key",",","value",")","if","\"ttl\"","in","monitor",":","# Set a TTL (in seconds) key to track when this value needs refreshing.","# We store a separate TTL key rather than expiring the value itself","# so that in the case of an error, the current value can still be read","redis",".","set","(","ttl_key",",","1",",","monitor","[","\"ttl\"","]",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/monitoring_queue.py#L11-L40"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/monitoring_queue.py","language":"python","identifier":"monitoring_queue_task","parameters":"(self)","argument_list":"","return_statement":"","docstring":"A persistent cron-style queue that periodically monitors various\n health metrics and caches values in redis.\n\n The queue runs every minute on cron, but individual monitors establish\n their own freshness\/refresh rate to operate on.\n 1. The queue spins up and for each monitor checks to see if it needs a refresh\n 2. Refreshes the value and stores the update in redis","docstring_summary":"A persistent cron-style queue that periodically monitors various\n health metrics and caches values in redis.","docstring_tokens":["A","persistent","cron","-","style","queue","that","periodically","monitors","various","health","metrics","and","caches","values","in","redis","."],"function":"def monitoring_queue_task(self):\n \"\"\"\n A persistent cron-style queue that periodically monitors various\n health metrics and caches values in redis.\n\n The queue runs every minute on cron, but individual monitors establish\n their own freshness\/refresh rate to operate on.\n 1. The queue spins up and for each monitor checks to see if it needs a refresh\n 2. 
Refreshes the value and stores the update in redis\n \"\"\"\n db = monitoring_queue_task.db\n redis = monitoring_queue_task.redis\n\n have_lock = False\n update_lock = redis.lock(\"monitoring_queue_lock\", timeout=2000)\n\n try:\n have_lock = update_lock.acquire(blocking=False)\n\n if have_lock:\n start_time = time.time()\n\n for monitor in MONITORS.values():\n try:\n refresh(redis, db, monitor)\n except Exception as e:\n logger.warning(\n f\"monitoring_queue.py | Error computing {monitor['name']} {e}\"\n )\n\n end_time = time.time()\n logger.info(\n f\"monitoring_queue.py | Finished monitoring_queue in {end_time - start_time} seconds\"\n )\n else:\n logger.info(\"monitoring_queue.py | Failed to acquire lock\")\n except Exception as e:\n logger.error(\"monitoring_queue.py | Fatal error in main loop\", exc_info=True)\n raise e\n finally:\n if have_lock:\n update_lock.release()","function_tokens":["def","monitoring_queue_task","(","self",")",":","db","=","monitoring_queue_task",".","db","redis","=","monitoring_queue_task",".","redis","have_lock","=","False","update_lock","=","redis",".","lock","(","\"monitoring_queue_lock\"",",","timeout","=","2000",")","try",":","have_lock","=","update_lock",".","acquire","(","blocking","=","False",")","if","have_lock",":","start_time","=","time",".","time","(",")","for","monitor","in","MONITORS",".","values","(",")",":","try",":","refresh","(","redis",",","db",",","monitor",")","except","Exception","as","e",":","logger",".","warning","(","f\"monitoring_queue.py | Error computing {monitor['name']} {e}\"",")","end_time","=","time",".","time","(",")","logger",".","info","(","f\"monitoring_queue.py | Finished monitoring_queue in {end_time - start_time} seconds\"",")","else",":","logger",".","info","(","\"monitoring_queue.py | Failed to acquire lock\"",")","except","Exception","as","e",":","logger",".","error","(","\"monitoring_queue.py | Fatal error in main loop\"",",","exc_info","=","True",")","raise","e","finally",":","if","have_lock",":","update_lock",".","release","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/monitoring_queue.py#L44-L85"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/database.py","language":"python","identifier":"get_database_liveness","parameters":"(**kwargs)","argument_list":"","return_statement":"","docstring":"Gets database liveness with a `select 1` query\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets database liveness with a `select 1` query","docstring_tokens":["Gets","database","liveness","with","a","select","1","query"],"function":"def get_database_liveness(**kwargs):\n \"\"\"\n Gets database liveness with a `select 1` query\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n db = kwargs[\"db\"]\n try:\n with db.scoped_session() as session:\n q = sqlalchemy.text(\"SELECT 1\")\n session.execute(q).fetchone()\n return str(True)\n except Exception:\n return str(False)","function_tokens":["def","get_database_liveness","(","*","*","kwargs",")",":","db","=","kwargs","[","\"db\"","]","try",":","with","db",".","scoped_session","(",")","as","session",":","q","=","sqlalchemy",".","text","(","\"SELECT 
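monitoring_queue_task wraps the whole sweep in a non-blocking redis lock, so overlapping cron ticks skip work instead of piling up, and the finally clause releases only a lock this worker actually acquired. The control-flow skeleton:

    def run_exclusive(redis, do_work):
        have_lock = False
        lock = redis.lock("monitoring_queue_lock", timeout=2000)
        try:
            have_lock = lock.acquire(blocking=False)
            if have_lock:
                do_work()
            # else: another worker is mid-sweep; skip this tick
        finally:
            if have_lock:
                lock.release()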
1\"",")","session",".","execute","(","q",")",".","fetchone","(",")","return","str","(","True",")","except","Exception",":","return","str","(","False",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/database.py#L9-L24"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/database.py","language":"python","identifier":"get_database_size","parameters":"(**kwargs)","argument_list":"","return_statement":"","docstring":"Gets the size of the database in bytes\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets the size of the database in bytes","docstring_tokens":["Gets","the","size","of","the","database","in","bytes"],"function":"def get_database_size(**kwargs):\n \"\"\"\n Gets the size of the database in bytes\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n db = kwargs[\"db\"]\n with db.scoped_session() as session:\n q = sqlalchemy.text(\"SELECT pg_database_size(current_database())\")\n res = session.execute(q).fetchone()[0]\n return res","function_tokens":["def","get_database_size","(","*","*","kwargs",")",":","db","=","kwargs","[","\"db\"","]","with","db",".","scoped_session","(",")","as","session",":","q","=","sqlalchemy",".","text","(","\"SELECT pg_database_size(current_database())\"",")","res","=","session",".","execute","(","q",")",".","fetchone","(",")","[","0","]","return","res"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/database.py#L27-L39"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/database.py","language":"python","identifier":"get_database_connections","parameters":"(**kwargs)","argument_list":"","return_statement":"","docstring":"Gets the number of active database connections\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets the number of active database connections","docstring_tokens":["Gets","the","number","of","active","database","connections"],"function":"def get_database_connections(**kwargs):\n \"\"\"\n Gets the number of active database connections\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n db = kwargs[\"db\"]\n with db.scoped_session() as session:\n q = sqlalchemy.text(\n \"SELECT numbackends from pg_stat_database where datname = current_database()\"\n )\n res = session.execute(q).fetchone()[0]\n return res","function_tokens":["def","get_database_connections","(","*","*","kwargs",")",":","db","=","kwargs","[","\"db\"","]","with","db",".","scoped_session","(",")","as","session",":","q","=","sqlalchemy",".","text","(","\"SELECT numbackends from pg_stat_database where datname = current_database()\"",")","res","=","session",".","execute","(","q",")",".","fetchone","(",")","[","0","]","return","res"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/database.py#L42-L56"} 
{"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/database.py","language":"python","identifier":"get_database_connection_info","parameters":"(**kwargs)","argument_list":"","return_statement":"","docstring":"Gets full database query connection information (waits, state, query string)\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets full database query connection information (waits, state, query string)","docstring_tokens":["Gets","full","database","query","connection","information","(","waits","state","query","string",")"],"function":"def get_database_connection_info(**kwargs):\n \"\"\"\n Gets full database query connection information (waits, state, query string)\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n db = kwargs[\"db\"]\n with db.scoped_session() as session:\n q = sqlalchemy.text(\n \"select wait_event_type, wait_event, state, query, to_char(query_start, 'DD Mon YYYY HH:MI:SSPM')\"\n + 'as \"query_start\" from pg_stat_activity where datname = current_database()'\n )\n\n result = session.execute(q).fetchall()\n connection_info = [dict(row) for row in result]\n return json.dumps(connection_info)","function_tokens":["def","get_database_connection_info","(","*","*","kwargs",")",":","db","=","kwargs","[","\"db\"","]","with","db",".","scoped_session","(",")","as","session",":","q","=","sqlalchemy",".","text","(","\"select wait_event_type, wait_event, state, query, to_char(query_start, 'DD Mon YYYY HH:MI:SSPM')\"","+","'as \"query_start\" from pg_stat_activity where datname = current_database()'",")","result","=","session",".","execute","(","q",")",".","fetchall","(",")","connection_info","=","[","dict","(","row",")","for","row","in","result","]","return","json",".","dumps","(","connection_info",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/database.py#L59-L76"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/database.py","language":"python","identifier":"get_database_index_count","parameters":"(**kwargs)","argument_list":"","return_statement":"","docstring":"Gets number of indexes in the database\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets number of indexes in the database","docstring_tokens":["Gets","number","of","indexes","in","the","database"],"function":"def get_database_index_count(**kwargs):\n \"\"\"\n Gets number of indexes in the database\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n db = kwargs[\"db\"]\n with db.scoped_session() as session:\n q = sqlalchemy.text(\n \"SELECT COUNT(*) FROM pg_indexes WHERE schemaname = 'public'\"\n )\n res = session.execute(q).fetchone()[0]\n return res","function_tokens":["def","get_database_index_count","(","*","*","kwargs",")",":","db","=","kwargs","[","\"db\"","]","with","db",".","scoped_session","(",")","as","session",":","q","=","sqlalchemy",".","text","(","\"SELECT COUNT(*) FROM pg_indexes WHERE schemaname = 'public'\"",")","res","=","session",".","execute","(","q",")",".","fetchone","(",")","[","0","]","return","res"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/database.py#L79-L93"} 
{"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/database.py","language":"python","identifier":"get_database_index_info","parameters":"(**kwargs)","argument_list":"","return_statement":"","docstring":"Gets full database index information (tablename, indexname, indexdef)\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets full database index information (tablename, indexname, indexdef)","docstring_tokens":["Gets","full","database","index","information","(","tablename","indexname","indexdef",")"],"function":"def get_database_index_info(**kwargs):\n \"\"\"\n Gets full database index information (tablename, indexname, indexdef)\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n db = kwargs[\"db\"]\n with db.scoped_session() as session:\n q = sqlalchemy.text(\n \"SELECT tablename, indexname, indexdef FROM pg_indexes WHERE schemaname = 'public'\"\n + \"ORDER BY tablename, indexname\"\n )\n result = session.execute(q).fetchall()\n connection_info = [dict(row) for row in result]\n return json.dumps(connection_info)","function_tokens":["def","get_database_index_info","(","*","*","kwargs",")",":","db","=","kwargs","[","\"db\"","]","with","db",".","scoped_session","(",")","as","session",":","q","=","sqlalchemy",".","text","(","\"SELECT tablename, indexname, indexdef FROM pg_indexes WHERE schemaname = 'public'\"","+","\"ORDER BY tablename, indexname\"",")","result","=","session",".","execute","(","q",")",".","fetchall","(",")","connection_info","=","[","dict","(","row",")","for","row","in","result","]","return","json",".","dumps","(","connection_info",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/database.py#L96-L112"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/database.py","language":"python","identifier":"get_table_size_info","parameters":"(**kwargs)","argument_list":"","return_statement":"","docstring":"Gets table information (number of rows, data size).\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets table information (number of rows, data size).","docstring_tokens":["Gets","table","information","(","number","of","rows","data","size",")","."],"function":"def get_table_size_info(**kwargs):\n \"\"\"\n Gets table information (number of rows, data size).\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n db = kwargs[\"db\"]\n with db.scoped_session() as session:\n sql_statement = \"\"\"SELECT c.relname AS table_name,\n c.reltuples::text AS rows,\n pg_size_pretty(pg_relation_size(s.relid)) AS data_size\n FROM pg_class c\n JOIN pg_catalog.pg_statio_user_tables s ON s.relname = c.relname\n WHERE c.relkind = 'r'\n ORDER BY c.reltuples DESC;\"\"\"\n\n q = sqlalchemy.text(sql_statement)\n result = session.execute(q).fetchall()\n table_size_info = [dict(row) for row in result]\n return json.dumps(table_size_info)","function_tokens":["def","get_table_size_info","(","*","*","kwargs",")",":","db","=","kwargs","[","\"db\"","]","with","db",".","scoped_session","(",")","as","session",":","sql_statement","=","\"\"\"SELECT c.relname AS table_name,\n c.reltuples::text AS rows,\n pg_size_pretty(pg_relation_size(s.relid)) AS data_size\n FROM pg_class c\n JOIN 
pg_catalog.pg_statio_user_tables s ON s.relname = c.relname\n WHERE c.relkind = 'r'\n ORDER BY c.reltuples DESC;\"\"\"","q","=","sqlalchemy",".","text","(","sql_statement",")","result","=","session",".","execute","(","q",")",".","fetchall","(",")","table_size_info","=","[","dict","(","row",")","for","row","in","result","]","return","json",".","dumps","(","table_size_info",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/database.py#L115-L136"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/database.py","language":"python","identifier":"get_frequent_queries","parameters":"(**kwargs)","argument_list":"","return_statement":"","docstring":"Gets the most frequent queries\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets the most frequent queries","docstring_tokens":["Gets","the","most","frequent","queries"],"function":"def get_frequent_queries(**kwargs):\n \"\"\"\n Gets the most frequent queries\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n db = kwargs[\"db\"]\n with db.scoped_session() as session:\n q = sqlalchemy.text(\n \"SELECT query, calls FROM pg_stat_statements ORDER BY calls DESC LIMIT 100\"\n )\n result = session.execute(q).fetchall()\n frequent_queries = [dict(row) for row in result]\n return json.dumps(frequent_queries)","function_tokens":["def","get_frequent_queries","(","*","*","kwargs",")",":","db","=","kwargs","[","\"db\"","]","with","db",".","scoped_session","(",")","as","session",":","q","=","sqlalchemy",".","text","(","\"SELECT query, calls FROM pg_stat_statements ORDER BY calls DESC LIMIT 100\"",")","result","=","session",".","execute","(","q",")",".","fetchall","(",")","frequent_queries","=","[","dict","(","row",")","for","row","in","result","]","return","json",".","dumps","(","frequent_queries",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/database.py#L143-L158"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/database.py","language":"python","identifier":"get_slow_queries","parameters":"(**kwargs)","argument_list":"","return_statement":"","docstring":"Gets the queries with the highest average latency\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets the queries with the highest average latency","docstring_tokens":["Gets","the","queries","with","the","highest","average","latency"],"function":"def get_slow_queries(**kwargs):\n \"\"\"\n Gets the queries with the highest average latency\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n db = kwargs[\"db\"]\n with db.scoped_session() as session:\n q = sqlalchemy.text(\n \"SELECT query, mean_time FROM pg_stat_statements ORDER BY mean_time DESC LIMIT 100\"\n )\n result = session.execute(q).fetchall()\n slow_queries = [dict(row) for row in result]\n\n return json.dumps(slow_queries)","function_tokens":["def","get_slow_queries","(","*","*","kwargs",")",":","db","=","kwargs","[","\"db\"","]","with","db",".","scoped_session","(",")","as","session",":","q","=","sqlalchemy",".","text","(","\"SELECT query, mean_time FROM pg_stat_statements ORDER BY mean_time DESC LIMIT 
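get_frequent_queries and get_slow_queries above both depend on the pg_stat_statements extension; if it is not preloaded and created in the target database, these monitors raise rather than return. A parameterized variant of the slow-query read (mean_time is the pre-Postgres-13 column name the repo targets):

    import json
    import sqlalchemy

    def slow_queries_json(session, limit=100):
        # Requires the pg_stat_statements extension to be installed and loaded.
        q = sqlalchemy.text(
            "SELECT query, mean_time FROM pg_stat_statements "
            "ORDER BY mean_time DESC LIMIT :limit"
        )
        rows = session.execute(q, {"limit": limit}).fetchall()
        return json.dumps([dict(row) for row in rows])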
100\"",")","result","=","session",".","execute","(","q",")",".","fetchall","(",")","slow_queries","=","[","dict","(","row",")","for","row","in","result","]","return","json",".","dumps","(","slow_queries",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/database.py#L161-L177"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/redis.py","language":"python","identifier":"get_redis_num_keys","parameters":"(**kwargs)","argument_list":"","return_statement":"return num_keys","docstring":"Gets the total number of keys\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets the total number of keys","docstring_tokens":["Gets","the","total","number","of","keys"],"function":"def get_redis_num_keys(**kwargs):\n \"\"\"\n Gets the total number of keys\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n redis = kwargs[\"redis\"]\n num_keys = redis.dbsize()\n return num_keys","function_tokens":["def","get_redis_num_keys","(","*","*","kwargs",")",":","redis","=","kwargs","[","\"redis\"","]","num_keys","=","redis",".","dbsize","(",")","return","num_keys"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/redis.py#L4-L14"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/redis.py","language":"python","identifier":"get_redis_used_memory","parameters":"(**kwargs)","argument_list":"","return_statement":"return info[\"used_memory\"]","docstring":"Gets the total memory used by redis\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets the total memory used by redis","docstring_tokens":["Gets","the","total","memory","used","by","redis"],"function":"def get_redis_used_memory(**kwargs):\n \"\"\"\n Gets the total memory used by redis\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n redis = kwargs[\"redis\"]\n info = redis.info()\n return info[\"used_memory\"]","function_tokens":["def","get_redis_used_memory","(","*","*","kwargs",")",":","redis","=","kwargs","[","\"redis\"","]","info","=","redis",".","info","(",")","return","info","[","\"used_memory\"","]"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/redis.py#L17-L27"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/monitors\/redis.py","language":"python","identifier":"get_redis_total_memory","parameters":"(**kwargs)","argument_list":"","return_statement":"return mem.total","docstring":"Gets the total memory available to redis\n\n Kwargs:\n db: global database instance\n redis: global redis instance","docstring_summary":"Gets the total memory available to redis","docstring_tokens":["Gets","the","total","memory","available","to","redis"],"function":"def get_redis_total_memory(**kwargs):\n \"\"\"\n Gets the total memory available to redis\n\n Kwargs:\n db: global database instance\n redis: global redis instance\n \"\"\"\n # The current version of redis used does not\n # support `total_system_memory`.\n #\n # Future versions of redis (> 3.2) add support\n # TODO: change this code to the 
following after upgrading.\n # redis = kwargs['redis']\n # info = redis.info()\n # return info['total_system_memory']\n #\n mem = psutil.virtual_memory()\n return mem.total","function_tokens":["def","get_redis_total_memory","(","*","*","kwargs",")",":","# The current version of redis used does not","# support `total_system_memory`.","#","# Future versions of redis (> 3.2) add support","# TODO: change this code to the following after upgrading.","# redis = kwargs['redis']","# info = redis.info()","# return info['total_system_memory']","#","mem","=","psutil",".","virtual_memory","(",")","return","mem",".","total"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/monitors\/redis.py#L30-L48"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/user_library.py","language":"python","identifier":"user_library_state_update","parameters":"(\n self,\n update_task: DatabaseTask,\n session: Session,\n user_library_factory_txs,\n block_number,\n block_timestamp,\n block_hash,\n _ipfs_metadata, # prefix unused args with underscore to prevent pylint\n _blacklisted_cids,\n)","argument_list":"","return_statement":"return num_total_changes, empty_set","docstring":"Return Tuple containing int representing number of User Library model state changes found in transaction and empty Set (to align with fn signature of other _state_update functions.","docstring_summary":"Return Tuple containing int representing number of User Library model state changes found in transaction and empty Set (to align with fn signature of other _state_update functions.","docstring_tokens":["Return","Tuple","containing","int","representing","number","of","User","Library","model","state","changes","found","in","transaction","and","empty","Set","(","to","align","with","fn","signature","of","other","_state_update","functions","."],"function":"def user_library_state_update(\n self,\n update_task: DatabaseTask,\n session: Session,\n user_library_factory_txs,\n block_number,\n block_timestamp,\n block_hash,\n _ipfs_metadata, # prefix unused args with underscore to prevent pylint\n _blacklisted_cids,\n) -> Tuple[int, Set]:\n \"\"\"Return Tuple containing int representing number of User Library model state changes found in transaction and empty Set (to align with fn signature of other _state_update functions.\"\"\"\n empty_set: Set[int] = set()\n num_total_changes = 0\n if not user_library_factory_txs:\n return num_total_changes, empty_set\n\n user_library_abi = update_task.abi_values[\"UserLibraryFactory\"][\"abi\"]\n user_library_contract = update_task.web3.eth.contract(\n address=get_contract_addresses()[\"user_library_factory\"], abi=user_library_abi\n )\n challenge_bus = update_task.challenge_event_bus\n block_datetime = datetime.utcfromtimestamp(block_timestamp)\n\n track_save_state_changes: Dict[int, Dict[int, Save]] = {}\n playlist_save_state_changes: Dict[int, Dict[int, Save]] = {}\n\n for tx_receipt in user_library_factory_txs:\n try:\n add_track_save(\n self,\n user_library_contract,\n update_task,\n session,\n tx_receipt,\n block_number,\n block_datetime,\n track_save_state_changes,\n )\n\n add_playlist_save(\n self,\n user_library_contract,\n update_task,\n session,\n tx_receipt,\n block_number,\n block_datetime,\n playlist_save_state_changes,\n )\n\n delete_track_save(\n self,\n user_library_contract,\n update_task,\n session,\n tx_receipt,\n block_number,\n 
block_datetime,\n track_save_state_changes,\n )\n\n delete_playlist_save(\n self,\n user_library_contract,\n update_task,\n session,\n tx_receipt,\n block_number,\n block_datetime,\n playlist_save_state_changes,\n )\n except Exception as e:\n logger.info(\"Error in user library transaction\")\n txhash = update_task.web3.toHex(tx_receipt.transactionHash)\n blockhash = update_task.web3.toHex(block_hash)\n raise IndexingError(\n \"user_library\", block_number, blockhash, txhash, str(e)\n ) from e\n\n for user_id, track_ids in track_save_state_changes.items():\n for track_id in track_ids:\n invalidate_old_save(session, user_id, track_id, SaveType.track)\n save = track_ids[track_id]\n session.add(save)\n dispatch_favorite(challenge_bus, save, block_number)\n num_total_changes += len(track_ids)\n\n for user_id, playlist_ids in playlist_save_state_changes.items():\n for playlist_id in playlist_ids:\n invalidate_old_save(\n session,\n user_id,\n playlist_id,\n playlist_ids[playlist_id].save_type,\n )\n save = playlist_ids[playlist_id]\n session.add(save)\n dispatch_favorite(challenge_bus, save, block_number)\n num_total_changes += len(playlist_ids)\n\n return num_total_changes, empty_set","function_tokens":["def","user_library_state_update","(","self",",","update_task",":","DatabaseTask",",","session",":","Session",",","user_library_factory_txs",",","block_number",",","block_timestamp",",","block_hash",",","_ipfs_metadata",",","# prefix unused args with underscore to prevent pylint","_blacklisted_cids",",",")","->","Tuple","[","int",",","Set","]",":","empty_set",":","Set","[","int","]","=","set","(",")","num_total_changes","=","0","if","not","user_library_factory_txs",":","return","num_total_changes",",","empty_set","user_library_abi","=","update_task",".","abi_values","[","\"UserLibraryFactory\"","]","[","\"abi\"","]","user_library_contract","=","update_task",".","web3",".","eth",".","contract","(","address","=","get_contract_addresses","(",")","[","\"user_library_factory\"","]",",","abi","=","user_library_abi",")","challenge_bus","=","update_task",".","challenge_event_bus","block_datetime","=","datetime",".","utcfromtimestamp","(","block_timestamp",")","track_save_state_changes",":","Dict","[","int",",","Dict","[","int",",","Save","]","]","=","{","}","playlist_save_state_changes",":","Dict","[","int",",","Dict","[","int",",","Save","]","]","=","{","}","for","tx_receipt","in","user_library_factory_txs",":","try",":","add_track_save","(","self",",","user_library_contract",",","update_task",",","session",",","tx_receipt",",","block_number",",","block_datetime",",","track_save_state_changes",",",")","add_playlist_save","(","self",",","user_library_contract",",","update_task",",","session",",","tx_receipt",",","block_number",",","block_datetime",",","playlist_save_state_changes",",",")","delete_track_save","(","self",",","user_library_contract",",","update_task",",","session",",","tx_receipt",",","block_number",",","block_datetime",",","track_save_state_changes",",",")","delete_playlist_save","(","self",",","user_library_contract",",","update_task",",","session",",","tx_receipt",",","block_number",",","block_datetime",",","playlist_save_state_changes",",",")","except","Exception","as","e",":","logger",".","info","(","\"Error in user library 
transaction\"",")","txhash","=","update_task",".","web3",".","toHex","(","tx_receipt",".","transactionHash",")","blockhash","=","update_task",".","web3",".","toHex","(","block_hash",")","raise","IndexingError","(","\"user_library\"",",","block_number",",","blockhash",",","txhash",",","str","(","e",")",")","from","e","for","user_id",",","track_ids","in","track_save_state_changes",".","items","(",")",":","for","track_id","in","track_ids",":","invalidate_old_save","(","session",",","user_id",",","track_id",",","SaveType",".","track",")","save","=","track_ids","[","track_id","]","session",".","add","(","save",")","dispatch_favorite","(","challenge_bus",",","save",",","block_number",")","num_total_changes","+=","len","(","track_ids",")","for","user_id",",","playlist_ids","in","playlist_save_state_changes",".","items","(",")",":","for","playlist_id","in","playlist_ids",":","invalidate_old_save","(","session",",","user_id",",","playlist_id",",","playlist_ids","[","playlist_id","]",".","save_type",",",")","save","=","playlist_ids","[","playlist_id","]","session",".","add","(","save",")","dispatch_favorite","(","challenge_bus",",","save",",","block_number",")","num_total_changes","+=","len","(","playlist_ids",")","return","num_total_changes",",","empty_set"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/user_library.py#L16-L117"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/playlists.py","language":"python","identifier":"playlist_state_update","parameters":"(\n self,\n update_task: DatabaseTask,\n session: Session,\n playlist_factory_txs,\n block_number,\n block_timestamp,\n block_hash,\n _ipfs_metadata, # prefix unused args with underscore to prevent pylint\n _blacklisted_cids,\n)","argument_list":"","return_statement":"return num_total_changes, playlist_ids","docstring":"Return Tuple containing int representing number of Playlist model state changes found in transaction and set of processed playlist IDs.","docstring_summary":"Return Tuple containing int representing number of Playlist model state changes found in transaction and set of processed playlist IDs.","docstring_tokens":["Return","Tuple","containing","int","representing","number","of","Playlist","model","state","changes","found","in","transaction","and","set","of","processed","playlist","IDs","."],"function":"def playlist_state_update(\n self,\n update_task: DatabaseTask,\n session: Session,\n playlist_factory_txs,\n block_number,\n block_timestamp,\n block_hash,\n _ipfs_metadata, # prefix unused args with underscore to prevent pylint\n _blacklisted_cids,\n) -> Tuple[int, Set]:\n \"\"\"Return Tuple containing int representing number of Playlist model state changes found in transaction and set of processed playlist IDs.\"\"\"\n blockhash = update_task.web3.toHex(block_hash)\n num_total_changes = 0\n skipped_tx_count = 0\n # This stores the playlist_ids created or updated in the set of transactions\n playlist_ids: Set[int] = set()\n\n if not playlist_factory_txs:\n return num_total_changes, playlist_ids\n\n playlist_events_lookup: Dict[int, Dict[str, Any]] = {}\n for tx_receipt in playlist_factory_txs:\n txhash = update_task.web3.toHex(tx_receipt.transactionHash)\n for event_type in playlist_event_types_arr:\n playlist_events_tx = get_playlist_events_tx(\n update_task, event_type, tx_receipt\n )\n processedEntries = 0 # if record does not get added, do not count 
towards num_total_changes\n for entry in playlist_events_tx:\n existing_playlist_record = None\n playlist_id = helpers.get_tx_arg(entry, \"_playlistId\")\n try:\n # look up or populate existing record\n if playlist_id in playlist_events_lookup:\n existing_playlist_record = playlist_events_lookup[playlist_id][\n \"playlist\"\n ]\n else:\n existing_playlist_record = lookup_playlist_record(\n update_task, session, entry, block_number, txhash\n )\n\n # parse playlist event to add metadata to record\n playlist_record = parse_playlist_event(\n self,\n update_task,\n entry,\n event_type,\n existing_playlist_record,\n block_timestamp,\n session,\n )\n\n # process playlist record\n if playlist_record is not None:\n if playlist_id not in playlist_events_lookup:\n playlist_events_lookup[playlist_id] = {\n \"playlist\": playlist_record,\n \"events\": [],\n }\n else:\n playlist_events_lookup[playlist_id][\n \"playlist\"\n ] = playlist_record\n playlist_events_lookup[playlist_id][\"events\"].append(event_type)\n playlist_ids.add(playlist_id)\n processedEntries += 1\n except EntityMissingRequiredFieldError as e:\n logger.warning(f\"Skipping tx {txhash} with error {e}\")\n skipped_tx_count += 1\n add_node_level_skipped_transaction(\n session, block_number, blockhash, txhash\n )\n pass\n except Exception as e:\n logger.info(\"Error in parse playlist transaction\")\n raise IndexingError(\n \"playlist\", block_number, blockhash, txhash, str(e)\n ) from e\n num_total_changes += processedEntries\n\n logger.info(\n f\"index.py | playlists.py | There are {num_total_changes} events processed and {skipped_tx_count} skipped transactions.\"\n )\n\n for playlist_id, value_obj in playlist_events_lookup.items():\n logger.info(f\"index.py | playlists.py | Adding {value_obj['playlist']})\")\n if value_obj[\"events\"]:\n invalidate_old_playlist(session, playlist_id)\n session.add(value_obj[\"playlist\"])\n\n return num_total_changes, playlist_ids","function_tokens":["def","playlist_state_update","(","self",",","update_task",":","DatabaseTask",",","session",":","Session",",","playlist_factory_txs",",","block_number",",","block_timestamp",",","block_hash",",","_ipfs_metadata",",","# prefix unused args with underscore to prevent pylint","_blacklisted_cids",",",")","->","Tuple","[","int",",","Set","]",":","blockhash","=","update_task",".","web3",".","toHex","(","block_hash",")","num_total_changes","=","0","skipped_tx_count","=","0","# This stores the playlist_ids created or updated in the set of transactions","playlist_ids",":","Set","[","int","]","=","set","(",")","if","not","playlist_factory_txs",":","return","num_total_changes",",","playlist_ids","playlist_events_lookup",":","Dict","[","int",",","Dict","[","str",",","Any","]","]","=","{","}","for","tx_receipt","in","playlist_factory_txs",":","txhash","=","update_task",".","web3",".","toHex","(","tx_receipt",".","transactionHash",")","for","event_type","in","playlist_event_types_arr",":","playlist_events_tx","=","get_playlist_events_tx","(","update_task",",","event_type",",","tx_receipt",")","processedEntries","=","0","# if record does not get added, do not count towards num_total_changes","for","entry","in","playlist_events_tx",":","existing_playlist_record","=","None","playlist_id","=","helpers",".","get_tx_arg","(","entry",",","\"_playlistId\"",")","try",":","# look up or populate existing 
record","if","playlist_id","in","playlist_events_lookup",":","existing_playlist_record","=","playlist_events_lookup","[","playlist_id","]","[","\"playlist\"","]","else",":","existing_playlist_record","=","lookup_playlist_record","(","update_task",",","session",",","entry",",","block_number",",","txhash",")","# parse playlist event to add metadata to record","playlist_record","=","parse_playlist_event","(","self",",","update_task",",","entry",",","event_type",",","existing_playlist_record",",","block_timestamp",",","session",",",")","# process playlist record","if","playlist_record","is","not","None",":","if","playlist_id","not","in","playlist_events_lookup",":","playlist_events_lookup","[","playlist_id","]","=","{","\"playlist\"",":","playlist_record",",","\"events\"",":","[","]",",","}","else",":","playlist_events_lookup","[","playlist_id","]","[","\"playlist\"","]","=","playlist_record","playlist_events_lookup","[","playlist_id","]","[","\"events\"","]",".","append","(","event_type",")","playlist_ids",".","add","(","playlist_id",")","processedEntries","+=","1","except","EntityMissingRequiredFieldError","as","e",":","logger",".","warning","(","f\"Skipping tx {txhash} with error {e}\"",")","skipped_tx_count","+=","1","add_node_level_skipped_transaction","(","session",",","block_number",",","blockhash",",","txhash",")","pass","except","Exception","as","e",":","logger",".","info","(","\"Error in parse playlist transaction\"",")","raise","IndexingError","(","\"playlist\"",",","block_number",",","blockhash",",","txhash",",","str","(","e",")",")","from","e","num_total_changes","+=","processedEntries","logger",".","info","(","f\"index.py | playlists.py | There are {num_total_changes} events processed and {skipped_tx_count} skipped transactions.\"",")","for","playlist_id",",","value_obj","in","playlist_events_lookup",".","items","(",")",":","logger",".","info","(","f\"index.py | playlists.py | Adding {value_obj['playlist']})\"",")","if","value_obj","[","\"events\"","]",":","invalidate_old_playlist","(","session",",","playlist_id",")","session",".","add","(","value_obj","[","\"playlist\"","]",")","return","num_total_changes",",","playlist_ids"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/playlists.py#L22-L114"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/cache_user_balance.py","language":"python","identifier":"update_user_balances_task","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Caches user Audio balances, in wei.","docstring_summary":"Caches user Audio balances, in wei.","docstring_tokens":["Caches","user","Audio","balances","in","wei","."],"function":"def update_user_balances_task(self):\n \"\"\"Caches user Audio balances, in wei.\"\"\"\n\n db = update_user_balances_task.db\n redis = update_user_balances_task.redis\n eth_web3 = update_user_balances_task.eth_web3\n solana_client_manager = update_user_balances_task.solana_client_manager\n\n have_lock = False\n update_lock = redis.lock(\"update_user_balances_lock\", timeout=7200)\n\n try:\n have_lock = update_lock.acquire(blocking=False)\n\n if have_lock:\n start_time = time.time()\n\n delegate_manager_inst = get_delegate_manager_contract(eth_web3)\n staking_inst = get_staking_contract(eth_web3)\n token_inst = get_token_contract(\n eth_web3, update_user_balances_task.shared_config\n )\n waudio_token = 
get_audio_token(solana_client_manager.get_client())\n refresh_user_ids(\n redis,\n db,\n token_inst,\n delegate_manager_inst,\n staking_inst,\n eth_web3,\n waudio_token,\n )\n\n end_time = time.time()\n redis.set(user_balances_refresh_last_completion_redis_key, int(end_time))\n logger.info(\n f\"cache_user_balance.py | Finished cache_user_balance in {end_time - start_time} seconds\"\n )\n else:\n logger.info(\"cache_user_balance.py | Failed to acquire lock\")\n except Exception as e:\n logger.error(\"cache_user_balance.py | Fatal error in main loop\", exc_info=True)\n raise e\n finally:\n if have_lock:\n update_lock.release()","function_tokens":["def","update_user_balances_task","(","self",")",":","db","=","update_user_balances_task",".","db","redis","=","update_user_balances_task",".","redis","eth_web3","=","update_user_balances_task",".","eth_web3","solana_client_manager","=","update_user_balances_task",".","solana_client_manager","have_lock","=","False","update_lock","=","redis",".","lock","(","\"update_user_balances_lock\"",",","timeout","=","7200",")","try",":","have_lock","=","update_lock",".","acquire","(","blocking","=","False",")","if","have_lock",":","start_time","=","time",".","time","(",")","delegate_manager_inst","=","get_delegate_manager_contract","(","eth_web3",")","staking_inst","=","get_staking_contract","(","eth_web3",")","token_inst","=","get_token_contract","(","eth_web3",",","update_user_balances_task",".","shared_config",")","waudio_token","=","get_audio_token","(","solana_client_manager",".","get_client","(",")",")","refresh_user_ids","(","redis",",","db",",","token_inst",",","delegate_manager_inst",",","staking_inst",",","eth_web3",",","waudio_token",",",")","end_time","=","time",".","time","(",")","redis",".","set","(","user_balances_refresh_last_completion_redis_key",",","int","(","end_time",")",")","logger",".","info","(","f\"cache_user_balance.py | Finished cache_user_balance in {end_time - start_time} seconds\"",")","else",":","logger",".","info","(","\"cache_user_balance.py | Failed to acquire lock\"",")","except","Exception","as","e",":","logger",".","error","(","\"cache_user_balance.py | Fatal error in main loop\"",",","exc_info","=","True",")","raise","e","finally",":","if","have_lock",":","update_lock",".","release","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/cache_user_balance.py#L472-L517"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/social_features.py","language":"python","identifier":"social_feature_state_update","parameters":"(\n self,\n update_task: DatabaseTask,\n session: Session,\n social_feature_factory_txs,\n block_number,\n block_timestamp,\n block_hash,\n _ipfs_metadata, # prefix unused args with underscore to prevent pylint\n _blacklisted_cids,\n)","argument_list":"","return_statement":"return num_total_changes, empty_set","docstring":"Return Tuple containing int representing number of social feature related state changes in this transaction and empty Set (to align with other _state_update function signatures)","docstring_summary":"Return Tuple containing int representing number of social feature related state changes in this transaction and empty Set (to align with other _state_update function 
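update_user_balances_task above uses the standard single-runner guard for periodic tasks: a Redis lock acquired non-blocking, released in a finally block only if it was actually held. A self-contained sketch with redis-py (lock name and body are placeholders):

import logging
import redis

logger = logging.getLogger(__name__)
client = redis.Redis()

have_lock = False
update_lock = client.lock("example_refresh_lock", timeout=7200)
try:
    # blocking=False: if another worker holds the lock, skip this run
    # instead of queueing behind it.
    have_lock = update_lock.acquire(blocking=False)
    if have_lock:
        pass  # do the refresh work here
    else:
        logger.info("Failed to acquire lock")
finally:
    if have_lock:
        update_lock.release()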
signatures)","docstring_tokens":["Return","Tuple","containing","int","representing","number","of","social","feature","related","state","changes","in","this","transaction","and","empty","Set","(","to","align","with","other","_state_update","function","signatures",")"],"function":"def social_feature_state_update(\n self,\n update_task: DatabaseTask,\n session: Session,\n social_feature_factory_txs,\n block_number,\n block_timestamp,\n block_hash,\n _ipfs_metadata, # prefix unused args with underscore to prevent pylint\n _blacklisted_cids,\n) -> Tuple[int, Set]:\n \"\"\"Return Tuple containing int representing number of social feature related state changes in this transaction and empty Set (to align with other _state_update function signatures)\"\"\"\n empty_set: Set[int] = set()\n num_total_changes = 0\n if not social_feature_factory_txs:\n return num_total_changes, empty_set\n\n social_feature_factory_abi = update_task.abi_values[\"SocialFeatureFactory\"][\"abi\"]\n social_feature_factory_contract = update_task.web3.eth.contract(\n address=get_contract_addresses()[\"social_feature_factory\"],\n abi=social_feature_factory_abi,\n )\n challenge_bus = update_task.challenge_event_bus\n block_datetime = datetime.utcfromtimestamp(block_timestamp)\n\n # stores net state changes of all reposts and follows and corresponding events in current block\n # track_repost_state_changes = { \"user_id\": { \"track_id\": {__Repost__} } }\n # playlist_repost_state_changes = { \"user_id\": { \"playlist_id\": {__Repost__} } }\n # follow_state_changes = { \"follower_user_id\": { \"followee_user_id\": {__Follow__} } }\n track_repost_state_changes: Dict[int, Dict[int, Repost]] = {}\n playlist_repost_state_changes: Dict[int, Dict[int, Repost]] = {}\n follow_state_changes: Dict[int, Dict[int, Follow]] = {}\n\n for tx_receipt in social_feature_factory_txs:\n try:\n add_track_repost(\n self,\n social_feature_factory_contract,\n update_task,\n session,\n tx_receipt,\n block_number,\n block_datetime,\n track_repost_state_changes,\n )\n delete_track_repost(\n self,\n social_feature_factory_contract,\n update_task,\n session,\n tx_receipt,\n block_number,\n block_datetime,\n track_repost_state_changes,\n )\n add_playlist_repost(\n self,\n social_feature_factory_contract,\n update_task,\n session,\n tx_receipt,\n block_number,\n block_datetime,\n playlist_repost_state_changes,\n )\n delete_playlist_repost(\n self,\n social_feature_factory_contract,\n update_task,\n session,\n tx_receipt,\n block_number,\n block_datetime,\n playlist_repost_state_changes,\n )\n add_follow(\n self,\n social_feature_factory_contract,\n update_task,\n session,\n tx_receipt,\n block_number,\n block_datetime,\n follow_state_changes,\n )\n delete_follow(\n self,\n social_feature_factory_contract,\n update_task,\n session,\n tx_receipt,\n block_number,\n block_datetime,\n follow_state_changes,\n )\n except Exception as e:\n logger.info(\"Error in parse track transaction\")\n txhash = update_task.web3.toHex(tx_receipt.transactionHash)\n blockhash = update_task.web3.toHex(block_hash)\n raise IndexingError(\n \"social_feature\", block_number, blockhash, txhash, str(e)\n ) from e\n\n # bulk process all repost and follow changes\n\n for repost_user_id, repost_track_ids in track_repost_state_changes.items():\n for repost_track_id in repost_track_ids:\n invalidate_old_repost(\n session, repost_user_id, repost_track_id, RepostType.track\n )\n repost = repost_track_ids[repost_track_id]\n session.add(repost)\n dispatch_challenge_repost(challenge_bus, repost, 
block_number)\n num_total_changes += len(repost_track_ids)\n\n for repost_user_id, repost_playlist_ids in playlist_repost_state_changes.items():\n for repost_playlist_id in repost_playlist_ids:\n invalidate_old_repost(\n session,\n repost_user_id,\n repost_playlist_id,\n repost_playlist_ids[repost_playlist_id].repost_type,\n )\n repost = repost_playlist_ids[repost_playlist_id]\n session.add(repost)\n dispatch_challenge_repost(challenge_bus, repost, block_number)\n num_total_changes += len(repost_playlist_ids)\n\n for follower_user_id, followee_user_ids in follow_state_changes.items():\n for followee_user_id in followee_user_ids:\n invalidate_old_follow(session, follower_user_id, followee_user_id)\n follow = followee_user_ids[followee_user_id]\n session.add(follow)\n dispatch_challenge_follow(challenge_bus, follow, block_number)\n queue_related_artist_calculation(update_task.redis, followee_user_id)\n num_total_changes += len(followee_user_ids)\n return num_total_changes, empty_set","function_tokens":["def","social_feature_state_update","(","self",",","update_task",":","DatabaseTask",",","session",":","Session",",","social_feature_factory_txs",",","block_number",",","block_timestamp",",","block_hash",",","_ipfs_metadata",",","# prefix unused args with underscore to prevent pylint","_blacklisted_cids",",",")","->","Tuple","[","int",",","Set","]",":","empty_set",":","Set","[","int","]","=","set","(",")","num_total_changes","=","0","if","not","social_feature_factory_txs",":","return","num_total_changes",",","empty_set","social_feature_factory_abi","=","update_task",".","abi_values","[","\"SocialFeatureFactory\"","]","[","\"abi\"","]","social_feature_factory_contract","=","update_task",".","web3",".","eth",".","contract","(","address","=","get_contract_addresses","(",")","[","\"social_feature_factory\"","]",",","abi","=","social_feature_factory_abi",",",")","challenge_bus","=","update_task",".","challenge_event_bus","block_datetime","=","datetime",".","utcfromtimestamp","(","block_timestamp",")","# stores net state changes of all reposts and follows and corresponding events in current block","# track_repost_state_changes = { \"user_id\": { \"track_id\": {__Repost__} } }","# playlist_repost_state_changes = { \"user_id\": { \"playlist_id\": {__Repost__} } }","# follow_state_changes = { \"follower_user_id\": { \"followee_user_id\": {__Follow__} } 
}","track_repost_state_changes",":","Dict","[","int",",","Dict","[","int",",","Repost","]","]","=","{","}","playlist_repost_state_changes",":","Dict","[","int",",","Dict","[","int",",","Repost","]","]","=","{","}","follow_state_changes",":","Dict","[","int",",","Dict","[","int",",","Follow","]","]","=","{","}","for","tx_receipt","in","social_feature_factory_txs",":","try",":","add_track_repost","(","self",",","social_feature_factory_contract",",","update_task",",","session",",","tx_receipt",",","block_number",",","block_datetime",",","track_repost_state_changes",",",")","delete_track_repost","(","self",",","social_feature_factory_contract",",","update_task",",","session",",","tx_receipt",",","block_number",",","block_datetime",",","track_repost_state_changes",",",")","add_playlist_repost","(","self",",","social_feature_factory_contract",",","update_task",",","session",",","tx_receipt",",","block_number",",","block_datetime",",","playlist_repost_state_changes",",",")","delete_playlist_repost","(","self",",","social_feature_factory_contract",",","update_task",",","session",",","tx_receipt",",","block_number",",","block_datetime",",","playlist_repost_state_changes",",",")","add_follow","(","self",",","social_feature_factory_contract",",","update_task",",","session",",","tx_receipt",",","block_number",",","block_datetime",",","follow_state_changes",",",")","delete_follow","(","self",",","social_feature_factory_contract",",","update_task",",","session",",","tx_receipt",",","block_number",",","block_datetime",",","follow_state_changes",",",")","except","Exception","as","e",":","logger",".","info","(","\"Error in parse track transaction\"",")","txhash","=","update_task",".","web3",".","toHex","(","tx_receipt",".","transactionHash",")","blockhash","=","update_task",".","web3",".","toHex","(","block_hash",")","raise","IndexingError","(","\"social_feature\"",",","block_number",",","blockhash",",","txhash",",","str","(","e",")",")","from","e","# bulk process all repost and follow 
changes","for","repost_user_id",",","repost_track_ids","in","track_repost_state_changes",".","items","(",")",":","for","repost_track_id","in","repost_track_ids",":","invalidate_old_repost","(","session",",","repost_user_id",",","repost_track_id",",","RepostType",".","track",")","repost","=","repost_track_ids","[","repost_track_id","]","session",".","add","(","repost",")","dispatch_challenge_repost","(","challenge_bus",",","repost",",","block_number",")","num_total_changes","+=","len","(","repost_track_ids",")","for","repost_user_id",",","repost_playlist_ids","in","playlist_repost_state_changes",".","items","(",")",":","for","repost_playlist_id","in","repost_playlist_ids",":","invalidate_old_repost","(","session",",","repost_user_id",",","repost_playlist_id",",","repost_playlist_ids","[","repost_playlist_id","]",".","repost_type",",",")","repost","=","repost_playlist_ids","[","repost_playlist_id","]","session",".","add","(","repost",")","dispatch_challenge_repost","(","challenge_bus",",","repost",",","block_number",")","num_total_changes","+=","len","(","repost_playlist_ids",")","for","follower_user_id",",","followee_user_ids","in","follow_state_changes",".","items","(",")",":","for","followee_user_id","in","followee_user_ids",":","invalidate_old_follow","(","session",",","follower_user_id",",","followee_user_id",")","follow","=","followee_user_ids","[","followee_user_id","]","session",".","add","(","follow",")","dispatch_challenge_follow","(","challenge_bus",",","follow",",","block_number",")","queue_related_artist_calculation","(","update_task",".","redis",",","followee_user_id",")","num_total_changes","+=","len","(","followee_user_ids",")","return","num_total_changes",",","empty_set"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/social_features.py#L17-L153"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_rewards_manager.py","language":"python","identifier":"parse_transfer_instruction_data","parameters":"(data: str)","argument_list":"","return_statement":"return parse_instruction_data(data, rewards_manager_transfer_instr)","docstring":"Parse Transfer instruction data submitted to Audius Rewards Manager program\n\n Instruction struct:\n pub struct TransferArgs {\n \/\/\/ Amount to transfer\n pub amount: u64,\n \/\/\/ ID generated on backend\n pub id: String,\n \/\/\/ Recipient's Eth address\n pub eth_recipient: EthereumAddress,\n }\n\n Decodes the data and parses each param into the correct type","docstring_summary":"Parse Transfer instruction data submitted to Audius Rewards Manager program","docstring_tokens":["Parse","Transfer","instruction","data","submitted","to","Audius","Rewards","Manager","program"],"function":"def parse_transfer_instruction_data(data: str) -> RewardsManagerTransfer:\n \"\"\"Parse Transfer instruction data submitted to Audius Rewards Manager program\n\n Instruction struct:\n pub struct TransferArgs {\n \/\/\/ Amount to transfer\n pub amount: u64,\n \/\/\/ ID generated on backend\n pub id: String,\n \/\/\/ Recipient's Eth address\n pub eth_recipient: EthereumAddress,\n }\n\n Decodes the data and parses each param into the correct type\n \"\"\"\n\n return parse_instruction_data(data, 
rewards_manager_transfer_instr)","function_tokens":["def","parse_transfer_instruction_data","(","data",":","str",")","->","RewardsManagerTransfer",":","return","parse_instruction_data","(","data",",","rewards_manager_transfer_instr",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_rewards_manager.py#L103-L119"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_rewards_manager.py","language":"python","identifier":"parse_transfer_instruction_id","parameters":"(transfer_id: str)","argument_list":"","return_statement":"return id_parts","docstring":"Parses the transfer instruction id into [challenge_id, specifier]\n The id in the transfer instruction is formatted as \"<challenge_id>:<specifier>\"","docstring_summary":"Parses the transfer instruction id into [challenge_id, specifier]\n The id in the transfer instruction is formatted as \"<challenge_id>:<specifier>\"","docstring_tokens":["Parses","the","transfer","instruction","id","into","[","challenge_id","specifier","]","The","id","in","the","transfer","instruction","is","formatted","as","<challenge_id>",":","<specifier>"],"function":"def parse_transfer_instruction_id(transfer_id: str) -> Optional[List[str]]:\n \"\"\"Parses the transfer instruction id into [challenge_id, specifier]\n The id in the transfer instruction is formatted as \"<challenge_id>:<specifier>\"\n \"\"\"\n id_parts = transfer_id.split(\":\", 1)\n if len(id_parts) != 2:\n logger.error(\n \"index_rewards_manager.py | Unable to parse transfer instruction id\"\n f\"into challenge_id and specifier {transfer_id}\"\n )\n return None\n return id_parts","function_tokens":["def","parse_transfer_instruction_id","(","transfer_id",":","str",")","->","Optional","[","List","[","str","]","]",":","id_parts","=","transfer_id",".","split","(","\":\"",",","1",")","if","len","(","id_parts",")","!=","2",":","logger",".","error","(","\"index_rewards_manager.py | Unable to parse transfer instruction id\"","f\"into challenge_id and specifier {transfer_id}\"",")","return","None","return","id_parts"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_rewards_manager.py#L122-L133"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_rewards_manager.py","language":"python","identifier":"get_valid_instruction","parameters":"(\n tx_message: TransactionMessage, meta: ResultMeta\n)","argument_list":"","return_statement":"","docstring":"Checks that the tx is valid\n checks for the transaction message for correct instruction log\n checks accounts keys for rewards manager account\n checks for rewards manager program in instruction","docstring_summary":"Checks that the tx is valid\n checks for the transaction message for correct instruction log\n checks accounts keys for rewards manager account\n checks for rewards manager program in instruction","docstring_tokens":["Checks","that","the","tx","is","valid","checks","for","the","transaction","message","for","correct","instruction","log","checks","accounts","keys","for","rewards","manager","account","checks","for","rewards","manager","program","in","instruction"],"function":"def get_valid_instruction(\n tx_message: TransactionMessage, meta: ResultMeta\n) -> Optional[TransactionMessageInstruction]:\n \"\"\"Checks that the tx is valid\n checks for the transaction message for correct instruction log\n 
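parse_transfer_instruction_id above relies on str.split with maxsplit=1, so any ":" inside the specifier survives intact. A tiny usage sketch (the challenge id shown is illustrative, not taken from the source):

def split_transfer_id(transfer_id: str):
    parts = transfer_id.split(":", 1)  # split on the first ':' only
    return parts if len(parts) == 2 else None

assert split_transfer_id("listen-streak:2022-01-01:7") == ["listen-streak", "2022-01-01:7"]
assert split_transfer_id("malformed") is None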
checks accounts keys for rewards manager account\n checks for rewards manager program in instruction\n \"\"\"\n try:\n account_keys = tx_message[\"accountKeys\"]\n has_transfer_instruction = any(\n log == \"Program log: Instruction: Transfer\" for log in meta[\"logMessages\"]\n )\n\n if not has_transfer_instruction:\n return None\n\n if not any(REWARDS_MANAGER_ACCOUNT == key for key in account_keys):\n logger.error(\n \"index_rewards_manager.py | Rewards manager account missing from account keys\"\n )\n return None\n\n instructions = tx_message[\"instructions\"]\n rewards_manager_program_index = account_keys.index(REWARDS_MANAGER_PROGRAM)\n for instruction in instructions:\n if instruction[\"programIdIndex\"] == rewards_manager_program_index:\n return instruction\n\n return None\n except Exception as e:\n logger.error(\n f\"index_rewards_manager.py | Error processing instruction valid, {e}\",\n exc_info=True,\n )\n return None","function_tokens":["def","get_valid_instruction","(","tx_message",":","TransactionMessage",",","meta",":","ResultMeta",")","->","Optional","[","TransactionMessageInstruction","]",":","try",":","account_keys","=","tx_message","[","\"accountKeys\"","]","has_transfer_instruction","=","any","(","log","==","\"Program log: Instruction: Transfer\"","for","log","in","meta","[","\"logMessages\"","]",")","if","not","has_transfer_instruction",":","return","None","if","not","any","(","REWARDS_MANAGER_ACCOUNT","==","key","for","key","in","account_keys",")",":","logger",".","error","(","\"index_rewards_manager.py | Rewards manager account missing from account keys\"",")","return","None","instructions","=","tx_message","[","\"instructions\"","]","rewards_manager_program_index","=","account_keys",".","index","(","REWARDS_MANAGER_PROGRAM",")","for","instruction","in","instructions",":","if","instruction","[","\"programIdIndex\"","]","==","rewards_manager_program_index",":","return","instruction","return","None","except","Exception","as","e",":","logger",".","error","(","f\"index_rewards_manager.py | Error processing instruction valid, {e}\"",",","exc_info","=","True",",",")","return","None"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_rewards_manager.py#L136-L171"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_rewards_manager.py","language":"python","identifier":"fetch_and_parse_sol_rewards_transfer_instruction","parameters":"(\n solana_client_manager: SolanaClientManager, tx_sig: str\n)","argument_list":"","return_statement":"","docstring":"Fetches metadata for rewards transfer transactions and parses data\n\n Fetches the transaction metadata from solana using the tx signature\n Checks the metadata for a transfer instruction\n Decodes and parses the transfer instruction metadata\n Validates the metadata fields","docstring_summary":"Fetches metadata for rewards transfer transactions and parses data","docstring_tokens":["Fetches","metadata","for","rewards","transfer","transactions","and","parses","data"],"function":"def fetch_and_parse_sol_rewards_transfer_instruction(\n solana_client_manager: SolanaClientManager, tx_sig: str\n) -> RewardManagerTransactionInfo:\n \"\"\"Fetches metadata for rewards transfer transactions and parses data\n\n Fetches the transaction metadata from solana using the tx signature\n Checks the metadata for a transfer instruction\n Decodes and parses the transfer 
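get_valid_instruction above matches instructions by index: it finds the rewards manager program's position in accountKeys and compares each instruction's programIdIndex against it. A simplified sketch that checks a single program id, where the source additionally verifies the rewards manager account and the "Program log: Instruction: Transfer" log line:

from typing import Optional

def find_program_instruction(tx_message: dict, program: str) -> Optional[dict]:
    keys = tx_message["accountKeys"]
    if program not in keys:
        return None
    program_index = keys.index(program)
    for instruction in tx_message["instructions"]:
        # Solana instructions reference accounts by index into accountKeys.
        if instruction["programIdIndex"] == program_index:
            return instruction
    return None

msg = {"accountKeys": ["A", "ProgramXYZ"], "instructions": [{"programIdIndex": 1}]}
assert find_program_instruction(msg, "ProgramXYZ") == {"programIdIndex": 1}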
instruction metadata\n Validates the metadata fields\n \"\"\"\n try:\n tx_info = solana_client_manager.get_sol_tx_info(tx_sig)\n result: TransactionInfoResult = tx_info[\"result\"]\n # Create transaction metadata\n tx_metadata: RewardManagerTransactionInfo = {\n \"tx_sig\": tx_sig,\n \"slot\": result[\"slot\"],\n \"timestamp\": result[\"blockTime\"],\n \"transfer_instruction\": None,\n }\n meta = result[\"meta\"]\n if meta[\"err\"]:\n logger.info(\n f\"index_rewards_manager.py | Skipping error transaction from chain {tx_info}\"\n )\n return tx_metadata\n tx_message = result[\"transaction\"][\"message\"]\n instruction = get_valid_instruction(tx_message, meta)\n if instruction is None:\n return tx_metadata\n transfer_instruction_data = parse_transfer_instruction_data(instruction[\"data\"])\n amount = transfer_instruction_data[\"amount\"]\n eth_recipient = transfer_instruction_data[\"eth_recipient\"]\n id = transfer_instruction_data[\"id\"]\n transfer_instruction = parse_transfer_instruction_id(id)\n if transfer_instruction is None:\n return tx_metadata\n\n challenge_id, specifier = transfer_instruction\n tx_metadata[\"transfer_instruction\"] = {\n \"amount\": amount,\n \"eth_recipient\": eth_recipient,\n \"challenge_id\": challenge_id,\n \"specifier\": specifier,\n }\n return tx_metadata\n except Exception as e:\n logger.error(\n f\"index_rewards_manager.py | Error processing {tx_sig}, {e}\", exc_info=True\n )\n raise e","function_tokens":["def","fetch_and_parse_sol_rewards_transfer_instruction","(","solana_client_manager",":","SolanaClientManager",",","tx_sig",":","str",")","->","RewardManagerTransactionInfo",":","try",":","tx_info","=","solana_client_manager",".","get_sol_tx_info","(","tx_sig",")","result",":","TransactionInfoResult","=","tx_info","[","\"result\"","]","# Create transaction metadata","tx_metadata",":","RewardManagerTransactionInfo","=","{","\"tx_sig\"",":","tx_sig",",","\"slot\"",":","result","[","\"slot\"","]",",","\"timestamp\"",":","result","[","\"blockTime\"","]",",","\"transfer_instruction\"",":","None",",","}","meta","=","result","[","\"meta\"","]","if","meta","[","\"err\"","]",":","logger",".","info","(","f\"index_rewards_manager.py | Skipping error transaction from chain {tx_info}\"",")","return","tx_metadata","tx_message","=","result","[","\"transaction\"","]","[","\"message\"","]","instruction","=","get_valid_instruction","(","tx_message",",","meta",")","if","instruction","is","None",":","return","tx_metadata","transfer_instruction_data","=","parse_transfer_instruction_data","(","instruction","[","\"data\"","]",")","amount","=","transfer_instruction_data","[","\"amount\"","]","eth_recipient","=","transfer_instruction_data","[","\"eth_recipient\"","]","id","=","transfer_instruction_data","[","\"id\"","]","transfer_instruction","=","parse_transfer_instruction_id","(","id",")","if","transfer_instruction","is","None",":","return","tx_metadata","challenge_id",",","specifier","=","transfer_instruction","tx_metadata","[","\"transfer_instruction\"","]","=","{","\"amount\"",":","amount",",","\"eth_recipient\"",":","eth_recipient",",","\"challenge_id\"",":","challenge_id",",","\"specifier\"",":","specifier",",","}","return","tx_metadata","except","Exception","as","e",":","logger",".","error","(","f\"index_rewards_manager.py | Error processing {tx_sig}, 
{e}\"",",","exc_info","=","True",")","raise","e"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_rewards_manager.py#L174-L224"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_rewards_manager.py","language":"python","identifier":"process_batch_sol_reward_manager_txs","parameters":"(\n session: Session,\n reward_manager_txs: List[RewardManagerTransactionInfo],\n redis: Redis,\n)","argument_list":"","return_statement":"","docstring":"Validates that the transfer instruction is consistent with DB and inserts ChallengeDisbursement DB entries","docstring_summary":"Validates that the transfer instruction is consistent with DB and inserts ChallengeDisbursement DB entries","docstring_tokens":["Validates","that","the","transfer","instruction","is","consistent","with","DB","and","inserts","ChallengeDisbursement","DB","entries"],"function":"def process_batch_sol_reward_manager_txs(\n session: Session,\n reward_manager_txs: List[RewardManagerTransactionInfo],\n redis: Redis,\n):\n \"\"\"Validates that the transfer instruction is consistent with DB and inserts ChallengeDisbursement DB entries\"\"\"\n try:\n logger.error(f\"index_reward_manager | {reward_manager_txs}\")\n eth_recipients = [\n tx[\"transfer_instruction\"][\"eth_recipient\"]\n for tx in reward_manager_txs\n if tx[\"transfer_instruction\"] is not None\n ]\n users = (\n session.query(User.wallet, User.user_id)\n .filter(User.wallet.in_(eth_recipients), User.is_current == True)\n .all()\n )\n users_map = {user[0]: user[1] for user in users}\n\n specifiers = [\n tx[\"transfer_instruction\"][\"specifier\"]\n for tx in reward_manager_txs\n if tx[\"transfer_instruction\"] is not None\n ]\n\n user_challenges = (\n session.query(UserChallenge.specifier)\n .filter(\n UserChallenge.specifier.in_(specifiers),\n )\n .all()\n )\n user_challenge_specifiers = {challenge[0] for challenge in user_challenges}\n\n challenge_disbursements = []\n for tx in reward_manager_txs:\n # Add transaction\n session.add(\n RewardManagerTransaction(\n signature=tx[\"tx_sig\"],\n slot=tx[\"slot\"],\n created_at=datetime.datetime.utcfromtimestamp(tx[\"timestamp\"]),\n )\n )\n # No instruction found\n if tx[\"transfer_instruction\"] is None:\n logger.warning(\n f\"index_rewards_manager.py | No transfer instruction found in {tx}\"\n )\n continue\n transfer_instr: RewardTransferInstruction = tx[\"transfer_instruction\"]\n specifier = transfer_instr[\"specifier\"]\n eth_recipient = transfer_instr[\"eth_recipient\"]\n if specifier not in user_challenge_specifiers:\n logger.error(\n f\"index_rewards_manager.py | Challenge specifier {specifier} not found\"\n \"while processing disbursement\"\n )\n if eth_recipient not in users_map:\n logger.error(\n f\"index_rewards_manager.py | eth_recipient {eth_recipient} not found while processing disbursement\"\n )\n tx_signature = tx[\"tx_sig\"]\n raise MissingEthRecipientError(\n eth_recipient,\n transfer_instr[\"challenge_id\"],\n specifier,\n tx[\"tx_sig\"],\n tx[\"slot\"],\n f\"Error: eth_recipient {eth_recipient} not found while indexing rewards manager for tx signature {tx_signature}\",\n )\n\n user_id = users_map[eth_recipient]\n logger.info(\n f\"index_rewards_manager.py | found successful disbursement for user_id: [{user_id}]\"\n )\n\n challenge_disbursements.append(\n ChallengeDisbursement(\n challenge_id=transfer_instr[\"challenge_id\"],\n 
user_id=user_id,\n specifier=specifier,\n amount=str(transfer_instr[\"amount\"]),\n slot=tx[\"slot\"],\n signature=tx[\"tx_sig\"],\n )\n )\n\n if challenge_disbursements:\n # Save out the disbursements\n session.bulk_save_objects(challenge_disbursements)\n # Enqueue balance refreshes for the users\n user_ids = [c.user_id for c in challenge_disbursements]\n enqueue_immediate_balance_refresh(redis, user_ids)\n\n except Exception as e:\n logger.error(f\"index_rewards_manager.py | Error processing {e}\", exc_info=True)\n raise e","function_tokens":["def","process_batch_sol_reward_manager_txs","(","session",":","Session",",","reward_manager_txs",":","List","[","RewardManagerTransactionInfo","]",",","redis",":","Redis",",",")",":","try",":","logger",".","error","(","f\"index_reward_manager | {reward_manager_txs}\"",")","eth_recipients","=","[","tx","[","\"transfer_instruction\"","]","[","\"eth_recipient\"","]","for","tx","in","reward_manager_txs","if","tx","[","\"transfer_instruction\"","]","is","not","None","]","users","=","(","session",".","query","(","User",".","wallet",",","User",".","user_id",")",".","filter","(","User",".","wallet",".","in_","(","eth_recipients",")",",","User",".","is_current","==","True",")",".","all","(",")",")","users_map","=","{","user","[","0","]",":","user","[","1","]","for","user","in","users","}","specifiers","=","[","tx","[","\"transfer_instruction\"","]","[","\"specifier\"","]","for","tx","in","reward_manager_txs","if","tx","[","\"transfer_instruction\"","]","is","not","None","]","user_challenges","=","(","session",".","query","(","UserChallenge",".","specifier",")",".","filter","(","UserChallenge",".","specifier",".","in_","(","specifiers",")",",",")",".","all","(",")",")","user_challenge_specifiers","=","{","challenge","[","0","]","for","challenge","in","user_challenges","}","challenge_disbursements","=","[","]","for","tx","in","reward_manager_txs",":","# Add transaction","session",".","add","(","RewardManagerTransaction","(","signature","=","tx","[","\"tx_sig\"","]",",","slot","=","tx","[","\"slot\"","]",",","created_at","=","datetime",".","datetime",".","utcfromtimestamp","(","tx","[","\"timestamp\"","]",")",",",")",")","# No instruction found","if","tx","[","\"transfer_instruction\"","]","is","None",":","logger",".","warning","(","f\"index_rewards_manager.py | No transfer instruction found in {tx}\"",")","continue","transfer_instr",":","RewardTransferInstruction","=","tx","[","\"transfer_instruction\"","]","specifier","=","transfer_instr","[","\"specifier\"","]","eth_recipient","=","transfer_instr","[","\"eth_recipient\"","]","if","specifier","not","in","user_challenge_specifiers",":","logger",".","error","(","f\"index_rewards_manager.py | Challenge specifier {specifier} not found\"","\"while processing disbursement\"",")","if","eth_recipient","not","in","users_map",":","logger",".","error","(","f\"index_rewards_manager.py | eth_recipient {eth_recipient} not found while processing disbursement\"",")","tx_signature","=","tx","[","\"tx_sig\"","]","raise","MissingEthRecipientError","(","eth_recipient",",","transfer_instr","[","\"challenge_id\"","]",",","specifier",",","tx","[","\"tx_sig\"","]",",","tx","[","\"slot\"","]",",","f\"Error: eth_recipient {eth_recipient} not found while indexing rewards manager for tx signature {tx_signature}\"",",",")","user_id","=","users_map","[","eth_recipient","]","logger",".","info","(","f\"index_rewards_manager.py | found successful disbursement for user_id: 
[{user_id}]\"",")","challenge_disbursements",".","append","(","ChallengeDisbursement","(","challenge_id","=","transfer_instr","[","\"challenge_id\"","]",",","user_id","=","user_id",",","specifier","=","specifier",",","amount","=","str","(","transfer_instr","[","\"amount\"","]",")",",","slot","=","tx","[","\"slot\"","]",",","signature","=","tx","[","\"tx_sig\"","]",",",")",")","if","challenge_disbursements",":","# Save out the disbursements","session",".","bulk_save_objects","(","challenge_disbursements",")","# Enqueue balance refreshes for the users","user_ids","=","[","c",".","user_id","for","c","in","challenge_disbursements","]","enqueue_immediate_balance_refresh","(","redis",",","user_ids",")","except","Exception","as","e",":","logger",".","error","(","f\"index_rewards_manager.py | Error processing {e}\"",",","exc_info","=","True",")","raise","e"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_rewards_manager.py#L227-L325"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_rewards_manager.py","language":"python","identifier":"get_latest_reward_disbursment_slot","parameters":"(session: Session)","argument_list":"","return_statement":"return latest_slot","docstring":"Fetches the most recent slot for Challenge Disburements","docstring_summary":"Fetches the most recent slot for Challenge Disburements","docstring_tokens":["Fetches","the","most","recent","slot","for","Challenge","Disburements"],"function":"def get_latest_reward_disbursment_slot(session: Session):\n \"\"\"Fetches the most recent slot for Challenge Disburements\"\"\"\n latest_slot = None\n highest_slot_query = (\n session.query(RewardManagerTransaction.slot).order_by(\n desc(RewardManagerTransaction.slot)\n )\n ).first()\n # Can be None prior to first write operations\n if highest_slot_query is not None:\n latest_slot = highest_slot_query[0]\n\n # If no slots have yet been recorded, assume all are valid\n if latest_slot is None:\n latest_slot = 0\n\n return latest_slot","function_tokens":["def","get_latest_reward_disbursment_slot","(","session",":","Session",")",":","latest_slot","=","None","highest_slot_query","=","(","session",".","query","(","RewardManagerTransaction",".","slot",")",".","order_by","(","desc","(","RewardManagerTransaction",".","slot",")",")",")",".","first","(",")","# Can be None prior to first write operations","if","highest_slot_query","is","not","None",":","latest_slot","=","highest_slot_query","[","0","]","# If no slots have yet been recorded, assume all are valid","if","latest_slot","is","None",":","latest_slot","=","0","return","latest_slot"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_rewards_manager.py#L328-L344"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_rewards_manager.py","language":"python","identifier":"get_tx_in_db","parameters":"(session: Session, tx_sig: str)","argument_list":"","return_statement":"return exists","docstring":"Checks if the transaction signature already exists for Challenge Disburements","docstring_summary":"Checks if the transaction signature already exists for Challenge 
Disburements","docstring_tokens":["Checks","if","the","transaction","signature","already","exists","for","Challenge","Disburements"],"function":"def get_tx_in_db(session: Session, tx_sig: str) -> bool:\n \"\"\"Checks if the transaction signature already exists for Challenge Disburements\"\"\"\n tx_sig_db_count = (\n session.query(RewardManagerTransaction).filter(\n RewardManagerTransaction.signature == tx_sig\n )\n ).count()\n exists = tx_sig_db_count > 0\n return exists","function_tokens":["def","get_tx_in_db","(","session",":","Session",",","tx_sig",":","str",")","->","bool",":","tx_sig_db_count","=","(","session",".","query","(","RewardManagerTransaction",")",".","filter","(","RewardManagerTransaction",".","signature","==","tx_sig",")",")",".","count","(",")","exists","=","tx_sig_db_count",">","0","return","exists"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_rewards_manager.py#L347-L355"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_rewards_manager.py","language":"python","identifier":"get_transaction_signatures","parameters":"(\n solana_client_manager: SolanaClientManager,\n db: SessionManager,\n program: str,\n get_latest_slot: Callable[[Session], int],\n check_tx_exists: Callable[[Session, str], bool],\n min_slot=None,\n)","argument_list":"","return_statement":"return transaction_signatures","docstring":"Fetches next batch of transaction signature offset from the previous latest processed slot\n\n Fetches the latest processed slot for the rewards manager program\n Iterates backwards from the current tx until an intersection is found with the latest processed slot\n Returns the next set of transaction signature from the current offset slot to process","docstring_summary":"Fetches next batch of transaction signature offset from the previous latest processed slot","docstring_tokens":["Fetches","next","batch","of","transaction","signature","offset","from","the","previous","latest","processed","slot"],"function":"def get_transaction_signatures(\n solana_client_manager: SolanaClientManager,\n db: SessionManager,\n program: str,\n get_latest_slot: Callable[[Session], int],\n check_tx_exists: Callable[[Session, str], bool],\n min_slot=None,\n) -> List[List[str]]:\n \"\"\"Fetches next batch of transaction signature offset from the previous latest processed slot\n\n Fetches the latest processed slot for the rewards manager program\n Iterates backwards from the current tx until an intersection is found with the latest processed slot\n Returns the next set of transaction signature from the current offset slot to process\n \"\"\"\n # List of signatures that will be populated as we traverse recent operations\n transaction_signatures = []\n\n last_tx_signature = None\n\n # Loop exit condition\n intersection_found = False\n\n # Query for solana transactions until an intersection is found\n with db.scoped_session() as session:\n latest_processed_slot = get_latest_slot(session)\n while not intersection_found:\n transactions_history = solana_client_manager.get_signatures_for_address(\n program, before=last_tx_signature, limit=TX_SIGNATURES_BATCH_SIZE\n )\n\n transactions_array = transactions_history[\"result\"]\n if not transactions_array:\n intersection_found = True\n logger.info(\n f\"index_rewards_manager.py | No transactions found before {last_tx_signature}\"\n )\n else:\n # Current batch of transactions\n 
transaction_signature_batch = []\n for tx_info in transactions_array:\n tx_sig = tx_info[\"signature\"]\n tx_slot = tx_info[\"slot\"]\n logger.info(\n f\"index_rewards_manager.py | Processing tx={tx_sig} | slot={tx_slot}\"\n )\n if tx_info[\"slot\"] > latest_processed_slot:\n transaction_signature_batch.append(tx_sig)\n elif tx_info[\"slot\"] <= latest_processed_slot and (\n min_slot is None or tx_info[\"slot\"] > min_slot\n ):\n # Check the tx signature for any txs in the latest batch,\n # and if not present in DB, add to processing\n logger.info(\n f\"index_rewards_manager.py | Latest slot re-traversal\\\n slot={tx_slot}, sig={tx_sig},\\\n latest_processed_slot(db)={latest_processed_slot}\"\n )\n exists = check_tx_exists(session, tx_sig)\n if exists:\n intersection_found = True\n break\n # Ensure this transaction is still processed\n transaction_signature_batch.append(tx_sig)\n\n # Restart processing at the end of this transaction signature batch\n last_tx = transactions_array[-1]\n last_tx_signature = last_tx[\"signature\"]\n\n # Append batch of processed signatures\n if transaction_signature_batch:\n transaction_signatures.append(transaction_signature_batch)\n\n # Ensure processing does not grow unbounded\n if len(transaction_signatures) > TX_SIGNATURES_MAX_BATCHES:\n # Only take the oldest transaction from the transaction_signatures array\n # transaction_signatures is sorted from newest to oldest\n transaction_signatures = transaction_signatures[\n -TX_SIGNATURES_RESIZE_LENGTH:\n ]\n\n # Reverse batches aggregated so oldest transactions are processed first\n transaction_signatures.reverse()\n return transaction_signatures","function_tokens":["def","get_transaction_signatures","(","solana_client_manager",":","SolanaClientManager",",","db",":","SessionManager",",","program",":","str",",","get_latest_slot",":","Callable","[","[","Session","]",",","int","]",",","check_tx_exists",":","Callable","[","[","Session",",","str","]",",","bool","]",",","min_slot","=","None",",",")","->","List","[","List","[","str","]","]",":","# List of signatures that will be populated as we traverse recent operations","transaction_signatures","=","[","]","last_tx_signature","=","None","# Loop exit condition","intersection_found","=","False","# Query for solana transactions until an intersection is found","with","db",".","scoped_session","(",")","as","session",":","latest_processed_slot","=","get_latest_slot","(","session",")","while","not","intersection_found",":","transactions_history","=","solana_client_manager",".","get_signatures_for_address","(","program",",","before","=","last_tx_signature",",","limit","=","TX_SIGNATURES_BATCH_SIZE",")","transactions_array","=","transactions_history","[","\"result\"","]","if","not","transactions_array",":","intersection_found","=","True","logger",".","info","(","f\"index_rewards_manager.py | No transactions found before {last_tx_signature}\"",")","else",":","# Current batch of transactions","transaction_signature_batch","=","[","]","for","tx_info","in","transactions_array",":","tx_sig","=","tx_info","[","\"signature\"","]","tx_slot","=","tx_info","[","\"slot\"","]","logger",".","info","(","f\"index_rewards_manager.py | Processing tx={tx_sig} | slot={tx_slot}\"",")","if","tx_info","[","\"slot\"","]",">","latest_processed_slot",":","transaction_signature_batch",".","append","(","tx_sig",")","elif","tx_info","[","\"slot\"","]","<=","latest_processed_slot","and","(","min_slot","is","None","or","tx_info","[","\"slot\"","]",">","min_slot",")",":","# Check the tx signature for 
any txs in the latest batch,","# and if not present in DB, add to processing","logger",".","info","(","f\"index_rewards_manager.py | Latest slot re-traversal\\\n slot={tx_slot}, sig={tx_sig},\\\n latest_processed_slot(db)={latest_processed_slot}\"",")","exists","=","check_tx_exists","(","session",",","tx_sig",")","if","exists",":","intersection_found","=","True","break","# Ensure this transaction is still processed","transaction_signature_batch",".","append","(","tx_sig",")","# Restart processing at the end of this transaction signature batch","last_tx","=","transactions_array","[","-","1","]","last_tx_signature","=","last_tx","[","\"signature\"","]","# Append batch of processed signatures","if","transaction_signature_batch",":","transaction_signatures",".","append","(","transaction_signature_batch",")","# Ensure processing does not grow unbounded","if","len","(","transaction_signatures",")",">","TX_SIGNATURES_MAX_BATCHES",":","# Only take the oldest transaction from the transaction_signatures array","# transaction_signatures is sorted from newest to oldest","transaction_signatures","=","transaction_signatures","[","-","TX_SIGNATURES_RESIZE_LENGTH",":","]","# Reverse batches aggregated so oldest transactions are processed first","transaction_signatures",".","reverse","(",")","return","transaction_signatures"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_rewards_manager.py#L358-L440"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_rewards_manager.py","language":"python","identifier":"process_transaction_signatures","parameters":"(\n solana_client_manager: SolanaClientManager,\n db: SessionManager,\n redis: Redis,\n transaction_signatures: List[List[str]],\n)","argument_list":"","return_statement":"","docstring":"Concurrently processes the transactions to update the DB state for reward transfer instructions","docstring_summary":"Concurrently processes the transactions to update the DB state for reward transfer instructions","docstring_tokens":["Concurrently","processes","the","transactions","to","update","the","DB","state","for","reward","transfer","instructions"],"function":"def process_transaction_signatures(\n solana_client_manager: SolanaClientManager,\n db: SessionManager,\n redis: Redis,\n transaction_signatures: List[List[str]],\n):\n \"\"\"Concurrently processes the transactions to update the DB state for reward transfer instructions\"\"\"\n last_tx_sig: Optional[str] = None\n last_tx: Optional[RewardManagerTransactionInfo] = None\n if transaction_signatures and transaction_signatures[-1]:\n last_tx_sig = transaction_signatures[-1][0]\n\n for tx_sig_batch in transaction_signatures:\n logger.info(f\"index_rewards_manager.py | processing {tx_sig_batch}\")\n batch_start_time = time.time()\n\n transfer_instructions: List[RewardManagerTransactionInfo] = []\n # Process each batch in parallel\n with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:\n parse_sol_tx_futures = {\n executor.submit(\n fetch_and_parse_sol_rewards_transfer_instruction,\n solana_client_manager,\n tx_sig,\n ): tx_sig\n for tx_sig in tx_sig_batch\n }\n for future in concurrent.futures.as_completed(parse_sol_tx_futures):\n try:\n # No return value expected here so we just ensure all futures are resolved\n parsed_solana_transfer_instruction = future.result()\n if parsed_solana_transfer_instruction is not None:\n 
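get_transaction_signatures above pages backwards through getSignaturesForAddress with a `before` cursor until it intersects the last processed slot. A simplified, self-contained version of that traversal; it omits the source's per-signature DB re-check at the boundary, and fetch_page stands in for the RPC call:

from typing import Callable, List

def collect_new_signatures(
    fetch_page: Callable[..., List[dict]],
    latest_processed_slot: int,
    batch_size: int = 1000,
) -> List[List[str]]:
    batches: List[List[str]] = []
    before = None
    while True:
        page = fetch_page(before=before, limit=batch_size)  # newest -> oldest
        if not page:
            break
        batch = [tx["signature"] for tx in page if tx["slot"] > latest_processed_slot]
        if batch:
            batches.append(batch)
        if len(batch) < len(page):
            break  # crossed into already-indexed territory
        before = page[-1]["signature"]
    batches.reverse()  # oldest batch first, as in the source
    return batches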
transfer_instructions.append(parsed_solana_transfer_instruction)\n if (\n last_tx_sig\n and last_tx_sig\n == parsed_solana_transfer_instruction[\"tx_sig\"]\n ):\n last_tx = parsed_solana_transfer_instruction\n except Exception as exc:\n logger.error(f\"index_rewards_manager.py | {exc}\")\n raise exc\n with db.scoped_session() as session:\n process_batch_sol_reward_manager_txs(session, transfer_instructions, redis)\n batch_end_time = time.time()\n batch_duration = batch_end_time - batch_start_time\n logger.info(\n f\"index_rewards_manager.py | processed batch {len(tx_sig_batch)} txs in {batch_duration}s\"\n )\n\n if last_tx:\n cache_latest_sol_rewards_manager_db_tx(\n redis,\n {\n \"signature\": last_tx[\"tx_sig\"],\n \"slot\": last_tx[\"slot\"],\n \"timestamp\": last_tx[\"timestamp\"],\n },\n )","function_tokens":["def","process_transaction_signatures","(","solana_client_manager",":","SolanaClientManager",",","db",":","SessionManager",",","redis",":","Redis",",","transaction_signatures",":","List","[","List","[","str","]","]",",",")",":","last_tx_sig",":","Optional","[","str","]","=","None","last_tx",":","Optional","[","RewardManagerTransactionInfo","]","=","None","if","transaction_signatures","and","transaction_signatures","[","-","1","]",":","last_tx_sig","=","transaction_signatures","[","-","1","]","[","0","]","for","tx_sig_batch","in","transaction_signatures",":","logger",".","info","(","f\"index_rewards_manager.py | processing {tx_sig_batch}\"",")","batch_start_time","=","time",".","time","(",")","transfer_instructions",":","List","[","RewardManagerTransactionInfo","]","=","[","]","# Process each batch in parallel","with","concurrent",".","futures",".","ThreadPoolExecutor","(","max_workers","=","5",")","as","executor",":","parse_sol_tx_futures","=","{","executor",".","submit","(","fetch_and_parse_sol_rewards_transfer_instruction",",","solana_client_manager",",","tx_sig",",",")",":","tx_sig","for","tx_sig","in","tx_sig_batch","}","for","future","in","concurrent",".","futures",".","as_completed","(","parse_sol_tx_futures",")",":","try",":","# No return value expected here so we just ensure all futures are resolved","parsed_solana_transfer_instruction","=","future",".","result","(",")","if","parsed_solana_transfer_instruction","is","not","None",":","transfer_instructions",".","append","(","parsed_solana_transfer_instruction",")","if","(","last_tx_sig","and","last_tx_sig","==","parsed_solana_transfer_instruction","[","\"tx_sig\"","]",")",":","last_tx","=","parsed_solana_transfer_instruction","except","Exception","as","exc",":","logger",".","error","(","f\"index_rewards_manager.py | {exc}\"",")","raise","exc","with","db",".","scoped_session","(",")","as","session",":","process_batch_sol_reward_manager_txs","(","session",",","transfer_instructions",",","redis",")","batch_end_time","=","time",".","time","(",")","batch_duration","=","batch_end_time","-","batch_start_time","logger",".","info","(","f\"index_rewards_manager.py | processed batch {len(tx_sig_batch)} txs in {batch_duration}s\"",")","if","last_tx",":","cache_latest_sol_rewards_manager_db_tx","(","redis",",","{","\"signature\"",":","last_tx","[","\"tx_sig\"","]",",","\"slot\"",":","last_tx","[","\"slot\"","]",",","\"timestamp\"",":","last_tx","[","\"timestamp\"","]",",","}",",",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_rewards_manager.py#L443-L501"} 
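process_transaction_signatures above fans each signature batch out over a thread pool and gathers results with as_completed, which yields futures in completion order rather than submission order. The core pattern, self-contained:

import concurrent.futures

def fetch_all(fetch_one, tx_sigs, max_workers: int = 5):
    results = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = {executor.submit(fetch_one, sig): sig for sig in tx_sigs}
        for future in concurrent.futures.as_completed(futures):
            result = future.result()  # re-raises any worker exception here
            if result is not None:
                results.append(result)
    return results

assert len(fetch_all(lambda s: {"tx_sig": s}, ["sig1", "sig2"])) == 2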
{"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_rewards_manager.py","language":"python","identifier":"process_solana_rewards_manager","parameters":"(\n solana_client_manager: SolanaClientManager, db: SessionManager, redis: Redis\n)","argument_list":"","return_statement":"","docstring":"Fetches the next set of reward manager transactions and updates the DB with Challenge Disbursements","docstring_summary":"Fetches the next set of reward manager transactions and updates the DB with Challenge Disbursements","docstring_tokens":["Fetches","the","next","set","of","reward","manager","transactions","and","updates","the","DB","with","Challenge","Disbursements"],"function":"def process_solana_rewards_manager(\n solana_client_manager: SolanaClientManager, db: SessionManager, redis: Redis\n):\n \"\"\"Fetches the next set of reward manager transactions and updates the DB with Challenge Disbursements\"\"\"\n if not is_valid_rewards_manager_program:\n logger.error(\n \"index_rewards_manager.py | no valid reward manager program passed\"\n )\n return\n if not REWARDS_MANAGER_ACCOUNT:\n logger.error(\"index_rewards_manager.py | reward manager account missing\")\n return\n # List of signatures that will be populated as we traverse recent operations\n transaction_signatures = get_transaction_signatures(\n solana_client_manager,\n db,\n REWARDS_MANAGER_PROGRAM,\n get_latest_reward_disbursment_slot,\n get_tx_in_db,\n MIN_SLOT,\n )\n logger.info(f\"index_rewards_manager.py | {transaction_signatures}\")\n\n process_transaction_signatures(\n solana_client_manager, db, redis, transaction_signatures\n )","function_tokens":["def","process_solana_rewards_manager","(","solana_client_manager",":","SolanaClientManager",",","db",":","SessionManager",",","redis",":","Redis",")",":","if","not","is_valid_rewards_manager_program",":","logger",".","error","(","\"index_rewards_manager.py | no valid reward manager program passed\"",")","return","if","not","REWARDS_MANAGER_ACCOUNT",":","logger",".","error","(","\"index_rewards_manager.py | reward manager account missing\"",")","return","# List of signatures that will be populated as we traverse recent operations","transaction_signatures","=","get_transaction_signatures","(","solana_client_manager",",","db",",","REWARDS_MANAGER_PROGRAM",",","get_latest_reward_disbursment_slot",",","get_tx_in_db",",","MIN_SLOT",",",")","logger",".","info","(","f\"index_rewards_manager.py | {transaction_signatures}\"",")","process_transaction_signatures","(","solana_client_manager",",","db",",","redis",",","transaction_signatures",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_rewards_manager.py#L504-L529"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_solana_plays.py","language":"python","identifier":"parse_sol_tx_batch","parameters":"(\n db, solana_client_manager, redis, tx_sig_batch_records, retries=10\n)","argument_list":"","return_statement":"return None","docstring":"Parse a batch of solana transactions in parallel by calling parse_sol_play_transaction\n with a ThreaPoolExecutor\n\n This function also has a recursive retry upto a certain limit in case a future doesn't complete\n within the alloted time. 
It clears the futures thread queue and the batch is retried","docstring_summary":"Parse a batch of solana transactions in parallel by calling parse_sol_play_transaction\n with a ThreaPoolExecutor","docstring_tokens":["Parse","a","batch","of","solana","transactions","in","parallel","by","calling","parse_sol_play_transaction","with","a","ThreaPoolExecutor"],"function":"def parse_sol_tx_batch(\n db, solana_client_manager, redis, tx_sig_batch_records, retries=10\n):\n \"\"\"\n Parse a batch of solana transactions in parallel by calling parse_sol_play_transaction\n with a ThreaPoolExecutor\n\n This function also has a recursive retry upto a certain limit in case a future doesn't complete\n within the alloted time. It clears the futures thread queue and the batch is retried\n \"\"\"\n batch_start_time = time.time()\n challenge_bus_events = []\n plays = []\n\n # Last record in this batch to be cached\n # Important to note that the batch records are in time DESC order\n last_tx_in_batch = tx_sig_batch_records[0]\n challenge_bus = index_solana_plays.challenge_event_bus\n\n # Process each batch in parallel\n with concurrent.futures.ThreadPoolExecutor() as executor:\n parse_sol_tx_futures = {\n executor.submit(\n parse_sol_play_transaction,\n solana_client_manager,\n tx_sig,\n ): tx_sig\n for tx_sig in tx_sig_batch_records\n }\n try:\n for future in concurrent.futures.as_completed(\n parse_sol_tx_futures, timeout=45\n ):\n # Returns the properties for a Play object to be created in the db\n # can be None so check the value exists\n result = future.result()\n if result:\n user_id, track_id, created_at, source, slot, tx_sig = result\n\n # Append plays to a list that will be written if all plays are successfully retrieved\n # from the rpc pool\n play: PlayInfo = {\n \"user_id\": user_id,\n \"play_item_id\": track_id,\n \"created_at\": created_at,\n \"updated_at\": datetime.now(),\n \"source\": source,\n \"slot\": slot,\n \"signature\": tx_sig,\n }\n plays.append(play)\n # Only enqueue a challenge event if it's *not*\n # an anonymous listen\n if user_id is not None:\n challenge_bus_events.append(\n {\n \"slot\": slot,\n \"user_id\": user_id,\n \"created_at\": created_at.timestamp(),\n }\n )\n\n except Exception as exc:\n logger.error(\n f\"index_solana_plays.py | Error parsing sol play transaction: {exc}\"\n )\n # timeout in a ThreadPoolExecutor doesn't actually stop execution of the underlying thread\n # in order to do that we need to actually clear the queue which we do here to force this\n # task to stop execution\n executor._threads.clear()\n concurrent.futures.thread._threads_queues.clear()\n\n # if we have retries left, recursively call this function again\n if retries > 0:\n return parse_sol_tx_batch(\n db, solana_client_manager, redis, tx_sig_batch_records, retries - 1\n )\n\n # if no more retries, raise\n raise exc\n\n # if the thread pool executor completes successfully without raising an exception\n # the data is successfully fetched so we can add it to the db session and dispatch\n # events to challenge bus\n\n # Cache the latest play from this batch\n # This reflects the ordering from chain\n for play in plays:\n if play.get(\"signature\") == last_tx_in_batch:\n most_recent_db_play = {\n \"signature\": play.get(\"signature\"),\n \"slot\": play.get(\"slot\"),\n \"timestamp\": int(play.get(\"created_at\").timestamp()),\n }\n cache_latest_sol_play_db_tx(redis, most_recent_db_play)\n break\n\n db_save_start = time.time()\n logger.info(\n f\"index_solana_plays.py | DB | Saving test to DB, fetched 
batch tx details in {db_save_start - batch_start_time}\"\n )\n\n with db.scoped_session() as session:\n logger.info(\n f\"index_solana_plays.py | DB | Acquired session in {time.time() - db_save_start}\"\n )\n session_execute_start = time.time()\n # Save in bulk\n session.execute(Play.__table__.insert().values(plays))\n logger.info(\n f\"index_solana_plays.py | DB | Session execute completed in {time.time() - session_execute_start}\"\n )\n\n logger.info(\n f\"index_solana_plays.py | DB | Saved to DB in {time.time() - db_save_start}\"\n )\n\n track_play_ids = [play[\"play_item_id\"] for play in plays]\n if track_play_ids:\n redis.sadd(TRACK_LISTEN_IDS, *track_play_ids)\n\n logger.info(\"index_solana_plays.py | Dispatching listen events\")\n listen_dispatch_start = time.time()\n for event in challenge_bus_events:\n challenge_bus.dispatch(\n ChallengeEvent.track_listen,\n event.get(\"slot\"),\n event.get(\"user_id\"),\n {\"created_at\": event.get(\"created_at\")},\n )\n listen_dispatch_end = time.time()\n listen_dispatch_diff = listen_dispatch_end - listen_dispatch_start\n logger.info(\n f\"index_solana_plays.py | Dispatched listen events in {listen_dispatch_diff}\"\n )\n\n batch_end_time = time.time()\n batch_duration = batch_end_time - batch_start_time\n logger.info(\n f\"index_solana_plays.py | processed batch {len(tx_sig_batch_records)} txs in {batch_duration}s\"\n )\n return None","function_tokens":["def","parse_sol_tx_batch","(","db",",","solana_client_manager",",","redis",",","tx_sig_batch_records",",","retries","=","10",")",":","batch_start_time","=","time",".","time","(",")","challenge_bus_events","=","[","]","plays","=","[","]","# Last record in this batch to be cached","# Important to note that the batch records are in time DESC order","last_tx_in_batch","=","tx_sig_batch_records","[","0","]","challenge_bus","=","index_solana_plays",".","challenge_event_bus","# Process each batch in parallel","with","concurrent",".","futures",".","ThreadPoolExecutor","(",")","as","executor",":","parse_sol_tx_futures","=","{","executor",".","submit","(","parse_sol_play_transaction",",","solana_client_manager",",","tx_sig",",",")",":","tx_sig","for","tx_sig","in","tx_sig_batch_records","}","try",":","for","future","in","concurrent",".","futures",".","as_completed","(","parse_sol_tx_futures",",","timeout","=","45",")",":","# Returns the properties for a Play object to be created in the db","# can be None so check the value exists","result","=","future",".","result","(",")","if","result",":","user_id",",","track_id",",","created_at",",","source",",","slot",",","tx_sig","=","result","# Append plays to a list that will be written if all plays are successfully retrieved","# from the rpc pool","play",":","PlayInfo","=","{","\"user_id\"",":","user_id",",","\"play_item_id\"",":","track_id",",","\"created_at\"",":","created_at",",","\"updated_at\"",":","datetime",".","now","(",")",",","\"source\"",":","source",",","\"slot\"",":","slot",",","\"signature\"",":","tx_sig",",","}","plays",".","append","(","play",")","# Only enqueue a challenge event if it's *not*","# an anonymous listen","if","user_id","is","not","None",":","challenge_bus_events",".","append","(","{","\"slot\"",":","slot",",","\"user_id\"",":","user_id",",","\"created_at\"",":","created_at",".","timestamp","(",")",",","}",")","except","Exception","as","exc",":","logger",".","error","(","f\"index_solana_plays.py | Error parsing sol play transaction: {exc}\"",")","# timeout in a ThreadPoolExecutor doesn't actually stop execution of the underlying 
thread","# in order to do that we need to actually clear the queue which we do here to force this","# task to stop execution","executor",".","_threads",".","clear","(",")","concurrent",".","futures",".","thread",".","_threads_queues",".","clear","(",")","# if we have retries left, recursively call this function again","if","retries",">","0",":","return","parse_sol_tx_batch","(","db",",","solana_client_manager",",","redis",",","tx_sig_batch_records",",","retries","-","1",")","# if no more retries, raise","raise","exc","# if the thread pool executor completes successfully without raising an exception","# the data is successfully fetched so we can add it to the db session and dispatch","# events to challenge bus","# Cache the latest play from this batch","# This reflects the ordering from chain","for","play","in","plays",":","if","play",".","get","(","\"signature\"",")","==","last_tx_in_batch",":","most_recent_db_play","=","{","\"signature\"",":","play",".","get","(","\"signature\"",")",",","\"slot\"",":","play",".","get","(","\"slot\"",")",",","\"timestamp\"",":","int","(","play",".","get","(","\"created_at\"",")",".","timestamp","(",")",")",",","}","cache_latest_sol_play_db_tx","(","redis",",","most_recent_db_play",")","break","db_save_start","=","time",".","time","(",")","logger",".","info","(","f\"index_solana_plays.py | DB | Saving test to DB, fetched batch tx details in {db_save_start - batch_start_time}\"",")","with","db",".","scoped_session","(",")","as","session",":","logger",".","info","(","f\"index_solana_plays.py | DB | Acquired session in {time.time() - db_save_start}\"",")","session_execute_start","=","time",".","time","(",")","# Save in bulk","session",".","execute","(","Play",".","__table__",".","insert","(",")",".","values","(","plays",")",")","logger",".","info","(","f\"index_solana_plays.py | DB | Session execute completed in {time.time() - session_execute_start}\"",")","logger",".","info","(","f\"index_solana_plays.py | DB | Saved to DB in {time.time() - db_save_start}\"",")","track_play_ids","=","[","play","[","\"play_item_id\"","]","for","play","in","plays","]","if","track_play_ids",":","redis",".","sadd","(","TRACK_LISTEN_IDS",",","*","track_play_ids",")","logger",".","info","(","\"index_solana_plays.py | Dispatching listen events\"",")","listen_dispatch_start","=","time",".","time","(",")","for","event","in","challenge_bus_events",":","challenge_bus",".","dispatch","(","ChallengeEvent",".","track_listen",",","event",".","get","(","\"slot\"",")",",","event",".","get","(","\"user_id\"",")",",","{","\"created_at\"",":","event",".","get","(","\"created_at\"",")","}",",",")","listen_dispatch_end","=","time",".","time","(",")","listen_dispatch_diff","=","listen_dispatch_end","-","listen_dispatch_start","logger",".","info","(","f\"index_solana_plays.py | Dispatched listen events in {listen_dispatch_diff}\"",")","batch_end_time","=","time",".","time","(",")","batch_duration","=","batch_end_time","-","batch_start_time","logger",".","info","(","f\"index_solana_plays.py | processed batch {len(tx_sig_batch_records)} txs in {batch_duration}s\"",")","return","None"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_solana_plays.py#L318-L459"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_trending.py","language":"python","identifier":"get_genres","parameters":"(session: 
Session)","argument_list":"","return_statement":"return list(map(lambda x: x[0], genres))","docstring":"Returns all genres","docstring_summary":"Returns all genres","docstring_tokens":["Returns","all","genres"],"function":"def get_genres(session: Session) -> List[str]:\n \"\"\"Returns all genres\"\"\"\n genres: List[Tuple[str]] = (session.query(Track.genre).distinct(Track.genre)).all()\n genres = filter( # type: ignore\n lambda x: x[0] is not None and x[0] != \"\" and x[0] in genre_allowlist, genres\n )\n return list(map(lambda x: x[0], genres))","function_tokens":["def","get_genres","(","session",":","Session",")","->","List","[","str","]",":","genres",":","List","[","Tuple","[","str","]","]","=","(","session",".","query","(","Track",".","genre",")",".","distinct","(","Track",".","genre",")",")",".","all","(",")","genres","=","filter","(","# type: ignore","lambda","x",":","x","[","0","]","is","not","None","and","x","[","0","]","!=","\"\"","and","x","[","0","]","in","genre_allowlist",",","genres",")","return","list","(","map","(","lambda","x",":","x","[","0","]",",","genres",")",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_trending.py#L87-L93"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_trending.py","language":"python","identifier":"floor_time","parameters":"(dt: datetime, interval_seconds: int)","argument_list":"","return_statement":"return dt + timedelta(0, rounding - seconds, -dt.microsecond)","docstring":"Floor a datetime object to a time-span in seconds\n interval_seconds: Closest number of seconds to floor to\n\n For example, if floor_time is invoked with `interval_seconds` of 15,\n the provided datetime is rounded down to the nearest 15 minute interval.\n E.g. 10:48 rounds to 10:45, 11:02 rounds to 11:00, etc.","docstring_summary":"Floor a datetime object to a time-span in seconds\n interval_seconds: Closest number of seconds to floor to","docstring_tokens":["Floor","a","datetime","object","to","a","time","-","span","in","seconds","interval_seconds",":","Closest","number","of","seconds","to","floor","to"],"function":"def floor_time(dt: datetime, interval_seconds: int):\n \"\"\"\n Floor a datetime object to a time-span in seconds\n interval_seconds: Closest number of seconds to floor to\n\n For example, if floor_time is invoked with `interval_seconds` of 15,\n the provided datetime is rounded down to the nearest 15 minute interval.\n E.g. 
10:48 rounds to 10:45, 11:02 rounds to 11:00, etc.\n \"\"\"\n seconds = (dt.replace(tzinfo=None) - dt.min).seconds\n rounding = seconds \/\/ interval_seconds * interval_seconds\n return dt + timedelta(0, rounding - seconds, -dt.microsecond)","function_tokens":["def","floor_time","(","dt",":","datetime",",","interval_seconds",":","int",")",":","seconds","=","(","dt",".","replace","(","tzinfo","=","None",")","-","dt",".","min",")",".","seconds","rounding","=","seconds","\/\/","interval_seconds","*","interval_seconds","return","dt","+","timedelta","(","0",",","rounding","-","seconds",",","-","dt",".","microsecond",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_trending.py#L206-L217"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_trending.py","language":"python","identifier":"get_should_update_trending","parameters":"(\n db: SessionManager, web3: Web3, redis: Redis, interval_seconds: int\n)","argument_list":"","return_statement":"return None","docstring":"Checks if the trending job should re-run based off the last trending run's timestamp and\n the most recently indexed block's timestamp.\n If the most recently indexed block (rounded down to the nearest interval) is `interval_seconds`\n ahead of the last trending job run, then the job should re-run.\n The function returns the an int, representing the timestamp, if the jobs should re-run, else None","docstring_summary":"Checks if the trending job should re-run based off the last trending run's timestamp and\n the most recently indexed block's timestamp.\n If the most recently indexed block (rounded down to the nearest interval) is `interval_seconds`\n ahead of the last trending job run, then the job should re-run.\n The function returns the an int, representing the timestamp, if the jobs should re-run, else None","docstring_tokens":["Checks","if","the","trending","job","should","re","-","run","based","off","the","last","trending","run","s","timestamp","and","the","most","recently","indexed","block","s","timestamp",".","If","the","most","recently","indexed","block","(","rounded","down","to","the","nearest","interval",")","is","interval_seconds","ahead","of","the","last","trending","job","run","then","the","job","should","re","-","run",".","The","function","returns","the","an","int","representing","the","timestamp","if","the","jobs","should","re","-","run","else","None"],"function":"def get_should_update_trending(\n db: SessionManager, web3: Web3, redis: Redis, interval_seconds: int\n) -> Optional[int]:\n \"\"\"\n Checks if the trending job should re-run based off the last trending run's timestamp and\n the most recently indexed block's timestamp.\n If the most recently indexed block (rounded down to the nearest interval) is `interval_seconds`\n ahead of the last trending job run, then the job should re-run.\n The function returns the an int, representing the timestamp, if the jobs should re-run, else None\n \"\"\"\n with db.scoped_session() as session:\n current_db_block = (\n session.query(Block.blockhash).filter(Block.is_current == True).first()\n )\n current_block = web3.eth.getBlock(current_db_block[0], True)\n current_timestamp = current_block[\"timestamp\"]\n block_datetime = floor_time(\n datetime.fromtimestamp(current_timestamp), interval_seconds\n )\n\n last_trending_datetime = get_last_trending_datetime(redis)\n if not last_trending_datetime:\n 
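The docstring's 15-minute example corresponds to interval_seconds=900 (15 minutes expressed in seconds). Replicating floor_time confirms the arithmetic: the seconds elapsed within the day are floored to the interval, then the remainder and the microseconds are subtracted off:

```python
from datetime import datetime, timedelta

def floor_time(dt: datetime, interval_seconds: int) -> datetime:
    # Seconds elapsed since midnight (dt.min is year 1, midnight).
    seconds = (dt.replace(tzinfo=None) - dt.min).seconds
    rounding = seconds // interval_seconds * interval_seconds
    return dt + timedelta(0, rounding - seconds, -dt.microsecond)

assert floor_time(datetime(2022, 1, 1, 10, 48), 900) == datetime(2022, 1, 1, 10, 45)
assert floor_time(datetime(2022, 1, 1, 11, 2), 900) == datetime(2022, 1, 1, 11, 0)
```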
return int(block_datetime.timestamp())\n\n duration_since_last_index = block_datetime - last_trending_datetime\n if duration_since_last_index.total_seconds() >= interval_seconds:\n return int(block_datetime.timestamp())\n\n return None","function_tokens":["def","get_should_update_trending","(","db",":","SessionManager",",","web3",":","Web3",",","redis",":","Redis",",","interval_seconds",":","int",")","->","Optional","[","int","]",":","with","db",".","scoped_session","(",")","as","session",":","current_db_block","=","(","session",".","query","(","Block",".","blockhash",")",".","filter","(","Block",".","is_current","==","True",")",".","first","(",")",")","current_block","=","web3",".","eth",".","getBlock","(","current_db_block","[","0","]",",","True",")","current_timestamp","=","current_block","[","\"timestamp\"","]","block_datetime","=","floor_time","(","datetime",".","fromtimestamp","(","current_timestamp",")",",","interval_seconds",")","last_trending_datetime","=","get_last_trending_datetime","(","redis",")","if","not","last_trending_datetime",":","return","int","(","block_datetime",".","timestamp","(",")",")","duration_since_last_index","=","block_datetime","-","last_trending_datetime","if","duration_since_last_index",".","total_seconds","(",")",">=","interval_seconds",":","return","int","(","block_datetime",".","timestamp","(",")",")","return","None"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_trending.py#L220-L248"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_trending.py","language":"python","identifier":"index_trending_task","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Caches all trending combination of time-range and genre (including no genre).","docstring_summary":"Caches all trending combination of time-range and genre (including no genre).","docstring_tokens":["Caches","all","trending","combination","of","time","-","range","and","genre","(","including","no","genre",")","."],"function":"def index_trending_task(self):\n \"\"\"Caches all trending combination of time-range and genre (including no genre).\"\"\"\n db = index_trending_task.db\n redis = index_trending_task.redis\n web3 = index_trending_task.web3\n have_lock = False\n update_lock = redis.lock(\"index_trending_lock\", timeout=7200)\n try:\n should_update_timestamp = get_should_update_trending(\n db, web3, redis, UPDATE_TRENDING_DURATION_DIFF_SEC\n )\n have_lock = update_lock.acquire(blocking=False)\n if should_update_timestamp and have_lock:\n index_trending(self, db, redis, should_update_timestamp)\n else:\n logger.info(\n f\"index_trending.py | \\\n skip indexing: have lock {have_lock}, \\\n shoud update {should_update_timestamp}\"\n )\n except Exception as e:\n logger.error(\"index_trending.py | Fatal error in main loop\", exc_info=True)\n raise e\n finally:\n if have_lock:\n 
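Condensed, the decision in get_should_update_trending is: re-run when there is no recorded previous run, or when the floored block time is at least one full interval past the last run. A self-contained sketch of that logic (the real function derives block_datetime from the current block and last_run from Redis):

```python
from datetime import datetime, timedelta
from typing import Optional

def should_update(
    block_datetime: datetime,
    last_run: Optional[datetime],
    interval_seconds: int,
) -> Optional[int]:
    if last_run is None:
        return int(block_datetime.timestamp())  # never ran: update now
    if (block_datetime - last_run).total_seconds() >= interval_seconds:
        return int(block_datetime.timestamp())  # a full interval elapsed
    return None

block_dt = datetime(2022, 1, 1, 11, 0)
assert should_update(block_dt, None, 900) is not None
assert should_update(block_dt, block_dt - timedelta(minutes=5), 900) is None
assert should_update(block_dt, block_dt - timedelta(minutes=15), 900) is not None
```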
update_lock.release()","function_tokens":["def","index_trending_task","(","self",")",":","db","=","index_trending_task",".","db","redis","=","index_trending_task",".","redis","web3","=","index_trending_task",".","web3","have_lock","=","False","update_lock","=","redis",".","lock","(","\"index_trending_lock\"",",","timeout","=","7200",")","try",":","should_update_timestamp","=","get_should_update_trending","(","db",",","web3",",","redis",",","UPDATE_TRENDING_DURATION_DIFF_SEC",")","have_lock","=","update_lock",".","acquire","(","blocking","=","False",")","if","should_update_timestamp","and","have_lock",":","index_trending","(","self",",","db",",","redis",",","should_update_timestamp",")","else",":","logger",".","info","(","f\"index_trending.py | \\\n skip indexing: have lock {have_lock}, \\\n shoud update {should_update_timestamp}\"",")","except","Exception","as","e",":","logger",".","error","(","\"index_trending.py | Fatal error in main loop\"",",","exc_info","=","True",")","raise","e","finally",":","if","have_lock",":","update_lock",".","release","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_trending.py#L253-L278"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/calculate_trending_challenges.py","language":"python","identifier":"calculate_trending_challenges_task","parameters":"(self, date=None)","argument_list":"","return_statement":"","docstring":"Caches all trending combination of time-range and genre (including no genre).","docstring_summary":"Caches all trending combination of time-range and genre (including no genre).","docstring_tokens":["Caches","all","trending","combination","of","time","-","range","and","genre","(","including","no","genre",")","."],"function":"def calculate_trending_challenges_task(self, date=None):\n \"\"\"Caches all trending combination of time-range and genre (including no genre).\"\"\"\n if date is None:\n logger.error(\"calculate_trending_challenges.py | Must be called with a date\")\n return\n # Celery gives this to us formatted as '2022-01-01T00:00:00', need to parse into datetime\n date = datetime.fromisoformat(date)\n db = calculate_trending_challenges_task.db\n redis = calculate_trending_challenges_task.redis\n challenge_bus = calculate_trending_challenges_task.challenge_event_bus\n have_lock = False\n update_lock = redis.lock(\"calculate_trending_challenges_lock\", timeout=7200)\n try:\n have_lock = update_lock.acquire(blocking=False)\n if have_lock:\n enqueue_trending_challenges(db, redis, challenge_bus, date)\n else:\n logger.info(\n \"calculate_trending_challenges.py | Failed to acquire index trending lock\"\n )\n except Exception as e:\n logger.error(\n \"calculate_trending_challenges.py | Fatal error in main loop\", exc_info=True\n )\n raise e\n finally:\n if have_lock:\n update_lock.release()","function_tokens":["def","calculate_trending_challenges_task","(","self",",","date","=","None",")",":","if","date","is","None",":","logger",".","error","(","\"calculate_trending_challenges.py | Must be called with a date\"",")","return","# Celery gives this to us formatted as '2022-01-01T00:00:00', need to parse into 
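index_trending_task and the other Celery tasks in these modules share one guard: acquire a named Redis lock non-blockingly, do the work only if acquired, and always release in a finally block so a crash cannot hold the lock for its full timeout. A condensed sketch, assuming a reachable Redis server on localhost:6379:

```python
from redis import Redis

redis_client = Redis(host="localhost", port=6379)
have_lock = False
update_lock = redis_client.lock("index_trending_lock", timeout=7200)
try:
    have_lock = update_lock.acquire(blocking=False)
    if have_lock:
        pass  # do the indexing work here
    else:
        print("another worker holds the lock; skipping")
finally:
    if have_lock:
        update_lock.release()
```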
datetime","date","=","datetime",".","fromisoformat","(","date",")","db","=","calculate_trending_challenges_task",".","db","redis","=","calculate_trending_challenges_task",".","redis","challenge_bus","=","calculate_trending_challenges_task",".","challenge_event_bus","have_lock","=","False","update_lock","=","redis",".","lock","(","\"calculate_trending_challenges_lock\"",",","timeout","=","7200",")","try",":","have_lock","=","update_lock",".","acquire","(","blocking","=","False",")","if","have_lock",":","enqueue_trending_challenges","(","db",",","redis",",","challenge_bus",",","date",")","else",":","logger",".","info","(","\"calculate_trending_challenges.py | Failed to acquire index trending lock\"",")","except","Exception","as","e",":","logger",".","error","(","\"calculate_trending_challenges.py | Fatal error in main loop\"",",","exc_info","=","True",")","raise","e","finally",":","if","have_lock",":","update_lock",".","release","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/calculate_trending_challenges.py#L177-L204"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_user_bank.py","language":"python","identifier":"parse_create_token_data","parameters":"(data: str)","argument_list":"","return_statement":"return parse_instruction_data(data, create_token_account_instr)","docstring":"Parse Transfer instruction data submitted to Audius Claimable Token program\n\n Instruction struct:\n pub struct TransferArgs {\n pub eth_address: EthereumAddress,\n }\n\n Decodes the data and parses each param into the correct type","docstring_summary":"Parse Transfer instruction data submitted to Audius Claimable Token program","docstring_tokens":["Parse","Transfer","instruction","data","submitted","to","Audius","Claimable","Token","program"],"function":"def parse_create_token_data(data: str) -> CreateTokenAccount:\n \"\"\"Parse Transfer instruction data submitted to Audius Claimable Token program\n\n Instruction struct:\n pub struct TransferArgs {\n pub eth_address: EthereumAddress,\n }\n\n Decodes the data and parses each param into the correct type\n \"\"\"\n\n return parse_instruction_data(data, create_token_account_instr)","function_tokens":["def","parse_create_token_data","(","data",":","str",")","->","CreateTokenAccount",":","return","parse_instruction_data","(","data",",","create_token_account_instr",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_user_bank.py#L132-L143"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_user_bank.py","language":"python","identifier":"get_valid_instruction","parameters":"(\n tx_message: TransactionMessage, meta: ResultMeta\n)","argument_list":"","return_statement":"","docstring":"Checks that the tx is valid\n checks for the transaction message for correct instruction log\n checks accounts keys for claimable token program","docstring_summary":"Checks that the tx is valid\n checks for the transaction message for correct instruction log\n checks accounts keys for claimable token program","docstring_tokens":["Checks","that","the","tx","is","valid","checks","for","the","transaction","message","for","correct","instruction","log","checks","accounts","keys","for","claimable","token","program"],"function":"def 
get_valid_instruction(\n tx_message: TransactionMessage, meta: ResultMeta\n) -> Optional[TransactionMessageInstruction]:\n \"\"\"Checks that the tx is valid\n checks for the transaction message for correct instruction log\n checks accounts keys for claimable token program\n \"\"\"\n try:\n account_keys = tx_message[\"accountKeys\"]\n instructions = tx_message[\"instructions\"]\n user_bank_program_index = account_keys.index(USER_BANK_ADDRESS)\n for instruction in instructions:\n if instruction[\"programIdIndex\"] == user_bank_program_index:\n return instruction\n\n return None\n except Exception as e:\n logger.error(\n f\"index_user_bank.py | Error processing instruction valid, {e}\",\n exc_info=True,\n )\n return None","function_tokens":["def","get_valid_instruction","(","tx_message",":","TransactionMessage",",","meta",":","ResultMeta",")","->","Optional","[","TransactionMessageInstruction","]",":","try",":","account_keys","=","tx_message","[","\"accountKeys\"","]","instructions","=","tx_message","[","\"instructions\"","]","user_bank_program_index","=","account_keys",".","index","(","USER_BANK_ADDRESS",")","for","instruction","in","instructions",":","if","instruction","[","\"programIdIndex\"","]","==","user_bank_program_index",":","return","instruction","return","None","except","Exception","as","e",":","logger",".","error","(","f\"index_user_bank.py | Error processing instruction valid, {e}\"",",","exc_info","=","True",",",")","return","None"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_user_bank.py#L146-L167"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/users.py","language":"python","identifier":"user_state_update","parameters":"(\n self,\n update_task: DatabaseTask,\n session: Session,\n user_factory_txs,\n block_number,\n block_timestamp,\n block_hash,\n ipfs_metadata,\n blacklisted_cids,\n)","argument_list":"","return_statement":"return num_total_changes, user_ids","docstring":"Return tuple containing int representing number of User model state changes found in transaction and set of processed user IDs.","docstring_summary":"Return tuple containing int representing number of User model state changes found in transaction and set of processed user IDs.","docstring_tokens":["Return","tuple","containing","int","representing","number","of","User","model","state","changes","found","in","transaction","and","set","of","processed","user","IDs","."],"function":"def user_state_update(\n self,\n update_task: DatabaseTask,\n session: Session,\n user_factory_txs,\n block_number,\n block_timestamp,\n block_hash,\n ipfs_metadata,\n blacklisted_cids,\n) -> Tuple[int, Set]:\n \"\"\"Return tuple containing int representing number of User model state changes found in transaction and set of processed user IDs.\"\"\"\n\n blockhash = update_task.web3.toHex(block_hash)\n num_total_changes = 0\n skipped_tx_count = 0\n user_ids: Set[int] = set()\n if not user_factory_txs:\n return num_total_changes, user_ids\n\n challenge_bus = update_task.challenge_event_bus\n\n # This stores the state of the user object along with all the events applied to it\n # before it gets committed to the db\n # Data format is {\"user_id\": {\"user\", \"events\": []}}\n # NOTE - events are stored only for debugging purposes and not used or persisted anywhere\n user_events_lookup: Dict[int, Dict[str, Any]] = {}\n\n # for each user factory transaction, 
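get_valid_instruction resolves the program's position in the transaction's accountKeys, then returns the first instruction whose programIdIndex points at it. A minimal sketch of that scan, where PROGRAM_ADDRESS is a hypothetical stand-in for USER_BANK_ADDRESS:

```python
from typing import Optional

PROGRAM_ADDRESS = "ProgramAddress1111111111111111111111111111"

def first_program_instruction(tx_message: dict) -> Optional[dict]:
    try:
        program_index = tx_message["accountKeys"].index(PROGRAM_ADDRESS)
    except ValueError:
        return None  # this transaction never references the program
    for instruction in tx_message["instructions"]:
        if instruction["programIdIndex"] == program_index:
            return instruction
    return None

msg = {
    "accountKeys": ["feePayer", PROGRAM_ADDRESS],
    "instructions": [{"programIdIndex": 1, "data": "..."}],
}
assert first_program_instruction(msg) == {"programIdIndex": 1, "data": "..."}
```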
loop through every tx\n # loop through all audius event types within that tx and get all event logs\n # for each event, apply changes to the user in user_events_lookup\n for tx_receipt in user_factory_txs:\n txhash = update_task.web3.toHex(tx_receipt.transactionHash)\n for event_type in user_event_types_arr:\n user_events_tx = get_user_events_tx(update_task, event_type, tx_receipt)\n # if record does not get added, do not count towards num_total_changes\n processedEntries = 0\n for entry in user_events_tx:\n existing_user_record = None\n user_id = helpers.get_tx_arg(entry, \"_userId\")\n try:\n # look up or populate existing record\n if user_id in user_events_lookup:\n existing_user_record = user_events_lookup[user_id][\"user\"]\n else:\n existing_user_record = lookup_user_record(\n update_task,\n session,\n entry,\n block_number,\n block_timestamp,\n txhash,\n )\n\n # parse user event to add metadata to record\n if event_type == user_event_types_lookup[\"update_multihash\"]:\n metadata_multihash = helpers.multihash_digest_to_cid(\n helpers.get_tx_arg(entry, \"_multihashDigest\")\n )\n user_record = (\n parse_user_event(\n self,\n update_task,\n session,\n tx_receipt,\n block_number,\n entry,\n event_type,\n existing_user_record,\n ipfs_metadata[metadata_multihash],\n block_timestamp,\n )\n if metadata_multihash not in blacklisted_cids\n else None\n )\n else:\n user_record = parse_user_event(\n self,\n update_task,\n session,\n tx_receipt,\n block_number,\n entry,\n event_type,\n existing_user_record,\n None,\n block_timestamp,\n )\n\n # process user record\n if user_record is not None:\n if user_id not in user_events_lookup:\n user_events_lookup[user_id] = {\n \"user\": user_record,\n \"events\": [],\n }\n else:\n user_events_lookup[user_id][\"user\"] = user_record\n user_events_lookup[user_id][\"events\"].append(event_type)\n user_ids.add(user_id)\n processedEntries += 1\n except EntityMissingRequiredFieldError as e:\n logger.warning(f\"Skipping tx {txhash} with error {e}\")\n skipped_tx_count += 1\n add_node_level_skipped_transaction(\n session, block_number, blockhash, txhash\n )\n pass\n except Exception as e:\n logger.error(\"Error in parse user transaction\")\n raise IndexingError(\n \"user\", block_number, blockhash, txhash, str(e)\n ) from e\n\n num_total_changes += processedEntries\n\n logger.info(\n f\"index.py | users.py | There are {num_total_changes} events processed and {skipped_tx_count} skipped transactions.\"\n )\n\n # for each record in user_events_lookup, invalidate the old record and add the new record\n # we do this after all processing has completed so the user record is atomic by block, not tx\n for user_id, value_obj in user_events_lookup.items():\n logger.info(f\"index.py | users.py | Adding {value_obj['user']}\")\n if value_obj[\"events\"]:\n invalidate_old_user(session, user_id)\n challenge_bus.dispatch(ChallengeEvent.profile_update, block_number, user_id)\n session.add(value_obj[\"user\"])\n\n return num_total_changes, 
user_ids","function_tokens":["def","user_state_update","(","self",",","update_task",":","DatabaseTask",",","session",":","Session",",","user_factory_txs",",","block_number",",","block_timestamp",",","block_hash",",","ipfs_metadata",",","blacklisted_cids",",",")","->","Tuple","[","int",",","Set","]",":","blockhash","=","update_task",".","web3",".","toHex","(","block_hash",")","num_total_changes","=","0","skipped_tx_count","=","0","user_ids",":","Set","[","int","]","=","set","(",")","if","not","user_factory_txs",":","return","num_total_changes",",","user_ids","challenge_bus","=","update_task",".","challenge_event_bus","# This stores the state of the user object along with all the events applied to it","# before it gets committed to the db","# Data format is {\"user_id\": {\"user\", \"events\": []}}","# NOTE - events are stored only for debugging purposes and not used or persisted anywhere","user_events_lookup",":","Dict","[","int",",","Dict","[","str",",","Any","]","]","=","{","}","# for each user factory transaction, loop through every tx","# loop through all audius event types within that tx and get all event logs","# for each event, apply changes to the user in user_events_lookup","for","tx_receipt","in","user_factory_txs",":","txhash","=","update_task",".","web3",".","toHex","(","tx_receipt",".","transactionHash",")","for","event_type","in","user_event_types_arr",":","user_events_tx","=","get_user_events_tx","(","update_task",",","event_type",",","tx_receipt",")","# if record does not get added, do not count towards num_total_changes","processedEntries","=","0","for","entry","in","user_events_tx",":","existing_user_record","=","None","user_id","=","helpers",".","get_tx_arg","(","entry",",","\"_userId\"",")","try",":","# look up or populate existing record","if","user_id","in","user_events_lookup",":","existing_user_record","=","user_events_lookup","[","user_id","]","[","\"user\"","]","else",":","existing_user_record","=","lookup_user_record","(","update_task",",","session",",","entry",",","block_number",",","block_timestamp",",","txhash",",",")","# parse user event to add metadata to record","if","event_type","==","user_event_types_lookup","[","\"update_multihash\"","]",":","metadata_multihash","=","helpers",".","multihash_digest_to_cid","(","helpers",".","get_tx_arg","(","entry",",","\"_multihashDigest\"",")",")","user_record","=","(","parse_user_event","(","self",",","update_task",",","session",",","tx_receipt",",","block_number",",","entry",",","event_type",",","existing_user_record",",","ipfs_metadata","[","metadata_multihash","]",",","block_timestamp",",",")","if","metadata_multihash","not","in","blacklisted_cids","else","None",")","else",":","user_record","=","parse_user_event","(","self",",","update_task",",","session",",","tx_receipt",",","block_number",",","entry",",","event_type",",","existing_user_record",",","None",",","block_timestamp",",",")","# process user record","if","user_record","is","not","None",":","if","user_id","not","in","user_events_lookup",":","user_events_lookup","[","user_id","]","=","{","\"user\"",":","user_record",",","\"events\"",":","[","]",",","}","else",":","user_events_lookup","[","user_id","]","[","\"user\"","]","=","user_record","user_events_lookup","[","user_id","]","[","\"events\"","]",".","append","(","event_type",")","user_ids",".","add","(","user_id",")","processedEntries","+=","1","except","EntityMissingRequiredFieldError","as","e",":","logger",".","warning","(","f\"Skipping tx {txhash} with error 
{e}\"",")","skipped_tx_count","+=","1","add_node_level_skipped_transaction","(","session",",","block_number",",","blockhash",",","txhash",")","pass","except","Exception","as","e",":","logger",".","error","(","\"Error in parse user transaction\"",")","raise","IndexingError","(","\"user\"",",","block_number",",","blockhash",",","txhash",",","str","(","e",")",")","from","e","num_total_changes","+=","processedEntries","logger",".","info","(","f\"index.py | users.py | There are {num_total_changes} events processed and {skipped_tx_count} skipped transactions.\"",")","# for each record in user_events_lookup, invalidate the old record and add the new record","# we do this after all processing has completed so the user record is atomic by block, not tx","for","user_id",",","value_obj","in","user_events_lookup",".","items","(",")",":","logger",".","info","(","f\"index.py | users.py | Adding {value_obj['user']}\"",")","if","value_obj","[","\"events\"","]",":","invalidate_old_user","(","session",",","user_id",")","challenge_bus",".","dispatch","(","ChallengeEvent",".","profile_update",",","block_number",",","user_id",")","session",".","add","(","value_obj","[","\"user\"","]",")","return","num_total_changes",",","user_ids"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/users.py#L26-L155"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/users.py","language":"python","identifier":"update_user_associated_wallets","parameters":"(\n session, update_task, user_record, associated_wallets, chain\n)","argument_list":"","return_statement":"","docstring":"Updates the user associated wallets table","docstring_summary":"Updates the user associated wallets table","docstring_tokens":["Updates","the","user","associated","wallets","table"],"function":"def update_user_associated_wallets(\n session, update_task, user_record, associated_wallets, chain\n):\n \"\"\"Updates the user associated wallets table\"\"\"\n try:\n if not isinstance(associated_wallets, dict):\n # With malformed associated wallets, we update the associated wallets\n # to be an empty dict. 
This has the effect of generating new rows for the\n # already associated wallets and marking them as deleted.\n associated_wallets = {}\n\n prev_user_associated_wallets_response = (\n session.query(AssociatedWallet.wallet)\n .filter_by(\n user_id=user_record.user_id,\n is_current=True,\n is_delete=False,\n chain=chain,\n )\n .all()\n )\n\n previous_wallets = [\n wallet for [wallet] in prev_user_associated_wallets_response\n ]\n added_associated_wallets = set()\n\n session.query(AssociatedWallet).filter_by(\n user_id=user_record.user_id, chain=chain\n ).update({\"is_current\": False})\n\n # Verify the wallet signatures and create the user id to wallet associations\n for associated_wallet, wallet_metadata in associated_wallets.items():\n if \"signature\" not in wallet_metadata or not isinstance(\n wallet_metadata[\"signature\"], str\n ):\n continue\n is_valid_signature = validate_signature(\n chain,\n update_task.web3,\n user_record.user_id,\n associated_wallet,\n wallet_metadata[\"signature\"],\n )\n\n if is_valid_signature:\n # Check that the wallet doesn't already exist\n wallet_exists = (\n session.query(AssociatedWallet)\n .filter_by(\n wallet=associated_wallet,\n is_current=True,\n is_delete=False,\n chain=chain,\n )\n .count()\n > 0\n )\n if not wallet_exists:\n added_associated_wallets.add(associated_wallet)\n associated_wallet_entry = AssociatedWallet(\n user_id=user_record.user_id,\n wallet=associated_wallet,\n chain=chain,\n is_current=True,\n is_delete=False,\n blocknumber=user_record.blocknumber,\n blockhash=user_record.blockhash,\n )\n session.add(associated_wallet_entry)\n\n # Mark the previously associated wallets as deleted\n for previously_associated_wallet in previous_wallets:\n if previously_associated_wallet not in added_associated_wallets:\n associated_wallet_entry = AssociatedWallet(\n user_id=user_record.user_id,\n wallet=previously_associated_wallet,\n chain=chain,\n is_current=True,\n is_delete=True,\n blocknumber=user_record.blocknumber,\n blockhash=user_record.blockhash,\n )\n session.add(associated_wallet_entry)\n\n is_updated_wallets = set(previous_wallets) != added_associated_wallets\n if is_updated_wallets:\n enqueue_immediate_balance_refresh(update_task.redis, [user_record.user_id])\n except Exception as e:\n logger.error(\n f\"index.py | users.py | Fatal updating user associated wallets while indexing {e}\",\n exc_info=True,\n )","function_tokens":["def","update_user_associated_wallets","(","session",",","update_task",",","user_record",",","associated_wallets",",","chain",")",":","try",":","if","not","isinstance","(","associated_wallets",",","dict",")",":","# With malformed associated wallets, we update the associated wallets","# to be an empty dict. 
This has the effect of generating new rows for the","# already associated wallets and marking them as deleted.","associated_wallets","=","{","}","prev_user_associated_wallets_response","=","(","session",".","query","(","AssociatedWallet",".","wallet",")",".","filter_by","(","user_id","=","user_record",".","user_id",",","is_current","=","True",",","is_delete","=","False",",","chain","=","chain",",",")",".","all","(",")",")","previous_wallets","=","[","wallet","for","[","wallet","]","in","prev_user_associated_wallets_response","]","added_associated_wallets","=","set","(",")","session",".","query","(","AssociatedWallet",")",".","filter_by","(","user_id","=","user_record",".","user_id",",","chain","=","chain",")",".","update","(","{","\"is_current\"",":","False","}",")","# Verify the wallet signatures and create the user id to wallet associations","for","associated_wallet",",","wallet_metadata","in","associated_wallets",".","items","(",")",":","if","\"signature\"","not","in","wallet_metadata","or","not","isinstance","(","wallet_metadata","[","\"signature\"","]",",","str",")",":","continue","is_valid_signature","=","validate_signature","(","chain",",","update_task",".","web3",",","user_record",".","user_id",",","associated_wallet",",","wallet_metadata","[","\"signature\"","]",",",")","if","is_valid_signature",":","# Check that the wallet doesn't already exist","wallet_exists","=","(","session",".","query","(","AssociatedWallet",")",".","filter_by","(","wallet","=","associated_wallet",",","is_current","=","True",",","is_delete","=","False",",","chain","=","chain",",",")",".","count","(",")",">","0",")","if","not","wallet_exists",":","added_associated_wallets",".","add","(","associated_wallet",")","associated_wallet_entry","=","AssociatedWallet","(","user_id","=","user_record",".","user_id",",","wallet","=","associated_wallet",",","chain","=","chain",",","is_current","=","True",",","is_delete","=","False",",","blocknumber","=","user_record",".","blocknumber",",","blockhash","=","user_record",".","blockhash",",",")","session",".","add","(","associated_wallet_entry",")","# Mark the previously associated wallets as deleted","for","previously_associated_wallet","in","previous_wallets",":","if","previously_associated_wallet","not","in","added_associated_wallets",":","associated_wallet_entry","=","AssociatedWallet","(","user_id","=","user_record",".","user_id",",","wallet","=","previously_associated_wallet",",","chain","=","chain",",","is_current","=","True",",","is_delete","=","True",",","blocknumber","=","user_record",".","blocknumber",",","blockhash","=","user_record",".","blockhash",",",")","session",".","add","(","associated_wallet_entry",")","is_updated_wallets","=","set","(","previous_wallets",")","!=","added_associated_wallets","if","is_updated_wallets",":","enqueue_immediate_balance_refresh","(","update_task",".","redis",",","[","user_record",".","user_id","]",")","except","Exception","as","e",":","logger",".","error","(","f\"index.py | users.py | Fatal updating user associated wallets while indexing {e}\"",",","exc_info","=","True",",",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/users.py#L413-L505"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/users.py","language":"python","identifier":"update_user_events","parameters":"(\n session: Session,\n user_record: User,\n events: UserEventsMetadata,\n bus: 
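update_user_associated_wallets ends by comparing the previously associated wallet set against the set verified in this block, and only a genuine change triggers a balance refresh. The comparison in isolation:

```python
# Previously verified wallets vs. wallets (re-)verified in this block.
previous_wallets = {"0xabc", "0xdef"}
added_associated_wallets = {"0xabc", "0x123"}

is_updated_wallets = previous_wallets != added_associated_wallets
if is_updated_wallets:
    # The real code calls enqueue_immediate_balance_refresh for the user here.
    print("wallet set changed; refresh balances")
```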
ChallengeEventBus,\n)","argument_list":"","return_statement":"","docstring":"Updates the user events table","docstring_summary":"Updates the user events table","docstring_tokens":["Updates","the","user","events","table"],"function":"def update_user_events(\n session: Session,\n user_record: User,\n events: UserEventsMetadata,\n bus: ChallengeEventBus,\n) -> None:\n \"\"\"Updates the user events table\"\"\"\n try:\n if not isinstance(events, dict):\n # There is something wrong with events, don't process it\n return\n\n # Get existing UserEvents entry\n existing_user_events = (\n session.query(UserEvents)\n .filter_by(user_id=user_record.user_id, is_current=True)\n .one_or_none()\n )\n existing_referrer = (\n existing_user_events.referrer if existing_user_events else None\n )\n existing_mobile_user = (\n existing_user_events.is_mobile_user if existing_user_events else False\n )\n user_events = UserEvents(\n user_id=user_record.user_id,\n is_current=True,\n blocknumber=user_record.blocknumber,\n blockhash=user_record.blockhash,\n referrer=existing_referrer,\n is_mobile_user=existing_mobile_user,\n )\n for event, value in events.items():\n if (\n event == \"referrer\"\n and isinstance(value, int)\n and user_events.referrer is None\n ):\n user_events.referrer = value\n bus.dispatch(\n ChallengeEvent.referral_signup,\n user_record.blocknumber,\n value,\n {\"referred_user_id\": user_record.user_id},\n )\n bus.dispatch(\n ChallengeEvent.referred_signup,\n user_record.blocknumber,\n user_record.user_id,\n )\n elif (\n event == \"is_mobile_user\"\n and isinstance(value, bool)\n and not user_events.is_mobile_user\n ):\n user_events.is_mobile_user = value\n if value:\n bus.dispatch(\n ChallengeEvent.mobile_install,\n user_record.blocknumber,\n user_record.user_id,\n )\n # Only add a row if there's an update\n if (\n existing_user_events is None\n or user_events.is_mobile_user != existing_mobile_user\n or user_events.referrer != existing_referrer\n ):\n # Mark existing UserEvents entries as not current\n session.query(UserEvents).filter_by(\n user_id=user_record.user_id, is_current=True\n ).update({\"is_current\": False})\n session.add(user_events)\n\n except Exception as e:\n logger.error(\n f\"index.py | users.py | Fatal updating user events while indexing {e}\",\n exc_info=True,\n )","function_tokens":["def","update_user_events","(","session",":","Session",",","user_record",":","User",",","events",":","UserEventsMetadata",",","bus",":","ChallengeEventBus",",",")","->","None",":","try",":","if","not","isinstance","(","events",",","dict",")",":","# There is something wrong with events, don't process it","return","# Get existing UserEvents 
entry","existing_user_events","=","(","session",".","query","(","UserEvents",")",".","filter_by","(","user_id","=","user_record",".","user_id",",","is_current","=","True",")",".","one_or_none","(",")",")","existing_referrer","=","(","existing_user_events",".","referrer","if","existing_user_events","else","None",")","existing_mobile_user","=","(","existing_user_events",".","is_mobile_user","if","existing_user_events","else","False",")","user_events","=","UserEvents","(","user_id","=","user_record",".","user_id",",","is_current","=","True",",","blocknumber","=","user_record",".","blocknumber",",","blockhash","=","user_record",".","blockhash",",","referrer","=","existing_referrer",",","is_mobile_user","=","existing_mobile_user",",",")","for","event",",","value","in","events",".","items","(",")",":","if","(","event","==","\"referrer\"","and","isinstance","(","value",",","int",")","and","user_events",".","referrer","is","None",")",":","user_events",".","referrer","=","value","bus",".","dispatch","(","ChallengeEvent",".","referral_signup",",","user_record",".","blocknumber",",","value",",","{","\"referred_user_id\"",":","user_record",".","user_id","}",",",")","bus",".","dispatch","(","ChallengeEvent",".","referred_signup",",","user_record",".","blocknumber",",","user_record",".","user_id",",",")","elif","(","event","==","\"is_mobile_user\"","and","isinstance","(","value",",","bool",")","and","not","user_events",".","is_mobile_user",")",":","user_events",".","is_mobile_user","=","value","if","value",":","bus",".","dispatch","(","ChallengeEvent",".","mobile_install",",","user_record",".","blocknumber",",","user_record",".","user_id",",",")","# Only add a row if there's an update","if","(","existing_user_events","is","None","or","user_events",".","is_mobile_user","!=","existing_mobile_user","or","user_events",".","referrer","!=","existing_referrer",")",":","# Mark existing UserEvents entries as not current","session",".","query","(","UserEvents",")",".","filter_by","(","user_id","=","user_record",".","user_id",",","is_current","=","True",")",".","update","(","{","\"is_current\"",":","False","}",")","session",".","add","(","user_events",")","except","Exception","as","e",":","logger",".","error","(","f\"index.py | users.py | Fatal updating user events while indexing {e}\"",",","exc_info","=","True",",",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/users.py#L535-L613"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_listen_count_milestones.py","language":"python","identifier":"get_next_track_milestone","parameters":"(play_count: int, prev_milestone: Optional[int] = None)","argument_list":"","return_statement":"return next_milestone","docstring":"Gets the next hightest milstone threshold avaiable given the play count,\n if past the last threshold or given an invalid previous milestone, will return None","docstring_summary":"Gets the next hightest milstone threshold avaiable given the play count,\n if past the last threshold or given an invalid previous milestone, will return None","docstring_tokens":["Gets","the","next","hightest","milstone","threshold","avaiable","given","the","play","count","if","past","the","last","threshold","or","given","an","invalid","previous","milestone","will","return","None"],"function":"def get_next_track_milestone(play_count: int, prev_milestone: Optional[int] = None):\n \"\"\"\n Gets the 
next hightest milstone threshold avaiable given the play count,\n if past the last threshold or given an invalid previous milestone, will return None\n \"\"\"\n next_milestone = milestone_threshold[0]\n if prev_milestone:\n if prev_milestone in next_threshold:\n next_milestone = next_threshold[prev_milestone]\n else:\n # track is past the last milestone, so return none and stop\n return None\n\n # If play counts have not passed the next threshold, return None\n if play_count < next_milestone:\n return None\n\n # If play counts have pasted the next milestone threshold, continue to compare against higher thresholds\n next_next_milestone = (\n next_threshold[next_milestone] if next_milestone in next_threshold else None\n )\n while next_next_milestone and play_count >= next_next_milestone:\n next_milestone = next_next_milestone\n next_next_milestone = (\n next_threshold[next_milestone] if next_milestone in next_threshold else None\n )\n\n return next_milestone","function_tokens":["def","get_next_track_milestone","(","play_count",":","int",",","prev_milestone",":","Optional","[","int","]","=","None",")",":","next_milestone","=","milestone_threshold","[","0","]","if","prev_milestone",":","if","prev_milestone","in","next_threshold",":","next_milestone","=","next_threshold","[","prev_milestone","]","else",":","# track is past the last milestone, so return none and stop","return","None","# If play counts have not passed the next threshold, return None","if","play_count","<","next_milestone",":","return","None","# If play counts have pasted the next milestone threshold, continue to compare against higher thresholds","next_next_milestone","=","(","next_threshold","[","next_milestone","]","if","next_milestone","in","next_threshold","else","None",")","while","next_next_milestone","and","play_count",">=","next_next_milestone",":","next_milestone","=","next_next_milestone","next_next_milestone","=","(","next_threshold","[","next_milestone","]","if","next_milestone","in","next_threshold","else","None",")","return","next_milestone"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_listen_count_milestones.py#L39-L66"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_listen_count_milestones.py","language":"python","identifier":"index_listen_count_milestones_task","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Caches all trending combination of time-range and genre (including no genre).","docstring_summary":"Caches all trending combination of time-range and genre (including no genre).","docstring_tokens":["Caches","all","trending","combination","of","time","-","range","and","genre","(","including","no","genre",")","."],"function":"def index_listen_count_milestones_task(self):\n \"\"\"Caches all trending combination of time-range and genre (including no genre).\"\"\"\n db = index_listen_count_milestones_task.db\n redis = index_listen_count_milestones_task.redis\n have_lock = False\n # Max timeout is 60 sec * 10 min\n update_lock = redis.lock(\"index_listen_count_milestones_lock\", timeout=600)\n try:\n have_lock = update_lock.acquire(blocking=False)\n if have_lock:\n index_listen_count_milestones(db, redis)\n else:\n logger.info(\n \"index_listen_count_milestones.py | Failed to acquire index trending lock\"\n )\n except Exception as e:\n logger.error(\n \"index_listen_count_milestones.py | Fatal error 
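get_next_track_milestone (i.e. the next highest milestone threshold available) walks a threshold ladder until the play count no longer clears the next rung, returning None once past the final milestone. A self-contained, behavior-equivalent version with a small illustrative ladder; the real module defines its own milestone_threshold and next_threshold tables:

```python
from typing import Optional

milestone_threshold = [10, 100, 1000]
next_threshold = {10: 100, 100: 1000}  # 1000 is the last milestone

def get_next_track_milestone(play_count: int, prev_milestone: Optional[int] = None):
    next_milestone = milestone_threshold[0]
    if prev_milestone:
        if prev_milestone in next_threshold:
            next_milestone = next_threshold[prev_milestone]
        else:
            return None  # already past the last milestone
    if play_count < next_milestone:
        return None  # next threshold not reached yet
    # Climb to the highest threshold the play count has passed.
    while next_milestone in next_threshold and play_count >= next_threshold[next_milestone]:
        next_milestone = next_threshold[next_milestone]
    return next_milestone

assert get_next_track_milestone(5) is None          # below the first rung
assert get_next_track_milestone(150) == 100         # skips straight past 10
assert get_next_track_milestone(150, 10) == 100     # advances from prior rung
assert get_next_track_milestone(2000, 1000) is None # past the final milestone
```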
in main loop\", exc_info=True\n )\n raise e\n finally:\n if have_lock:\n update_lock.release()","function_tokens":["def","index_listen_count_milestones_task","(","self",")",":","db","=","index_listen_count_milestones_task",".","db","redis","=","index_listen_count_milestones_task",".","redis","have_lock","=","False","# Max timeout is 60 sec * 10 min","update_lock","=","redis",".","lock","(","\"index_listen_count_milestones_lock\"",",","timeout","=","600",")","try",":","have_lock","=","update_lock",".","acquire","(","blocking","=","False",")","if","have_lock",":","index_listen_count_milestones","(","db",",","redis",")","else",":","logger",".","info","(","\"index_listen_count_milestones.py | Failed to acquire index trending lock\"",")","except","Exception","as","e",":","logger",".","error","(","\"index_listen_count_milestones.py | Fatal error in main loop\"",",","exc_info","=","True",")","raise","e","finally",":","if","have_lock",":","update_lock",".","release","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_listen_count_milestones.py#L149-L171"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/user_replica_set.py","language":"python","identifier":"user_replica_set_state_update","parameters":"(\n self,\n update_task: DatabaseTask,\n session: Session,\n user_replica_set_mgr_txs,\n block_number,\n block_timestamp,\n block_hash,\n _ipfs_metadata, # prefix unused args with underscore to prevent pylint\n _blacklisted_cids,\n)","argument_list":"","return_statement":"return num_user_replica_set_changes, user_ids","docstring":"Return Tuple containing int representing number of User model state changes found in transaction and set of user_id values","docstring_summary":"Return Tuple containing int representing number of User model state changes found in transaction and set of user_id values","docstring_tokens":["Return","Tuple","containing","int","representing","number","of","User","model","state","changes","found","in","transaction","and","set","of","user_id","values"],"function":"def user_replica_set_state_update(\n self,\n update_task: DatabaseTask,\n session: Session,\n user_replica_set_mgr_txs,\n block_number,\n block_timestamp,\n block_hash,\n _ipfs_metadata, # prefix unused args with underscore to prevent pylint\n _blacklisted_cids,\n) -> Tuple[int, Set]:\n \"\"\"Return Tuple containing int representing number of User model state changes found in transaction and set of user_id values\"\"\"\n\n event_blockhash = update_task.web3.toHex(block_hash)\n num_user_replica_set_changes = 0\n skipped_tx_count = 0\n\n user_ids: Set[int] = set()\n if not user_replica_set_mgr_txs:\n return num_user_replica_set_changes, user_ids\n\n # This stores the state of the user object along with all the events applied to it\n # before it gets committed to the db\n # Data format is {\"user_id\": {\"user\", \"events\": []}}\n # NOTE - events are stored only for debugging purposes and not used or persisted anywhere\n user_replica_set_events_lookup = {}\n\n # This stores the state of the cnode object along with all events applied\n # Data format is {\"cnode_sp_id\": {\"cnode_record\", \"events\":[]}}\n cnode_events_lookup = {}\n\n # pylint: disable=too-many-nested-blocks\n for tx_receipt in user_replica_set_mgr_txs:\n txhash = update_task.web3.toHex(tx_receipt.transactionHash)\n for event_type in 
user_replica_set_manager_event_types_arr:\n user_events_tx = get_user_replica_set_mgr_tx(\n update_task, event_type, tx_receipt\n )\n processedEntries = 0 # if record does not get added, do not count towards num_total_changes\n for entry in user_events_tx:\n args = entry[\"args\"]\n existing_user_record = None\n existing_cnode_record = None\n user_id = (\n helpers.get_tx_arg(entry, \"_userId\") if \"_userId\" in args else None\n )\n cnode_sp_id = (\n helpers.get_tx_arg(entry, \"_cnodeSpId\")\n if \"_cnodeSpId\" in args\n else None\n )\n try:\n # if the user id is not in the lookup object, it hasn't been initialized yet\n # first, get the user object from the db(if exists or create a new one)\n # then set the lookup object for user_id with the appropriate props\n if user_id and (user_id not in user_replica_set_events_lookup):\n existing_user_record = lookup_user_record(\n update_task,\n session,\n entry,\n block_number,\n block_timestamp,\n txhash,\n )\n\n if cnode_sp_id and (cnode_sp_id not in cnode_events_lookup):\n existing_cnode_record = lookup_ursm_cnode(\n update_task,\n session,\n entry,\n block_number,\n block_timestamp,\n txhash,\n )\n\n # Add or update the value of the user record for this block in user_replica_set_events_lookup,\n # ensuring that multiple events for a single user result in only 1 row insert operation\n # (even if multiple operations are present)\n if (\n event_type\n == user_replica_set_manager_event_types_lookup[\n \"update_replica_set\"\n ]\n ):\n parsed_user_record = parse_user_record(\n update_task,\n entry,\n existing_user_record,\n block_timestamp,\n )\n if user_id not in user_replica_set_events_lookup:\n user_replica_set_events_lookup[user_id] = {\n \"user\": parsed_user_record,\n \"events\": [],\n }\n else:\n user_replica_set_events_lookup[user_id][\n \"user\"\n ] = parsed_user_record\n user_replica_set_events_lookup[user_id][\"events\"].append(\n event_type\n )\n user_ids.add(user_id)\n processedEntries += 1\n # Process L2 Content Node operations\n elif (\n event_type\n == user_replica_set_manager_event_types_lookup[\n \"add_or_update_content_node\"\n ]\n ):\n parsed_cnode_record = parse_ursm_cnode_record(\n update_task,\n entry,\n existing_cnode_record,\n )\n if cnode_sp_id not in cnode_events_lookup:\n cnode_events_lookup[cnode_sp_id] = {\n \"content_node\": parsed_cnode_record,\n \"events\": [],\n }\n else:\n cnode_events_lookup[cnode_sp_id][\n \"content_node\"\n ] = parsed_cnode_record\n cnode_events_lookup[cnode_sp_id][\"events\"].append(event_type)\n processedEntries += 1\n except EntityMissingRequiredFieldError as e:\n logger.warning(f\"Skipping tx {txhash} with error {e}\")\n skipped_tx_count += 1\n add_node_level_skipped_transaction(\n session, block_number, event_blockhash, txhash\n )\n pass\n except Exception as e:\n logger.info(\"Error in parse user replica set transaction\")\n raise IndexingError(\n \"user_replica_set\",\n block_number,\n event_blockhash,\n txhash,\n str(e),\n ) from e\n num_user_replica_set_changes += processedEntries\n\n logger.info(\n f\"index.py | user_replica_set.py | [URSM indexing] There are {num_user_replica_set_changes} events processed and {skipped_tx_count} skipped transactions.\"\n )\n\n # for each record in user_replica_set_events_lookup, invalidate the old record and add the new record\n # we do this after all processing has completed so the user record is atomic by block, not tx\n for user_id, value_obj in user_replica_set_events_lookup.items():\n logger.info(\n f\"index.py | user_replica_set.py | Replica Set 
Processing Adding {value_obj['user']}\"\n )\n invalidate_old_user(session, user_id)\n session.add(value_obj[\"user\"])\n\n for content_node_id, value_obj in cnode_events_lookup.items():\n logger.info(\n f\"index.py | user_replica_set.py | Content Node Processing Adding {value_obj['content_node']}\"\n )\n invalidate_old_cnode_record(session, content_node_id)\n session.add(value_obj[\"content_node\"])\n\n return num_user_replica_set_changes, user_ids","function_tokens":["def","user_replica_set_state_update","(","self",",","update_task",":","DatabaseTask",",","session",":","Session",",","user_replica_set_mgr_txs",",","block_number",",","block_timestamp",",","block_hash",",","_ipfs_metadata",",","# prefix unused args with underscore to prevent pylint","_blacklisted_cids",",",")","->","Tuple","[","int",",","Set","]",":","event_blockhash","=","update_task",".","web3",".","toHex","(","block_hash",")","num_user_replica_set_changes","=","0","skipped_tx_count","=","0","user_ids",":","Set","[","int","]","=","set","(",")","if","not","user_replica_set_mgr_txs",":","return","num_user_replica_set_changes",",","user_ids","# This stores the state of the user object along with all the events applied to it","# before it gets committed to the db","# Data format is {\"user_id\": {\"user\", \"events\": []}}","# NOTE - events are stored only for debugging purposes and not used or persisted anywhere","user_replica_set_events_lookup","=","{","}","# This stores the state of the cnode object along with all events applied","# Data format is {\"cnode_sp_id\": {\"cnode_record\", \"events\":[]}}","cnode_events_lookup","=","{","}","# pylint: disable=too-many-nested-blocks","for","tx_receipt","in","user_replica_set_mgr_txs",":","txhash","=","update_task",".","web3",".","toHex","(","tx_receipt",".","transactionHash",")","for","event_type","in","user_replica_set_manager_event_types_arr",":","user_events_tx","=","get_user_replica_set_mgr_tx","(","update_task",",","event_type",",","tx_receipt",")","processedEntries","=","0","# if record does not get added, do not count towards num_total_changes","for","entry","in","user_events_tx",":","args","=","entry","[","\"args\"","]","existing_user_record","=","None","existing_cnode_record","=","None","user_id","=","(","helpers",".","get_tx_arg","(","entry",",","\"_userId\"",")","if","\"_userId\"","in","args","else","None",")","cnode_sp_id","=","(","helpers",".","get_tx_arg","(","entry",",","\"_cnodeSpId\"",")","if","\"_cnodeSpId\"","in","args","else","None",")","try",":","# if the user id is not in the lookup object, it hasn't been initialized yet","# first, get the user object from the db(if exists or create a new one)","# then set the lookup object for user_id with the appropriate props","if","user_id","and","(","user_id","not","in","user_replica_set_events_lookup",")",":","existing_user_record","=","lookup_user_record","(","update_task",",","session",",","entry",",","block_number",",","block_timestamp",",","txhash",",",")","if","cnode_sp_id","and","(","cnode_sp_id","not","in","cnode_events_lookup",")",":","existing_cnode_record","=","lookup_ursm_cnode","(","update_task",",","session",",","entry",",","block_number",",","block_timestamp",",","txhash",",",")","# Add or update the value of the user record for this block in user_replica_set_events_lookup,","# ensuring that multiple events for a single user result in only 1 row insert operation","# (even if multiple operations are 
present)","if","(","event_type","==","user_replica_set_manager_event_types_lookup","[","\"update_replica_set\"","]",")",":","parsed_user_record","=","parse_user_record","(","update_task",",","entry",",","existing_user_record",",","block_timestamp",",",")","if","user_id","not","in","user_replica_set_events_lookup",":","user_replica_set_events_lookup","[","user_id","]","=","{","\"user\"",":","parsed_user_record",",","\"events\"",":","[","]",",","}","else",":","user_replica_set_events_lookup","[","user_id","]","[","\"user\"","]","=","parsed_user_record","user_replica_set_events_lookup","[","user_id","]","[","\"events\"","]",".","append","(","event_type",")","user_ids",".","add","(","user_id",")","processedEntries","+=","1","# Process L2 Content Node operations","elif","(","event_type","==","user_replica_set_manager_event_types_lookup","[","\"add_or_update_content_node\"","]",")",":","parsed_cnode_record","=","parse_ursm_cnode_record","(","update_task",",","entry",",","existing_cnode_record",",",")","if","cnode_sp_id","not","in","cnode_events_lookup",":","cnode_events_lookup","[","cnode_sp_id","]","=","{","\"content_node\"",":","parsed_cnode_record",",","\"events\"",":","[","]",",","}","else",":","cnode_events_lookup","[","cnode_sp_id","]","[","\"content_node\"","]","=","parsed_cnode_record","cnode_events_lookup","[","cnode_sp_id","]","[","\"events\"","]",".","append","(","event_type",")","processedEntries","+=","1","except","EntityMissingRequiredFieldError","as","e",":","logger",".","warning","(","f\"Skipping tx {txhash} with error {e}\"",")","skipped_tx_count","+=","1","add_node_level_skipped_transaction","(","session",",","block_number",",","event_blockhash",",","txhash",")","pass","except","Exception","as","e",":","logger",".","info","(","\"Error in parse user replica set transaction\"",")","raise","IndexingError","(","\"user_replica_set\"",",","block_number",",","event_blockhash",",","txhash",",","str","(","e",")",",",")","from","e","num_user_replica_set_changes","+=","processedEntries","logger",".","info","(","f\"index.py | user_replica_set.py | [URSM indexing] There are {num_user_replica_set_changes} events processed and {skipped_tx_count} skipped transactions.\"",")","# for each record in user_replica_set_events_lookup, invalidate the old record and add the new record","# we do this after all processing has completed so the user record is atomic by block, not tx","for","user_id",",","value_obj","in","user_replica_set_events_lookup",".","items","(",")",":","logger",".","info","(","f\"index.py | user_replica_set.py | Replica Set Processing Adding {value_obj['user']}\"",")","invalidate_old_user","(","session",",","user_id",")","session",".","add","(","value_obj","[","\"user\"","]",")","for","content_node_id",",","value_obj","in","cnode_events_lookup",".","items","(",")",":","logger",".","info","(","f\"index.py | user_replica_set.py | Content Node Processing Adding {value_obj['content_node']}\"",")","invalidate_old_cnode_record","(","session",",","content_node_id",")","session",".","add","(","value_obj","[","\"content_node\"","]",")","return","num_user_replica_set_changes",",","user_ids"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/user_replica_set.py#L27-L192"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index.py","language":"python","identifier":"get_tx_hash_to_skip","parameters":"(session, 
redis)","argument_list":"","return_statement":"","docstring":"Fetch if there is a tx_hash to be skipped because of continuous errors","docstring_summary":"Fetch if there is a tx_hash to be skipped because of continuous errors","docstring_tokens":["Fetch","if","there","is","a","tx_hash","to","be","skipped","because","of","continuous","errors"],"function":"def get_tx_hash_to_skip(session, redis):\n \"\"\"Fetch if there is a tx_hash to be skipped because of continuous errors\"\"\"\n indexing_error = get_indexing_error(redis)\n if (\n isinstance(indexing_error, dict)\n and \"has_consensus\" in indexing_error\n and indexing_error[\"has_consensus\"]\n ):\n return indexing_error[\"txhash\"]\n else:\n return None","function_tokens":["def","get_tx_hash_to_skip","(","session",",","redis",")",":","indexing_error","=","get_indexing_error","(","redis",")","if","(","isinstance","(","indexing_error",",","dict",")","and","\"has_consensus\"","in","indexing_error","and","indexing_error","[","\"has_consensus\"","]",")",":","return","indexing_error","[","\"txhash\"","]","else",":","return","None"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index.py#L397-L407"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_metrics.py","language":"python","identifier":"process_route_keys","parameters":"(session, redis, key, ip, date)","argument_list":"","return_statement":"","docstring":"For a redis hset storing a mapping of routes to the number of times they are hit,\n parse each key out into the version, path, and query string.\n Create a new entry in the DB for the each route.","docstring_summary":"For a redis hset storing a mapping of routes to the number of times they are hit,\n parse each key out into the version, path, and query string.\n Create a new entry in the DB for the each route.","docstring_tokens":["For","a","redis","hset","storing","a","mapping","of","routes","to","the","number","of","times","they","are","hit","parse","each","key","out","into","the","version","path","and","query","string",".","Create","a","new","entry","in","the","DB","for","the","each","route","."],"function":"def process_route_keys(session, redis, key, ip, date):\n \"\"\"\n For a redis hset storing a mapping of routes to the number of times they are hit,\n parse each key out into the version, path, and query string.\n Create a new entry in the DB for the each route.\n \"\"\"\n try:\n route_metrics = []\n routes = redis.hgetall(key)\n for key_bstr in routes:\n route = key_bstr.decode(\"utf-8\").strip(\"\/\")\n val = int(routes[key_bstr].decode(\"utf-8\"))\n\n version = \"0\" # default value if version is not present\n path = route\n query_string = None\n\n route_subpaths = route.split(\"\/\")\n\n # Extract the version out of the path\n if route_subpaths[0].startswith(\"v\") and len(route_subpaths[0]) > 1:\n version = route_subpaths[0][1:]\n path = \"\/\".join(route_subpaths[1:])\n\n # Extract the query string out of the path\n route_query = path.split(\"?\")\n if len(route_query) > 1:\n path = route_query[0]\n query_string = route_query[1]\n route_metrics.append(\n RouteMetrics(\n version=version,\n route_path=path,\n query_string=query_string,\n count=val,\n ip=ip,\n timestamp=date,\n )\n )\n\n if route_metrics:\n session.bulk_save_objects(route_metrics)\n redis.delete(key)\n except Exception as e:\n raise Exception(f\"Error processing route key {key} with 
error {e}\") from e","function_tokens":["def","process_route_keys","(","session",",","redis",",","key",",","ip",",","date",")",":","try",":","route_metrics","=","[","]","routes","=","redis",".","hgetall","(","key",")","for","key_bstr","in","routes",":","route","=","key_bstr",".","decode","(","\"utf-8\"",")",".","strip","(","\"\/\"",")","val","=","int","(","routes","[","key_bstr","]",".","decode","(","\"utf-8\"",")",")","version","=","\"0\"","# default value if version is not present","path","=","route","query_string","=","None","route_subpaths","=","route",".","split","(","\"\/\"",")","# Extract the version out of the path","if","route_subpaths","[","0","]",".","startswith","(","\"v\"",")","and","len","(","route_subpaths","[","0","]",")",">","1",":","version","=","route_subpaths","[","0","]","[","1",":","]","path","=","\"\/\"",".","join","(","route_subpaths","[","1",":","]",")","# Extract the query string out of the path","route_query","=","path",".","split","(","\"?\"",")","if","len","(","route_query",")",">","1",":","path","=","route_query","[","0","]","query_string","=","route_query","[","1","]","route_metrics",".","append","(","RouteMetrics","(","version","=","version",",","route_path","=","path",",","query_string","=","query_string",",","count","=","val",",","ip","=","ip",",","timestamp","=","date",",",")",")","if","route_metrics",":","session",".","bulk_save_objects","(","route_metrics",")","redis",".","delete","(","key",")","except","Exception","as","e",":","raise","Exception","(","f\"Error processing route key {key} with error {e}\"",")","from","e"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_metrics.py#L39-L83"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_metrics.py","language":"python","identifier":"process_app_name_keys","parameters":"(session, redis, key, ip, date)","argument_list":"","return_statement":"","docstring":"For a redis hset storing a mapping of app_name usage in request parameters to count,\n Create a new entry in the DB for each app_name.","docstring_summary":"For a redis hset storing a mapping of app_name usage in request parameters to count,\n Create a new entry in the DB for each app_name.","docstring_tokens":["For","a","redis","hset","storing","a","mapping","of","app_name","usage","in","request","parameters","to","count","Create","a","new","entry","in","the","DB","for","each","app_name","."],"function":"def process_app_name_keys(session, redis, key, ip, date):\n \"\"\"\n For a redis hset storing a mapping of app_name usage in request parameters to count,\n Create a new entry in the DB for each app_name.\n \"\"\"\n try:\n app_name_metrics = []\n app_names = redis.hgetall(key)\n for key_bstr in app_names:\n app_name = key_bstr.decode(\"utf-8\")\n val = int(app_names[key_bstr].decode(\"utf-8\"))\n\n app_name_metrics.append(\n AppNameMetrics(\n application_name=app_name, count=val, ip=ip, timestamp=date\n )\n )\n if app_name_metrics:\n session.bulk_save_objects(app_name_metrics)\n redis.delete(key)\n\n except Exception as e:\n raise Exception(f\"Error processing app name key {key} with error {e}\") from 
e","function_tokens":["def","process_app_name_keys","(","session",",","redis",",","key",",","ip",",","date",")",":","try",":","app_name_metrics","=","[","]","app_names","=","redis",".","hgetall","(","key",")","for","key_bstr","in","app_names",":","app_name","=","key_bstr",".","decode","(","\"utf-8\"",")","val","=","int","(","app_names","[","key_bstr","]",".","decode","(","\"utf-8\"",")",")","app_name_metrics",".","append","(","AppNameMetrics","(","application_name","=","app_name",",","count","=","val",",","ip","=","ip",",","timestamp","=","date",")",")","if","app_name_metrics",":","session",".","bulk_save_objects","(","app_name_metrics",")","redis",".","delete","(","key",")","except","Exception","as","e",":","raise","Exception","(","f\"Error processing app name key {key} with error {e}\"",")","from","e"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_metrics.py#L86-L108"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_metrics.py","language":"python","identifier":"sweep_metrics","parameters":"(db, redis)","argument_list":"","return_statement":"","docstring":"Move the metrics values from redis to the DB.\n\n Get all the redis keys with the metrics prefix,\n parse the key to get the timestamp in the key.\n If it is before the current time, then process the redis hset.","docstring_summary":"Move the metrics values from redis to the DB.","docstring_tokens":["Move","the","metrics","values","from","redis","to","the","DB","."],"function":"def sweep_metrics(db, redis):\n \"\"\"\n Move the metrics values from redis to the DB.\n\n Get all the redis keys with the metrics prefix,\n parse the key to get the timestamp in the key.\n If it is before the current time, then process the redis hset.\n \"\"\"\n with db.scoped_session() as session:\n for key_byte in redis.scan_iter(f\"{metrics_prefix}:*\"):\n key = key_byte.decode(\"utf-8\")\n try:\n parsed_key = parse_metrics_key(key)\n\n if parsed_key is None:\n raise KeyError(\n f\"index_metrics.py | Unable to parse key {key} | Skipping process key\"\n )\n source, ip, key_date = parsed_key\n\n current_date_time = get_rounded_date_time()\n\n if key_date < current_date_time:\n if source == metrics_routes:\n process_route_keys(session, redis, key, ip, key_date)\n elif source == metrics_applications:\n process_app_name_keys(session, redis, key, ip, key_date)\n except KeyError as e:\n logger.warning(e)\n redis.delete(key)\n except Exception as e:\n logger.error(e)\n redis.delete(key)","function_tokens":["def","sweep_metrics","(","db",",","redis",")",":","with","db",".","scoped_session","(",")","as","session",":","for","key_byte","in","redis",".","scan_iter","(","f\"{metrics_prefix}:*\"",")",":","key","=","key_byte",".","decode","(","\"utf-8\"",")","try",":","parsed_key","=","parse_metrics_key","(","key",")","if","parsed_key","is","None",":","raise","KeyError","(","f\"index_metrics.py | Unable to parse key {key} | Skipping process 
key\"",")","source",",","ip",",","key_date","=","parsed_key","current_date_time","=","get_rounded_date_time","(",")","if","key_date","<","current_date_time",":","if","source","==","metrics_routes",":","process_route_keys","(","session",",","redis",",","key",",","ip",",","key_date",")","elif","source","==","metrics_applications",":","process_app_name_keys","(","session",",","redis",",","key",",","ip",",","key_date",")","except","KeyError","as","e",":","logger",".","warning","(","e",")","redis",".","delete","(","key",")","except","Exception","as","e",":","logger",".","error","(","e",")","redis",".","delete","(","key",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_metrics.py#L111-L143"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/index_metrics.py","language":"python","identifier":"consolidate_metrics_from_other_nodes","parameters":"(self, db, redis)","argument_list":"","return_statement":"","docstring":"Get recent route and app metrics from all other discovery nodes\n and merge with this node's metrics so that this node will be aware\n of all the metrics across users hitting different providers","docstring_summary":"Get recent route and app metrics from all other discovery nodes\n and merge with this node's metrics so that this node will be aware\n of all the metrics across users hitting different providers","docstring_tokens":["Get","recent","route","and","app","metrics","from","all","other","discovery","nodes","and","merge","with","this","node","s","metrics","so","that","this","node","will","be","aware","of","all","the","metrics","across","users","hitting","different","providers"],"function":"def consolidate_metrics_from_other_nodes(self, db, redis):\n \"\"\"\n Get recent route and app metrics from all other discovery nodes\n and merge with this node's metrics so that this node will be aware\n of all the metrics across users hitting different providers\n \"\"\"\n all_other_nodes = get_all_other_nodes()[0]\n\n visited_node_timestamps_str = redis_get_or_restore(redis, metrics_visited_nodes)\n visited_node_timestamps = (\n json.loads(visited_node_timestamps_str) if visited_node_timestamps_str else {}\n )\n\n now = datetime.utcnow()\n one_iteration_ago = now - timedelta(minutes=METRICS_INTERVAL)\n one_iteration_ago_str = one_iteration_ago.strftime(datetime_format_secondary)\n end_time = now.strftime(datetime_format_secondary)\n\n # personal unique metrics for the day and the month\n summed_unique_metrics = get_summed_unique_metrics(now)\n summed_unique_daily_count = summed_unique_metrics[\"daily\"]\n summed_unique_monthly_count = summed_unique_metrics[\"monthly\"]\n\n # Merge & persist metrics for our personal node\n personal_route_metrics_str = redis_get_or_restore(redis, personal_route_metrics)\n personal_route_metrics_dict = (\n json.loads(personal_route_metrics_str) if personal_route_metrics_str else {}\n )\n new_personal_route_metrics = {}\n for timestamp, metrics in personal_route_metrics_dict.items():\n if timestamp > one_iteration_ago_str:\n for ip, count in metrics.items():\n if ip in new_personal_route_metrics:\n new_personal_route_metrics[ip] += count\n else:\n new_personal_route_metrics[ip] = count\n\n personal_app_metrics_str = redis_get_or_restore(redis, personal_app_metrics)\n personal_app_metrics_dict = (\n json.loads(personal_app_metrics_str) if personal_app_metrics_str else {}\n )\n 
new_personal_app_metrics = {}\n for timestamp, metrics in personal_app_metrics_dict.items():\n if timestamp > one_iteration_ago_str:\n for app_name, count in metrics.items():\n if app_name in new_personal_app_metrics:\n new_personal_app_metrics[app_name] += count\n else:\n new_personal_app_metrics[app_name] = count\n\n merge_route_metrics(new_personal_route_metrics, end_time, db)\n merge_app_metrics(new_personal_app_metrics, end_time, db)\n\n # Merge & persist metrics for other nodes\n for node in all_other_nodes:\n start_time_str = (\n visited_node_timestamps[node]\n if node in visited_node_timestamps\n else one_iteration_ago_str\n )\n start_time_obj = datetime.strptime(start_time_str, datetime_format_secondary)\n start_time = int(start_time_obj.timestamp())\n new_route_metrics, new_app_metrics = get_metrics(node, start_time)\n\n logger.info(\n f\"did attempt to receive route and app metrics from {node} at {start_time_obj} ({start_time})\"\n )\n\n # add other nodes' summed unique daily and monthly counts to this node's\n if new_route_metrics:\n logger.info(\n f\"summed unique metrics from {node}: {new_route_metrics['summed']}\"\n )\n summed_unique_daily_count += new_route_metrics[\"summed\"][\"daily\"]\n summed_unique_monthly_count += new_route_metrics[\"summed\"][\"monthly\"]\n new_route_metrics = new_route_metrics[\"deduped\"]\n\n merge_route_metrics(new_route_metrics or {}, end_time, db)\n merge_app_metrics(new_app_metrics or {}, end_time, db)\n\n if new_route_metrics is not None and new_app_metrics is not None:\n visited_node_timestamps[node] = end_time\n redis_set_and_dump(\n redis, metrics_visited_nodes, json.dumps(visited_node_timestamps)\n )\n\n # persist updated summed unique counts\n persist_summed_unique_counts(\n db, end_time, summed_unique_daily_count, summed_unique_monthly_count\n )\n\n logger.info(f\"visited node timestamps: {visited_node_timestamps}\")","function_tokens":["def","consolidate_metrics_from_other_nodes","(","self",",","db",",","redis",")",":","all_other_nodes","=","get_all_other_nodes","(",")","[","0","]","visited_node_timestamps_str","=","redis_get_or_restore","(","redis",",","metrics_visited_nodes",")","visited_node_timestamps","=","(","json",".","loads","(","visited_node_timestamps_str",")","if","visited_node_timestamps_str","else","{","}",")","now","=","datetime",".","utcnow","(",")","one_iteration_ago","=","now","-","timedelta","(","minutes","=","METRICS_INTERVAL",")","one_iteration_ago_str","=","one_iteration_ago",".","strftime","(","datetime_format_secondary",")","end_time","=","now",".","strftime","(","datetime_format_secondary",")","# personal unique metrics for the day and the month","summed_unique_metrics","=","get_summed_unique_metrics","(","now",")","summed_unique_daily_count","=","summed_unique_metrics","[","\"daily\"","]","summed_unique_monthly_count","=","summed_unique_metrics","[","\"monthly\"","]","# Merge & persist metrics for our personal 
node","personal_route_metrics_str","=","redis_get_or_restore","(","redis",",","personal_route_metrics",")","personal_route_metrics_dict","=","(","json",".","loads","(","personal_route_metrics_str",")","if","personal_route_metrics_str","else","{","}",")","new_personal_route_metrics","=","{","}","for","timestamp",",","metrics","in","personal_route_metrics_dict",".","items","(",")",":","if","timestamp",">","one_iteration_ago_str",":","for","ip",",","count","in","metrics",".","items","(",")",":","if","ip","in","new_personal_route_metrics",":","new_personal_route_metrics","[","ip","]","+=","count","else",":","new_personal_route_metrics","[","ip","]","=","count","personal_app_metrics_str","=","redis_get_or_restore","(","redis",",","personal_app_metrics",")","personal_app_metrics_dict","=","(","json",".","loads","(","personal_app_metrics_str",")","if","personal_app_metrics_str","else","{","}",")","new_personal_app_metrics","=","{","}","for","timestamp",",","metrics","in","personal_app_metrics_dict",".","items","(",")",":","if","timestamp",">","one_iteration_ago_str",":","for","app_name",",","count","in","metrics",".","items","(",")",":","if","app_name","in","new_personal_app_metrics",":","new_personal_app_metrics","[","app_name","]","+=","count","else",":","new_personal_app_metrics","[","app_name","]","=","count","merge_route_metrics","(","new_personal_route_metrics",",","end_time",",","db",")","merge_app_metrics","(","new_personal_app_metrics",",","end_time",",","db",")","# Merge & persist metrics for other nodes","for","node","in","all_other_nodes",":","start_time_str","=","(","visited_node_timestamps","[","node","]","if","node","in","visited_node_timestamps","else","one_iteration_ago_str",")","start_time_obj","=","datetime",".","strptime","(","start_time_str",",","datetime_format_secondary",")","start_time","=","int","(","start_time_obj",".","timestamp","(",")",")","new_route_metrics",",","new_app_metrics","=","get_metrics","(","node",",","start_time",")","logger",".","info","(","f\"did attempt to receive route and app metrics from {node} at {start_time_obj} ({start_time})\"",")","# add other nodes' summed unique daily and monthly counts to this node's","if","new_route_metrics",":","logger",".","info","(","f\"summed unique metrics from {node}: {new_route_metrics['summed']}\"",")","summed_unique_daily_count","+=","new_route_metrics","[","\"summed\"","]","[","\"daily\"","]","summed_unique_monthly_count","+=","new_route_metrics","[","\"summed\"","]","[","\"monthly\"","]","new_route_metrics","=","new_route_metrics","[","\"deduped\"","]","merge_route_metrics","(","new_route_metrics","or","{","}",",","end_time",",","db",")","merge_app_metrics","(","new_app_metrics","or","{","}",",","end_time",",","db",")","if","new_route_metrics","is","not","None","and","new_app_metrics","is","not","None",":","visited_node_timestamps","[","node","]","=","end_time","redis_set_and_dump","(","redis",",","metrics_visited_nodes",",","json",".","dumps","(","visited_node_timestamps",")",")","# persist updated summed unique counts","persist_summed_unique_counts","(","db",",","end_time",",","summed_unique_daily_count",",","summed_unique_monthly_count",")","logger",".","info","(","f\"visited node timestamps: {visited_node_timestamps}\"",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/index_metrics.py#L199-L290"} 
{"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/vacuum_db.py","language":"python","identifier":"vacuum_db","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Vacuum the db","docstring_summary":"Vacuum the db","docstring_tokens":["Vacuum","the","db"],"function":"def vacuum_db(self):\n \"\"\"Vacuum the db\"\"\"\n\n db = vacuum_db.db\n redis = vacuum_db.redis\n\n have_lock = False\n update_lock = redis.lock(\"vacuum_db\", timeout=3600)\n\n try:\n have_lock = update_lock.acquire(blocking=False)\n\n if have_lock:\n engine = db._engine\n with engine.connect().execution_options(\n isolation_level=\"AUTOCOMMIT\"\n ) as connection:\n connection.execute(\"VACUUM ANALYZE\")\n else:\n logger.info(\"vacuum_db.py | Failed to acquire lock\")\n except Exception as e:\n logger.error(\"vacuum_db.py | Fatal error in main loop\", exc_info=True)\n raise e\n finally:\n if have_lock:\n update_lock.release()","function_tokens":["def","vacuum_db","(","self",")",":","db","=","vacuum_db",".","db","redis","=","vacuum_db",".","redis","have_lock","=","False","update_lock","=","redis",".","lock","(","\"vacuum_db\"",",","timeout","=","3600",")","try",":","have_lock","=","update_lock",".","acquire","(","blocking","=","False",")","if","have_lock",":","engine","=","db",".","_engine","with","engine",".","connect","(",")",".","execution_options","(","isolation_level","=","\"AUTOCOMMIT\"",")","as","connection",":","connection",".","execute","(","\"VACUUM ANALYZE\"",")","else",":","logger",".","info","(","\"vacuum_db.py | Failed to acquire lock\"",")","except","Exception","as","e",":","logger",".","error","(","\"vacuum_db.py | Fatal error in main loop\"",",","exc_info","=","True",")","raise","e","finally",":","if","have_lock",":","update_lock",".","release","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/vacuum_db.py#L9-L34"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/tracks.py","language":"python","identifier":"track_state_update","parameters":"(\n self,\n update_task: DatabaseTask,\n session: Session,\n track_factory_txs,\n block_number,\n block_timestamp,\n block_hash,\n ipfs_metadata,\n blacklisted_cids,\n)","argument_list":"","return_statement":"return num_total_changes, track_ids","docstring":"Return tuple containing int representing number of Track model state changes found in transaction and set of processed track IDs.","docstring_summary":"Return tuple containing int representing number of Track model state changes found in transaction and set of processed track IDs.","docstring_tokens":["Return","tuple","containing","int","representing","number","of","Track","model","state","changes","found","in","transaction","and","set","of","processed","track","IDs","."],"function":"def track_state_update(\n self,\n update_task: DatabaseTask,\n session: Session,\n track_factory_txs,\n block_number,\n block_timestamp,\n block_hash,\n ipfs_metadata,\n blacklisted_cids,\n) -> Tuple[int, Set]:\n \"\"\"Return tuple containing int representing number of Track model state changes found in transaction and set of processed track IDs.\"\"\"\n blockhash = update_task.web3.toHex(block_hash)\n num_total_changes = 0\n skipped_tx_count = 0\n # This stores the track_ids created or updated in the set of transactions\n track_ids: Set[int] = set()\n\n if not 
track_factory_txs:\n return num_total_changes, track_ids\n\n pending_track_routes: List[TrackRoute] = []\n track_events: Dict[int, Dict[str, Any]] = {}\n for tx_receipt in track_factory_txs:\n txhash = update_task.web3.toHex(tx_receipt.transactionHash)\n for event_type in track_event_types_arr:\n track_events_tx = get_track_events_tx(update_task, event_type, tx_receipt)\n processedEntries = 0 # if record does not get added, do not count towards num_total_changes\n for entry in track_events_tx:\n event_args = entry[\"args\"]\n track_id = (\n helpers.get_tx_arg(entry, \"_trackId\")\n if \"_trackId\" in event_args\n else helpers.get_tx_arg(entry, \"_id\")\n )\n existing_track_record = None\n track_metadata = None\n try:\n # look up or populate existing record\n if track_id in track_events:\n existing_track_record = track_events[track_id][\"track\"]\n else:\n existing_track_record = lookup_track_record(\n update_task,\n session,\n entry,\n track_id,\n block_number,\n blockhash,\n txhash,\n )\n # parse track event to add metadata to record\n if event_type in [\n track_event_types_lookup[\"new_track\"],\n track_event_types_lookup[\"update_track\"],\n ]:\n track_metadata_digest = event_args._multihashDigest.hex()\n track_metadata_hash_fn = event_args._multihashHashFn\n buf = multihash.encode(\n bytes.fromhex(track_metadata_digest), track_metadata_hash_fn\n )\n cid = multihash.to_b58_string(buf)\n # do not process entry if cid is blacklisted\n if cid in blacklisted_cids:\n continue\n track_metadata = ipfs_metadata[cid]\n\n parsed_track = parse_track_event(\n self,\n session,\n update_task,\n entry,\n event_type,\n existing_track_record,\n block_number,\n block_timestamp,\n track_metadata,\n pending_track_routes,\n )\n\n # If track record object is None, it has a blacklisted metadata CID\n if parsed_track is not None:\n if track_id not in track_events:\n track_events[track_id] = {\n \"track\": parsed_track,\n \"events\": [],\n }\n else:\n track_events[track_id][\"track\"] = parsed_track\n track_events[track_id][\"events\"].append(event_type)\n track_ids.add(track_id)\n processedEntries += 1\n except EntityMissingRequiredFieldError as e:\n logger.warning(f\"Skipping tx {txhash} with error {e}\")\n skipped_tx_count += 1\n add_node_level_skipped_transaction(\n session, block_number, blockhash, txhash\n )\n pass\n except Exception as e:\n logger.info(\"Error in parse track transaction\")\n raise IndexingError(\n \"track\", block_number, blockhash, txhash, str(e)\n ) from e\n\n num_total_changes += processedEntries\n\n logger.info(\n f\"index.py | tracks.py | [track indexing] There are {num_total_changes} events processed and {skipped_tx_count} skipped transactions.\"\n )\n\n for track_id, value_obj in track_events.items():\n if value_obj[\"events\"]:\n logger.info(f\"index.py | tracks.py | Adding {value_obj['track']}\")\n invalidate_old_track(session, track_id)\n session.add(value_obj[\"track\"])\n\n return num_total_changes, track_ids","function_tokens":["def","track_state_update","(","self",",","update_task",":","DatabaseTask",",","session",":","Session",",","track_factory_txs",",","block_number",",","block_timestamp",",","block_hash",",","ipfs_metadata",",","blacklisted_cids",",",")","->","Tuple","[","int",",","Set","]",":","blockhash","=","update_task",".","web3",".","toHex","(","block_hash",")","num_total_changes","=","0","skipped_tx_count","=","0","# This stores the track_ids created or updated in the set of 
transactions","track_ids",":","Set","[","int","]","=","set","(",")","if","not","track_factory_txs",":","return","num_total_changes",",","track_ids","pending_track_routes",":","List","[","TrackRoute","]","=","[","]","track_events",":","Dict","[","int",",","Dict","[","str",",","Any","]","]","=","{","}","for","tx_receipt","in","track_factory_txs",":","txhash","=","update_task",".","web3",".","toHex","(","tx_receipt",".","transactionHash",")","for","event_type","in","track_event_types_arr",":","track_events_tx","=","get_track_events_tx","(","update_task",",","event_type",",","tx_receipt",")","processedEntries","=","0","# if record does not get added, do not count towards num_total_changes","for","entry","in","track_events_tx",":","event_args","=","entry","[","\"args\"","]","track_id","=","(","helpers",".","get_tx_arg","(","entry",",","\"_trackId\"",")","if","\"_trackId\"","in","event_args","else","helpers",".","get_tx_arg","(","entry",",","\"_id\"",")",")","existing_track_record","=","None","track_metadata","=","None","try",":","# look up or populate existing record","if","track_id","in","track_events",":","existing_track_record","=","track_events","[","track_id","]","[","\"track\"","]","else",":","existing_track_record","=","lookup_track_record","(","update_task",",","session",",","entry",",","track_id",",","block_number",",","blockhash",",","txhash",",",")","# parse track event to add metadata to record","if","event_type","in","[","track_event_types_lookup","[","\"new_track\"","]",",","track_event_types_lookup","[","\"update_track\"","]",",","]",":","track_metadata_digest","=","event_args",".","_multihashDigest",".","hex","(",")","track_metadata_hash_fn","=","event_args",".","_multihashHashFn","buf","=","multihash",".","encode","(","bytes",".","fromhex","(","track_metadata_digest",")",",","track_metadata_hash_fn",")","cid","=","multihash",".","to_b58_string","(","buf",")","# do not process entry if cid is blacklisted","if","cid","in","blacklisted_cids",":","continue","track_metadata","=","ipfs_metadata","[","cid","]","parsed_track","=","parse_track_event","(","self",",","session",",","update_task",",","entry",",","event_type",",","existing_track_record",",","block_number",",","block_timestamp",",","track_metadata",",","pending_track_routes",",",")","# If track record object is None, it has a blacklisted metadata CID","if","parsed_track","is","not","None",":","if","track_id","not","in","track_events",":","track_events","[","track_id","]","=","{","\"track\"",":","parsed_track",",","\"events\"",":","[","]",",","}","else",":","track_events","[","track_id","]","[","\"track\"","]","=","parsed_track","track_events","[","track_id","]","[","\"events\"","]",".","append","(","event_type",")","track_ids",".","add","(","track_id",")","processedEntries","+=","1","except","EntityMissingRequiredFieldError","as","e",":","logger",".","warning","(","f\"Skipping tx {txhash} with error {e}\"",")","skipped_tx_count","+=","1","add_node_level_skipped_transaction","(","session",",","block_number",",","blockhash",",","txhash",")","pass","except","Exception","as","e",":","logger",".","info","(","\"Error in parse track transaction\"",")","raise","IndexingError","(","\"track\"",",","block_number",",","blockhash",",","txhash",",","str","(","e",")",")","from","e","num_total_changes","+=","processedEntries","logger",".","info","(","f\"index.py | tracks.py | [track indexing] There are {num_total_changes} events processed and {skipped_tx_count} skipped 
transactions.\"",")","for","track_id",",","value_obj","in","track_events",".","items","(",")",":","if","value_obj","[","\"events\"","]",":","logger",".","info","(","f\"index.py | tracks.py | Adding {value_obj['track']}\"",")","invalidate_old_track","(","session",",","track_id",")","session",".","add","(","value_obj","[","\"track\"","]",")","return","num_total_changes",",","track_ids"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/tracks.py#L25-L142"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/tracks.py","language":"python","identifier":"update_track_routes_table","parameters":"(\n session, track_record, track_metadata, pending_track_routes\n)","argument_list":"","return_statement":"","docstring":"Creates the route for the given track","docstring_summary":"Creates the route for the given track","docstring_tokens":["Creates","the","route","for","the","given","track"],"function":"def update_track_routes_table(\n session, track_record, track_metadata, pending_track_routes\n):\n \"\"\"Creates the route for the given track\"\"\"\n\n # Check if the title is staying the same, and if so, return early\n if track_record.title == track_metadata[\"title\"]:\n return\n\n # Get the title slug, and set the new slug to that\n # (will check for conflicts later)\n new_track_slug_title = helpers.create_track_slug(\n track_metadata[\"title\"], track_record.track_id\n )\n new_track_slug = new_track_slug_title\n\n # Find the current route for the track\n # Check the pending track route updates first\n prev_track_route_record = next(\n (\n route\n for route in pending_track_routes\n if route.is_current and route.track_id == track_record.track_id\n ),\n None,\n )\n # Then query the DB if necessary\n if prev_track_route_record is None:\n prev_track_route_record = (\n session.query(TrackRoute)\n .filter(\n TrackRoute.track_id == track_record.track_id,\n TrackRoute.is_current == True,\n ) # noqa: E712\n .one_or_none()\n )\n\n if prev_track_route_record is not None:\n if prev_track_route_record.title_slug == new_track_slug_title:\n # If the title slug hasn't changed, we have no work to do\n return\n # The new route will be current\n prev_track_route_record.is_current = False\n\n # Check for collisions by slug titles, and get the max collision_id\n max_collision_id: Optional[int] = None\n # Check pending updates first\n for route in pending_track_routes:\n if (\n route.title_slug == new_track_slug_title\n and route.owner_id == track_record.owner_id\n ):\n max_collision_id = (\n route.collision_id\n if max_collision_id is None\n else max(max_collision_id, route.collision_id)\n )\n # Check DB if necessary\n if max_collision_id is None:\n max_collision_id = (\n session.query(functions.max(TrackRoute.collision_id))\n .filter(\n TrackRoute.title_slug == new_track_slug_title,\n TrackRoute.owner_id == track_record.owner_id,\n )\n .one_or_none()\n )[0]\n\n existing_track_route: Optional[TrackRoute] = None\n # If the new track_slug ends in a digit, there's a possibility it collides\n # with an existing route when the collision_id is appended to its title_slug\n if new_track_slug[-1].isdigit():\n existing_track_route = next(\n (\n route\n for route in pending_track_routes\n if route.slug == new_track_slug\n and route.owner_id == track_record.owner_id\n ),\n None,\n )\n if existing_track_route is None:\n existing_track_route = (\n 
session.query(TrackRoute)\n .filter(\n TrackRoute.slug == new_track_slug,\n TrackRoute.owner_id == track_record.owner_id,\n )\n .one_or_none()\n )\n\n new_collision_id = 0\n has_collisions = existing_track_route is not None\n\n if max_collision_id is not None:\n has_collisions = True\n new_collision_id = max_collision_id\n while has_collisions:\n # If there is an existing track by the user with that slug,\n # then we need to append the collision number to the slug\n new_collision_id += 1\n new_track_slug = helpers.create_track_slug(\n track_metadata[\"title\"], track_record.track_id, new_collision_id\n )\n\n # Check for new collisions after making the new slug\n # In rare cases the user may have track names that end in numbers that\n # conflict with this track name when the collision id is appended,\n # for example they could be trying to create a route that conflicts\n # with the old routing (of appending -{track_id}) This is a fail safe\n # to increment the collision ID until no such collisions are present.\n #\n # Example scenario:\n # - User uploads track titled \"Track\" (title_slug: 'track')\n # - User uploads track titled \"Track 1\" (title_slug: 'track-1')\n # - User uploads track titled \"Track\" (title_slug: 'track')\n # - Try collision_id: 1, slug: 'track-1' and find new collision\n # - Use collision_id: 2, slug: 'track-2'\n # - User uploads track titled \"Track\" (title_slug: 'track')\n # - Use collision_id: 3, slug: 'track-3'\n # - User uploads track titled \"Track 1\" (title_slug: 'track-1')\n # - Use collision_id: 1, slug: 'track-1-1'\n #\n # This may be expensive with many collisions, but should be rare.\n existing_track_route = next(\n (\n route\n for route in pending_track_routes\n if route.slug == new_track_slug\n and route.owner_id == track_record.owner_id\n ),\n None,\n )\n if existing_track_route is None:\n existing_track_route = (\n session.query(TrackRoute)\n .filter(\n TrackRoute.slug == new_track_slug,\n TrackRoute.owner_id == track_record.owner_id,\n )\n .one_or_none()\n )\n has_collisions = existing_track_route is not None\n\n # Add the new track route\n new_track_route = TrackRoute()\n new_track_route.slug = new_track_slug\n new_track_route.title_slug = new_track_slug_title\n new_track_route.collision_id = new_collision_id\n new_track_route.owner_id = track_record.owner_id\n new_track_route.track_id = track_record.track_id\n new_track_route.is_current = True\n new_track_route.blockhash = track_record.blockhash\n new_track_route.blocknumber = track_record.blocknumber\n new_track_route.txhash = track_record.txhash\n session.add(new_track_route)\n\n # Add to pending track routes so we don't add the same route twice\n pending_track_routes.append(new_track_route)","function_tokens":["def","update_track_routes_table","(","session",",","track_record",",","track_metadata",",","pending_track_routes",")",":","# Check if the title is staying the same, and if so, return early","if","track_record",".","title","==","track_metadata","[","\"title\"","]",":","return","# Get the title slug, and set the new slug to that","# (will check for conflicts later)","new_track_slug_title","=","helpers",".","create_track_slug","(","track_metadata","[","\"title\"","]",",","track_record",".","track_id",")","new_track_slug","=","new_track_slug_title","# Find the current route for the track","# Check the pending track route updates 
first","prev_track_route_record","=","next","(","(","route","for","route","in","pending_track_routes","if","route",".","is_current","and","route",".","track_id","==","track_record",".","track_id",")",",","None",",",")","# Then query the DB if necessary","if","prev_track_route_record","is","None",":","prev_track_route_record","=","(","session",".","query","(","TrackRoute",")",".","filter","(","TrackRoute",".","track_id","==","track_record",".","track_id",",","TrackRoute",".","is_current","==","True",",",")","# noqa: E712",".","one_or_none","(",")",")","if","prev_track_route_record","is","not","None",":","if","prev_track_route_record",".","title_slug","==","new_track_slug_title",":","# If the title slug hasn't changed, we have no work to do","return","# The new route will be current","prev_track_route_record",".","is_current","=","False","# Check for collisions by slug titles, and get the max collision_id","max_collision_id",":","Optional","[","int","]","=","None","# Check pending updates first","for","route","in","pending_track_routes",":","if","(","route",".","title_slug","==","new_track_slug_title","and","route",".","owner_id","==","track_record",".","owner_id",")",":","max_collision_id","=","(","route",".","collision_id","if","max_collision_id","is","None","else","max","(","max_collision_id",",","route",".","collision_id",")",")","# Check DB if necessary","if","max_collision_id","is","None",":","max_collision_id","=","(","session",".","query","(","functions",".","max","(","TrackRoute",".","collision_id",")",")",".","filter","(","TrackRoute",".","title_slug","==","new_track_slug_title",",","TrackRoute",".","owner_id","==","track_record",".","owner_id",",",")",".","one_or_none","(",")",")","[","0","]","existing_track_route",":","Optional","[","TrackRoute","]","=","None","# If the new track_slug ends in a digit, there's a possibility it collides","# with an existing route when the collision_id is appended to its title_slug","if","new_track_slug","[","-","1","]",".","isdigit","(",")",":","existing_track_route","=","next","(","(","route","for","route","in","pending_track_routes","if","route",".","slug","==","new_track_slug","and","route",".","owner_id","==","track_record",".","owner_id",")",",","None",",",")","if","existing_track_route","is","None",":","existing_track_route","=","(","session",".","query","(","TrackRoute",")",".","filter","(","TrackRoute",".","slug","==","new_track_slug",",","TrackRoute",".","owner_id","==","track_record",".","owner_id",",",")",".","one_or_none","(",")",")","new_collision_id","=","0","has_collisions","=","existing_track_route","is","not","None","if","max_collision_id","is","not","None",":","has_collisions","=","True","new_collision_id","=","max_collision_id","while","has_collisions",":","# If there is an existing track by the user with that slug,","# then we need to append the collision number to the slug","new_collision_id","+=","1","new_track_slug","=","helpers",".","create_track_slug","(","track_metadata","[","\"title\"","]",",","track_record",".","track_id",",","new_collision_id",")","# Check for new collisions after making the new slug","# In rare cases the user may have track names that end in numbers that","# conflict with this track name when the collision id is appended,","# for example they could be trying to create a route that conflicts","# with the old routing (of appending -{track_id}) This is a fail safe","# to increment the collision ID until no such collisions are present.","#","# Example scenario:","# - User uploads track titled \"Track\" 
(title_slug: 'track')","# - User uploads track titled \"Track 1\" (title_slug: 'track-1')","# - User uploads track titled \"Track\" (title_slug: 'track')","# - Try collision_id: 1, slug: 'track-1' and find new collision","# - Use collision_id: 2, slug: 'track-2'","# - User uploads track titled \"Track\" (title_slug: 'track')","# - Use collision_id: 3, slug: 'track-3'","# - User uploads track titled \"Track 1\" (title_slug: 'track-1')","# - Use collision_id: 1, slug: 'track-1-1'","#","# This may be expensive with many collisions, but should be rare.","existing_track_route","=","next","(","(","route","for","route","in","pending_track_routes","if","route",".","slug","==","new_track_slug","and","route",".","owner_id","==","track_record",".","owner_id",")",",","None",",",")","if","existing_track_route","is","None",":","existing_track_route","=","(","session",".","query","(","TrackRoute",")",".","filter","(","TrackRoute",".","slug","==","new_track_slug",",","TrackRoute",".","owner_id","==","track_record",".","owner_id",",",")",".","one_or_none","(",")",")","has_collisions","=","existing_track_route","is","not","None","# Add the new track route","new_track_route","=","TrackRoute","(",")","new_track_route",".","slug","=","new_track_slug","new_track_route",".","title_slug","=","new_track_slug_title","new_track_route",".","collision_id","=","new_collision_id","new_track_route",".","owner_id","=","track_record",".","owner_id","new_track_route",".","track_id","=","track_record",".","track_id","new_track_route",".","is_current","=","True","new_track_route",".","blockhash","=","track_record",".","blockhash","new_track_route",".","blocknumber","=","track_record",".","blocknumber","new_track_route",".","txhash","=","track_record",".","txhash","session",".","add","(","new_track_route",")","# Add to pending track routes so we don't add the same route twice","pending_track_routes",".","append","(","new_track_route",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/tracks.py#L231-L389"} {"nwo":"AudiusProject\/audius-protocol","sha":"4d1fbe27b5b283befbda7b74dc956e7dd3a52b19","path":"discovery-provider\/src\/tasks\/cache_trending_playlists.py","language":"python","identifier":"cache_trending_playlists","parameters":"(self)","argument_list":"","return_statement":"","docstring":"Caches trending playlists for time period","docstring_summary":"Caches trending playlists for time period","docstring_tokens":["Caches","trending","playlists","for","time","period"],"function":"def cache_trending_playlists(self):\n \"\"\"Caches trending playlists for time period\"\"\"\n\n db = cache_trending_playlists.db\n redis = cache_trending_playlists.redis\n\n have_lock = False\n update_lock = redis.lock(\"cache_trending_playlists_lock\", timeout=7200)\n\n try:\n have_lock = update_lock.acquire(blocking=False)\n\n if have_lock:\n trending_playlist_versions = (\n trending_strategy_factory.get_versions_for_type(\n TrendingType.PLAYLISTS\n ).keys()\n )\n for version in trending_playlist_versions:\n logger.info(\n f\"cache_trending_playlists.py ({version.name} version) | Starting\"\n )\n strategy = trending_strategy_factory.get_strategy(\n TrendingType.PLAYLISTS, version\n )\n start_time = time.time()\n cache_trending(db, redis, strategy)\n end_time = time.time()\n logger.info(\n f\"cache_trending_playlists.py ({version.name} version) | \\\n Finished in {end_time - start_time} seconds\"\n )\n 
redis.set(trending_playlists_last_completion_redis_key, int(end_time))\n else:\n logger.info(\"cache_trending_playlists.py | Failed to acquire lock\")\n except Exception as e:\n logger.error(\n \"cache_trending_playlists.py | Fatal error in main loop\", exc_info=True\n )\n raise e\n finally:\n if have_lock:\n update_lock.release()","function_tokens":["def","cache_trending_playlists","(","self",")",":","db","=","cache_trending_playlists",".","db","redis","=","cache_trending_playlists",".","redis","have_lock","=","False","update_lock","=","redis",".","lock","(","\"cache_trending_playlists_lock\"",",","timeout","=","7200",")","try",":","have_lock","=","update_lock",".","acquire","(","blocking","=","False",")","if","have_lock",":","trending_playlist_versions","=","(","trending_strategy_factory",".","get_versions_for_type","(","TrendingType",".","PLAYLISTS",")",".","keys","(",")",")","for","version","in","trending_playlist_versions",":","logger",".","info","(","f\"cache_trending_playlists.py ({version.name} version) | Starting\"",")","strategy","=","trending_strategy_factory",".","get_strategy","(","TrendingType",".","PLAYLISTS",",","version",")","start_time","=","time",".","time","(",")","cache_trending","(","db",",","redis",",","strategy",")","end_time","=","time",".","time","(",")","logger",".","info","(","f\"cache_trending_playlists.py ({version.name} version) | \\\n Finished in {end_time - start_time} seconds\"",")","redis",".","set","(","trending_playlists_last_completion_redis_key",",","int","(","end_time",")",")","else",":","logger",".","info","(","\"cache_trending_playlists.py | Failed to acquire lock\"",")","except","Exception","as","e",":","logger",".","error","(","\"cache_trending_playlists.py | Fatal error in main loop\"",",","exc_info","=","True",")","raise","e","finally",":","if","have_lock",":","update_lock",".","release","(",")"],"url":"https:\/\/github.com\/AudiusProject\/audius-protocol\/blob\/4d1fbe27b5b283befbda7b74dc956e7dd3a52b19\/discovery-provider\/src\/tasks\/cache_trending_playlists.py#L30-L72"}
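Several records in this section (index_listen_count_milestones_task, vacuum_db, cache_trending_playlists) wrap their work in the same guard: a non-blocking Redis lock so at most one worker runs the task at a time, released in a finally block only if it was actually acquired. Here is a minimal sketch of that pattern, assuming a redis-py client; redis_client, do_work, and the lock name are hypothetical stand-ins, not names from the source.

import logging

import redis

logger = logging.getLogger(__name__)
redis_client = redis.Redis()


def do_work():
    """Placeholder for the task body (indexing, vacuuming, caching)."""


def run_guarded_task():
    have_lock = False
    # timeout (seconds) bounds how long a crashed worker can hold the lock
    update_lock = redis_client.lock("example_task_lock", timeout=600)
    try:
        # blocking=False: if another worker holds the lock, skip this run
        have_lock = update_lock.acquire(blocking=False)
        if have_lock:
            do_work()
        else:
            logger.info("Failed to acquire lock; another worker is active")
    except Exception:
        logger.error("Fatal error in main loop", exc_info=True)
        raise
    finally:
        # Release only if this worker actually acquired the lock
        if have_lock:
            update_lock.release()

Acquiring with blocking=False makes overlapping schedules cheap to tolerate: a second worker simply logs and exits instead of queueing behind the lock.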