diff --git "a/custom_index/docstore.json" "b/custom_index/docstore.json" --- "a/custom_index/docstore.json" +++ "b/custom_index/docstore.json" @@ -1 +1 @@ -{"docstore/metadata": {"cf01bf3c-485c-4cc7-8dee-d50e81ed1f44": {"doc_hash": "11c8a3d01644ae0e8e8d6ff334d5f6546809c9553f532379a424055b9d2180e2"}, "da0f0d1b-08a7-404e-971f-d4ac86f075c1": {"doc_hash": "39233ab9964ec63ed25c7b38225e24fa8dd469d5b6424a91d03b0aeb40f84b69"}, "a1f841c7-791b-4aa2-925a-4baae7050de3": {"doc_hash": "caf398a52bcc68240a58e796587532bd0f4a4d87fb673d1c07323be7deb7246c"}, "3263090c-90e3-4b9d-b402-69d662046a2f": {"doc_hash": "03173012074ee8c8aa5aa5c45c0597ec570c256c427f4f26aedc9cbfb4878045"}, "23ca55f5-7668-4c89-a498-8b346fb59b6b": {"doc_hash": "25a000acde2316443d608565399296dc71d4ccafe753fc2303632501145e19f2"}, "6a334eef-b779-483c-b985-4dc24e4a4a40": {"doc_hash": "21cc94a20e40b4e0507178618ddae4bf7e7561cbded605a5704620d292c463da"}, "30c34778-fd84-4247-a8e8-951e5f41fd1b": {"doc_hash": "4a573f135fd6e0a296538270df39d45736b84875c1602c0db0139321f7e539ec"}, "e0d7717d-cfe0-4212-9290-dfa3afea7aae": {"doc_hash": "9a57af184d633ae88d28a9adf02309b9bec7408e52023bc5e2801fb60d74ef6e"}, "4e3755a0-2c19-42b5-b693-15c44d7d49a9": {"doc_hash": "7974425cbb776830c95e0300eafc863c309fde76b1a9e1f9dd231e7006b276f9"}, "29fad43a-5695-4286-bc25-eff91020241f": {"doc_hash": "f48079b147206e24f1a603002ca88b8dd9ee1cedd13676fe17fa8d88d312fecf"}, "35ff0f02-f8a3-4f4b-9b80-c52d809439ef": {"doc_hash": "02914b5015ef0db3eb6a652f9ff2edd193274fb65eea63ef0bf624d76079328e"}, "a99b8daa-2f23-425d-86bd-5a50f12846ec": {"doc_hash": "63af798fef7b1598e4ad2b16e9e40b34c2a978d585c6f3e831a52bee98a2c97c"}, "e2a0dbaa-daf2-400a-8a52-560293fb0472": {"doc_hash": "d100d58e1005fb2c658803507071ecabcfe9ff3f9f7a07fc712974f29ac6a067"}, "fb50b1ba-bc1b-4d74-951a-22d5167d5ffb": {"doc_hash": "a8f60bb292a6a8a5a8797722564372a56ffb6de873685c2bc9b524a068f8aafa"}, "ae82459d-41ef-4ac9-9820-3eb69e0be922": {"doc_hash": "aee6cf38a4e5eabdb63c856c12d762982faa68814c4c26a216e7e82966f4b677"}, "e77f0ca8-a8fa-47af-85cd-aad9ca3cc25e": {"doc_hash": "2f7788aa4b917d49a21e64d362ffc8d6188998d57f3ba44afd05a23057110c4d"}, "1dcb5cb9-fe5d-4ddc-85cd-2e85f79f60f7": {"doc_hash": "5de25c19315462d3549f5e784b49d5fe540f2dd41be784136ea8f183d044841e"}, "f53299ea-9b05-4449-a158-4c5ea426befc": {"doc_hash": "d167f69365feeebf08d4e5311c0293ff15b57098d968240bfde1be734a32b9a1"}, "495e4cc2-2841-4f5f-81da-e858fa85f927": {"doc_hash": "82b05d0d1fd143fc24f18c451af7e1ea6dd510b3940358ac556bdeba9575a469"}, "730f6711-eabb-4590-9fab-575896022f6c": {"doc_hash": "3f51f64cdd0021155737e7826c409d607939518f65e164e3ee555f38594170fc"}, "f3cb11e5-8af0-4129-9192-4b58bde83b30": {"doc_hash": "a5553ea6ed07b8e093c44b85d94740f40d50a4c641b848483c962fff81cd2291"}, "56f26472-42eb-4429-b4c7-024d54499981": {"doc_hash": "4ad5c5d20a0766e3c50e1d4af23dfb8e23882a51c39751faad138e202e6a1fb4"}, "014f9f76-25c6-49c0-9684-97ff451b3a3f": {"doc_hash": "15672c9e516b1d378e4664126899f48bc6dc97265e02e44fa6195f7c530592fd"}, "14282b74-c50d-4960-bd8a-324dbd31df6c": {"doc_hash": "44b70d9dd3875a1574c410efe32c278ab0e1181c77c9ec481e2b18c5e3b978a0"}, "b0861860-7803-4610-8cec-2a5dd7c79879": {"doc_hash": "ecc43c6e256acb254beef7108c49b128ae23445c3a0e57659376c374f0094682"}, "290c3c9f-bcba-49f3-9af1-8e83a8b02247": {"doc_hash": "0243dbf30e08de942f6fc080f3e64eedaecc3d1d0c734f189cb27f92d1ef18c6"}, "9d6248bd-b296-4a6d-9600-3d026b72246a": {"doc_hash": "62c283b3781b3eb1252f949a458c78ef86493d1e5524328b881d9b326d7a80ac"}, "7565958f-8cee-4307-9686-729fbd2fa991": {"doc_hash": 
"06a251c9de1a6d9955e088165ab5bd81d8f6f9a71b1e68acf034c38b64d05878"}, "7173d059-7feb-4711-a7ed-cbaf25bd35b1": {"doc_hash": "b0385e450f438088394742c5e283c6ae66d5d576d1d4abf3491473344c886b57"}, "dead2a62-5d5f-4ac8-b2e5-dfa16f691ab0": {"doc_hash": "710cb5ff4959b9c99a37e3173bbab0cd10febb03f71cae910fd6e6acecc99a53"}, "cdee50c4-5767-4a9d-9c14-65222d9a2c67": {"doc_hash": "53ef4c20e509492cc140d37adee597ff6cd68aed6522a0a503af2eee93b5d6db"}, "1eb867f6-2334-4615-9360-df36808375eb": {"doc_hash": "0165fd3447625d17d4424f764da78f21b9aab377a01e12e0ae99a474bdcca254"}, "681b072c-d4ae-407a-b4d2-2e37d084b018": {"doc_hash": "22cd93d4a5ea32e00f61c8b2fc56465b6622bbb9cc3219da94235742c5c249dc"}, "859b3475-3d01-499b-9e73-8c43546ea104": {"doc_hash": "8277539a9bb2bc291202182cd75f19bcdff308635b52308fc69e837d85013297"}, "40514f02-0035-4454-bc49-ddb28d097eee": {"doc_hash": "864598e743cc8b86a13d888c7f1fe9f382b30358a7cb9f69741e3d9ed7e533bf"}, "610d896b-01cd-43ed-a0e6-3847712913a6": {"doc_hash": "c14210eb722eadddb07bb607a0a10354c9147de1afbed69ef7eebfb20d641da7"}, "be623874-dc7e-4bcd-8475-12c168009c09": {"doc_hash": "c2df2dcb54ce0499daf86ff9549dbd4f0bbdb90573278a8d17f7b8b45eb9301f"}, "0a852b0e-e292-450d-b078-23d3d016ca1b": {"doc_hash": "70254454ab988b1a5a2393bcda5e976341fb56875518f7a867588d109b8f7365"}, "5fe7fedf-099f-4582-a322-79c0ef11a5a0": {"doc_hash": "bbedaa482fd33c1a4c7c8299516043ed80c0df6cebbea56f9472fcc012c8900c"}, "ff41215a-c415-4483-93eb-95855177d748": {"doc_hash": "f8e8c420fc0e4c1b5ef6663c7468203d1e2a7c84037324476427e0427a42a4ea"}, "5a95b011-603b-4da0-8be7-a49d329ac9d4": {"doc_hash": "a8b06d28c38dad7b3eeb48534f0f6dd917aa3349a031c7bf425429fd635407d3"}, "b00f5eab-fd70-4b2e-85d3-7869e449d678": {"doc_hash": "b51f6693e2d1f769e18a27b56001b30d395b8eeaacf37fcff15cdcc858244c91"}, "048dd051-017e-4c75-8cc8-56155e8a7606": {"doc_hash": "d2baa62743b351ba26cd4e7fdac22c4c69777d50fd09182b343bd5c24b7c7286"}, "eb12e17e-9db6-4386-a3eb-4a56c58432a8": {"doc_hash": "55541680862a5c09edfb82df46672d43898976f892f04b52241020c46998f67e"}, "1ac83d1c-fd83-4278-8d24-a5d0959792ff": {"doc_hash": "bceeaf74bbe465a1db4d83091b131127e70915875abf2743d5784551523b8ddb"}, "601d1b58-4d75-440e-94b7-fb7ca8fc8e1e": {"doc_hash": "687e845953e6f61280551d93d2040d81948690f1932acc74aac2afa24f3c62fa"}, "70cc21b2-2872-44f5-b6d4-d263796db69a": {"doc_hash": "a9e4c033eccff1f4e383d17e78f9f752da3a5aaa165f444af85d153aff779acf"}, "3e952d87-b8e5-461b-b7e8-da3c8d8a509d": {"doc_hash": "e4a007ae349c076b957891f562c88e341d8c0e16c5ce8eaf008e24e30dd3c3cb"}, "e241b9f8-0862-48b2-9782-788d8006a633": {"doc_hash": "a4b8e0615edc8fa500aef85e9a3312b1119c7d2089b7fcdb874d64ed2cbfaaa4"}, "3e66fc81-176b-49c9-b86b-24852aa480b7": {"doc_hash": "93f99800f617b1f2da213fdf9b225cde4e61cac098a8a97c00f2c16f00a0c6fe", "ref_doc_id": "cf01bf3c-485c-4cc7-8dee-d50e81ed1f44"}, "e62f4e51-0196-4a3a-9ff5-7d0ac4f08d88": {"doc_hash": "13a4d07c6601b199c952fa3e10f1c57ed71b110800a7ab248ab3f76896c1a92f", "ref_doc_id": "cf01bf3c-485c-4cc7-8dee-d50e81ed1f44"}, "6f26c18a-9aca-461e-99c7-ef94b56b145d": {"doc_hash": "d8c22256ba38c7cae13e9ebfbfc7e61ec63653e5e3746db2f0d2c516bff1a693", "ref_doc_id": "cf01bf3c-485c-4cc7-8dee-d50e81ed1f44"}, "9fbf274d-45e6-4727-a45f-aa4ad82e3a07": {"doc_hash": "72365937a9e180a8f941c929cf7102778bbddd770d6803eebd038fe44f546e21", "ref_doc_id": "cf01bf3c-485c-4cc7-8dee-d50e81ed1f44"}, "149632c1-0f93-4a36-aa68-a8121850fa4a": {"doc_hash": "5f778905c3cbbf686780fb0f98b5607f84dce7aed5ff294d5ac442c4ba172c9f", "ref_doc_id": "da0f0d1b-08a7-404e-971f-d4ac86f075c1"}, "5ab25872-e533-42d3-a732-7008791988df": 
{"doc_hash": "2a1ca63c0aac62f04fc93ce637cb30109dd0c5e1a422e1de8c7f7b8838a6dac9", "ref_doc_id": "da0f0d1b-08a7-404e-971f-d4ac86f075c1"}, "8dccc7fc-46fe-45a1-84b8-d6f1dbfcc380": {"doc_hash": "f2582c82ab94a469fce476c6d34834ea603c9caf326411a06582dfe68c6a3b9b", "ref_doc_id": "da0f0d1b-08a7-404e-971f-d4ac86f075c1"}, "cf93ae05-6a0e-4a29-8c55-9a84241ae99b": {"doc_hash": "b81ebf5c42b49f9c4e56ee9c5b3e54c1072c4d70c58cc23ae7dc1b999dd5f332", "ref_doc_id": "da0f0d1b-08a7-404e-971f-d4ac86f075c1"}, "3208e4e4-6c7f-4aa7-b717-c3f544ddd70e": {"doc_hash": "986b646d1baeb6573ca4a8777f5dea57c7552d03f3d3839ff2a3e676af18a9fd", "ref_doc_id": "a1f841c7-791b-4aa2-925a-4baae7050de3"}, "fdd0a876-3845-448d-936c-2c6e66149219": {"doc_hash": "9ae21d22ef1682ea6b13093839fecb566b1d0f71b600ba7f7272b924aa897329", "ref_doc_id": "a1f841c7-791b-4aa2-925a-4baae7050de3"}, "9ae16e2b-7abd-417e-b390-4d50d52a7ae1": {"doc_hash": "61f87d7b0b963778089600deab180d527d8511d837bc4b0721148aa5210e49f4", "ref_doc_id": "a1f841c7-791b-4aa2-925a-4baae7050de3"}, "e800fd76-a7cd-4f5a-9cde-883692f09be1": {"doc_hash": "cc6f6e2ca34d7081e35822ba57b4d88d57877a943194651c3b394d1966e4835a", "ref_doc_id": "a1f841c7-791b-4aa2-925a-4baae7050de3"}, "b5dfda59-5a79-44a9-96b9-8abe4da3d725": {"doc_hash": "7288d30959dcc5c0df03770a141639f20853df32a4288d744923b2bed7b0a7bd", "ref_doc_id": "a1f841c7-791b-4aa2-925a-4baae7050de3"}, "7135cb16-0f14-40f0-8b27-3b0624544a2e": {"doc_hash": "0a7fe876d9988586b92f9ce133b4007c8588478661a05a9a8635f005d9daa779", "ref_doc_id": "a1f841c7-791b-4aa2-925a-4baae7050de3"}, "f503b09a-2136-4927-b4b4-4562aedd74db": {"doc_hash": "24e1310dfcb3b63821d28da7b093b2f77d9a9e1cb0f11c9d1c7aae19f09e7131", "ref_doc_id": "3263090c-90e3-4b9d-b402-69d662046a2f"}, "75f0b359-23e9-4147-81a2-c6dd02c273b6": {"doc_hash": "71566724382835eac9a42fd96426f3c26e521e046a48c0eec634b0a3b085d720", "ref_doc_id": "3263090c-90e3-4b9d-b402-69d662046a2f"}, "b9f23634-baae-42a9-94a0-2c41e14a6959": {"doc_hash": "9610d5ab293d8ef77282b43598950bcf78afc6c1bd2edf57616b46be18878dfe", "ref_doc_id": "3263090c-90e3-4b9d-b402-69d662046a2f"}, "cc641909-3146-4abe-85a0-ec8f94a34287": {"doc_hash": "cd09e3478febe75da8c8f405179c2767f096ec5c13bb8285c62251f648d63ca4", "ref_doc_id": "3263090c-90e3-4b9d-b402-69d662046a2f"}, "cfd15aea-e645-44ec-bcf9-41c9d785616d": {"doc_hash": "e11fd43e67192c1ef152165a8d046c2fe84613463fda4bd32bb289ed9ffd539d", "ref_doc_id": "3263090c-90e3-4b9d-b402-69d662046a2f"}, "5973208a-25fa-4746-8bea-376b878de666": {"doc_hash": "6c02e7e04e388f89ca606bc185649c73bbee934f6535f03888b58faf7f04f54a", "ref_doc_id": "23ca55f5-7668-4c89-a498-8b346fb59b6b"}, "ef9507bd-7173-4e94-8300-74eb90b374b3": {"doc_hash": "29d3fb9cb703e19c92142732908cb048af875dde00995ded4ae5bb12e1656378", "ref_doc_id": "23ca55f5-7668-4c89-a498-8b346fb59b6b"}, "a0415ba1-08ac-4705-83e9-b34878db7c1b": {"doc_hash": "1ece6ffec5d12b6c35f00c8f3a404bcde655f8b045a08d743f0c68ae43b6ec37", "ref_doc_id": "23ca55f5-7668-4c89-a498-8b346fb59b6b"}, "e0a5c38c-26f2-4cb5-a622-2d5c48031963": {"doc_hash": "f7fe375c3075b31f905f0a007ae373f208fc1911973946ae0f3b6b2e5b6b46d3", "ref_doc_id": "23ca55f5-7668-4c89-a498-8b346fb59b6b"}, "14f37a90-740e-4e46-bfb7-83c4f2a3a9d1": {"doc_hash": "445144bbf3f448b5017cf083b0c9c5ddf9bd30f5e3a358f46c957dfd4a9adb66", "ref_doc_id": "6a334eef-b779-483c-b985-4dc24e4a4a40"}, "e48ccbf0-faff-42de-b267-937dfdc4c764": {"doc_hash": "d42b05654c289d0a74bf17516c35f707aa81332320e4b1d0dc36f8b2ab0db24c", "ref_doc_id": "6a334eef-b779-483c-b985-4dc24e4a4a40"}, "d1ce3399-b5f0-4589-8650-afe723450e88": {"doc_hash": 
"3bbe42f899cd42a3b36fed1f0cd1f5959f8ab62aab004c613c81525289e36705", "ref_doc_id": "6a334eef-b779-483c-b985-4dc24e4a4a40"}, "c6a8b18c-7194-4e38-bf40-84052e54e7e5": {"doc_hash": "c38218ed217586747d53f23357e6513dfd863be98fec16c00716dda1f0909852", "ref_doc_id": "6a334eef-b779-483c-b985-4dc24e4a4a40"}, "bf82ac3e-224b-429d-b939-e1097751eb94": {"doc_hash": "7b8574c8167e0b4798acf05b91d6c90d38c409534db4ca28d66a2129b4b31761", "ref_doc_id": "6a334eef-b779-483c-b985-4dc24e4a4a40"}, "30c02805-d52d-4048-8ddf-663b9e4d088c": {"doc_hash": "08409887034256735afb66265b20014a2c553d2440448a499f2afead147a4179", "ref_doc_id": "30c34778-fd84-4247-a8e8-951e5f41fd1b"}, "a45cc988-0ef5-42ce-a33b-73c13e09396e": {"doc_hash": "b98d367a32e513826ef13cf5772d9913aad4856685b0e4dbcb17fc1fca8f575f", "ref_doc_id": "30c34778-fd84-4247-a8e8-951e5f41fd1b"}, "4453f417-2325-426f-b119-22f38fcafaed": {"doc_hash": "a6be382651453303d4cfa2d0e26e2abaa22e6430dc1900ff3609de8bc561115c", "ref_doc_id": "30c34778-fd84-4247-a8e8-951e5f41fd1b"}, "3b75c8c1-58b1-4b1e-a6f8-d371dd394311": {"doc_hash": "5894fb3a8b3f8ff1b67d1ba6863180330bd38fd8e70445c7f3a2861a40673699", "ref_doc_id": "30c34778-fd84-4247-a8e8-951e5f41fd1b"}, "b7603c42-6086-41f6-95ab-fceff7e81d7c": {"doc_hash": "e233a82e4a3a2abc95aac32437a367e01d882ef210ce1d46b944214b2bac1b2b", "ref_doc_id": "30c34778-fd84-4247-a8e8-951e5f41fd1b"}, "7343c44f-ee68-4b65-88aa-cd75f706a7e6": {"doc_hash": "d4ed1436d9eb53c2b63a75b5f20e4cf54fff666946a22ef7351b5625f648dc7f", "ref_doc_id": "e0d7717d-cfe0-4212-9290-dfa3afea7aae"}, "bad287bc-422f-4d8c-93b2-9c2d64e8e462": {"doc_hash": "27a88a0c05fc44a9f06f7e4f9fee823623b521c17a026caba9c47e72e4739f06", "ref_doc_id": "e0d7717d-cfe0-4212-9290-dfa3afea7aae"}, "7a3a29a2-2efe-42b9-83ef-68d88625774d": {"doc_hash": "1042c875d48c52781ee5e9756be8c22d2b4a77c189140b5f5c3d9324d14a3812", "ref_doc_id": "e0d7717d-cfe0-4212-9290-dfa3afea7aae"}, "8b5e7d90-37d6-401c-aafe-ec7c32c0becc": {"doc_hash": "759cd7e7054a33ecfc4d858a46df70fb86240b846dc44cecb4b79e5a8c9c881f", "ref_doc_id": "e0d7717d-cfe0-4212-9290-dfa3afea7aae"}, "f2886304-2409-4824-bbd0-bd1311c0ddbd": {"doc_hash": "6f4e234b60be15b535b902507d9f9da6cb1305000e641be010a7f16f0842ba17", "ref_doc_id": "e0d7717d-cfe0-4212-9290-dfa3afea7aae"}, "d8ee0993-1f35-4e2a-b7df-df1d0fed634c": {"doc_hash": "59e1017bb3617e266d50079eebd1c21854507ee0459ab134930036eeb6fa048d", "ref_doc_id": "e0d7717d-cfe0-4212-9290-dfa3afea7aae"}, "5c054979-9ff0-4480-abf0-d3cddbaf962a": {"doc_hash": "0332040847d898fd6390d58428245bec2784e27cef5bfa25b52eb8b0f11adc12", "ref_doc_id": "4e3755a0-2c19-42b5-b693-15c44d7d49a9"}, "ed7f6e5e-afe7-4f68-b672-3a911d428a09": {"doc_hash": "5c6cb7104a013fe28bc64f65e2aee96aad177f3d52210fdaa4696c6529a68f21", "ref_doc_id": "4e3755a0-2c19-42b5-b693-15c44d7d49a9"}, "6077f29c-2e41-40d6-b4eb-4f680ba8fedf": {"doc_hash": "2436131b6e16c580a4ce434fe4be3fdb01fa8f14b4ed5462e2edcdc38ffde9af", "ref_doc_id": "4e3755a0-2c19-42b5-b693-15c44d7d49a9"}, "76a94fb2-faef-4a6c-be1c-be4983ab2606": {"doc_hash": "51ecf674ada100977e93bda6442bca59aedeb795f2766d5877b57f235b07da4b", "ref_doc_id": "4e3755a0-2c19-42b5-b693-15c44d7d49a9"}, "4dda1b89-e6af-4070-ae1b-7c3baaeb19dd": {"doc_hash": "69377b5cac3704d12eb0430d50f1cf09a4accd5538bd282ff5bf1d2f7769c53b", "ref_doc_id": "29fad43a-5695-4286-bc25-eff91020241f"}, "e6091fcf-6fb9-4cc7-8d05-c11cda8d7d00": {"doc_hash": "fd7af15ee733d5361a0448cb63a0b7f72b5e1531d6d1ac6f53ecf937af0e6390", "ref_doc_id": "29fad43a-5695-4286-bc25-eff91020241f"}, "7e36a599-5ad0-4db6-8f42-f1b923437e40": {"doc_hash": 
"c32eb78c5e862eb8ae35669f039abd348dcba4b62b586b00705a1340212bf8b3", "ref_doc_id": "29fad43a-5695-4286-bc25-eff91020241f"}, "72cf3755-afc3-4628-a3f7-a9c05803d70f": {"doc_hash": "0a2342406048104f549d575f08dd22e211ba93b5bbd0d707320c456228a083b1", "ref_doc_id": "29fad43a-5695-4286-bc25-eff91020241f"}, "6cfd78f7-094e-4dab-ab20-db6d9ae7245e": {"doc_hash": "72438db53e82583b0ceb63476c33c0bbf3b190bf4a9c91b28ddb59cb79017986", "ref_doc_id": "29fad43a-5695-4286-bc25-eff91020241f"}, "d45d3921-d8ca-4ad1-a11e-35fd0ce71045": {"doc_hash": "989aeea73e68efaf5b02b7804a58dce1eabda84a58706d7b1bf4193ba667a3f3", "ref_doc_id": "29fad43a-5695-4286-bc25-eff91020241f"}, "d683c0d4-eff0-49a9-855b-8be0efaac829": {"doc_hash": "c85556f147da60fb7396b65fb3356c2963e1e03a914486c13f9f9bf89c0d6d73", "ref_doc_id": "29fad43a-5695-4286-bc25-eff91020241f"}, "f2d8b0fe-11fa-4721-b296-d7fce33485d8": {"doc_hash": "722092a96de0ea9ec6a7dd859cf0608cfb61bd54e1753553b4547930678fc11a", "ref_doc_id": "35ff0f02-f8a3-4f4b-9b80-c52d809439ef"}, "eed605dc-21d5-45a3-ba2a-e00cedc08614": {"doc_hash": "f57e4f350e6c7a8f1841459ed62f452c6396dcf59ef5e4a2d6dd51180641f3d4", "ref_doc_id": "35ff0f02-f8a3-4f4b-9b80-c52d809439ef"}, "ea96a8a2-4442-4ff9-b5f0-e0b8bc61cce5": {"doc_hash": "31bff821f0364b1cabbd34e8e1862c0cfa90b3c8f81eba8fa04d66c8eddb932c", "ref_doc_id": "35ff0f02-f8a3-4f4b-9b80-c52d809439ef"}, "50701628-4ae8-4c0f-b246-07c40bcacef0": {"doc_hash": "03821445ed54a2b2d7362731c15bf062b0c381c60c9140376d6a1a3b62749788", "ref_doc_id": "35ff0f02-f8a3-4f4b-9b80-c52d809439ef"}, "f997fc05-f082-45a9-ac34-35213160ab39": {"doc_hash": "7990782086b05c8f10530ae4abd83bc50802693943917a4f7cde652b44a8e0c1", "ref_doc_id": "a99b8daa-2f23-425d-86bd-5a50f12846ec"}, "da9beb03-1f3e-4b25-a36a-0bb40c1d2227": {"doc_hash": "4a728fd0bd70b4aad7316ddd46b3cef9cd273e1a691501707dc92dc4653eb36c", "ref_doc_id": "a99b8daa-2f23-425d-86bd-5a50f12846ec"}, "ce6915a9-e74e-4132-a576-43a0743acf04": {"doc_hash": "d15ea8360bdb9864c76cccbeb650a194246fcf1d02f73b05d0979c062c558332", "ref_doc_id": "a99b8daa-2f23-425d-86bd-5a50f12846ec"}, "424dd8a7-d41e-4c7d-888b-60048cf12eb3": {"doc_hash": "bc2fce9728fa58d37f14b83ace0dbb8e179f9e35e3e0a13d2c3e8128fd4a620f", "ref_doc_id": "e2a0dbaa-daf2-400a-8a52-560293fb0472"}, "bcc2842f-dcca-44b6-96c1-a02b60ed42ec": {"doc_hash": "79b627940452a02c917b588f29291b4ff51f6441909c1e4fa759d58e8190fe9a", "ref_doc_id": "e2a0dbaa-daf2-400a-8a52-560293fb0472"}, "a3fc4df2-de3c-4d38-96f1-556757db1937": {"doc_hash": "addc2a4de2cf0310da4f3c05e1e43aefc5d89a80f15248cd92c122b90944b9a0", "ref_doc_id": "e2a0dbaa-daf2-400a-8a52-560293fb0472"}, "aa4c2471-0e4b-410e-9d13-274b0c84c895": {"doc_hash": "65a8326008e5e9c93eac3d57229d6fc5e9970cad44525805ffcc19faba5b87cc", "ref_doc_id": "e2a0dbaa-daf2-400a-8a52-560293fb0472"}, "5ae76d28-2afa-443a-b517-81fd19c03ba4": {"doc_hash": "2ead5854d221d9fc79243c4ac2e79c6abaeb0e69e82068721670d5a97f5159a8", "ref_doc_id": "e2a0dbaa-daf2-400a-8a52-560293fb0472"}, "fcce7b07-dfff-4be7-8002-9a29584e0517": {"doc_hash": "ae190f858fa4d18b3c0c7626707776253e21279281e2bf422ac9ae70b80111d3", "ref_doc_id": "e2a0dbaa-daf2-400a-8a52-560293fb0472"}, "f40faf80-60cc-4fea-8e42-6eb0f03e2cfb": {"doc_hash": "de9920144a9e618ec266c6e715fce188b8781bfe644c4072baf944a8fe22f307", "ref_doc_id": "e2a0dbaa-daf2-400a-8a52-560293fb0472"}, "10c704e9-4bac-4a12-9842-685048fb12c9": {"doc_hash": "bbb7ebe81a605be99f2f44d325e90348f14689f65f45db01e7e2493d8ce0ab24", "ref_doc_id": "fb50b1ba-bc1b-4d74-951a-22d5167d5ffb"}, "32935be2-7368-48d5-a254-cea57dc8b5d5": {"doc_hash": 
"77a53e89e51b5cf3782de3e66f9b12eb3b824aeb72c47f56dc5cf3fb196214d7", "ref_doc_id": "fb50b1ba-bc1b-4d74-951a-22d5167d5ffb"}, "e5a73dc6-e5be-4c60-b357-6805138bb333": {"doc_hash": "1c61bc7678dbf46dd50f1c45ee160fddd65cc73ea40af0ca46b1a2464c3256d4", "ref_doc_id": "fb50b1ba-bc1b-4d74-951a-22d5167d5ffb"}, "69ed6cbc-effa-44cb-b100-ea1cace6e492": {"doc_hash": "17f967d82e52c7268091fdcee2fcd5500d83e0ede3cb33a0cedb0067d93f771e", "ref_doc_id": "fb50b1ba-bc1b-4d74-951a-22d5167d5ffb"}, "c48f1ef4-0130-488b-a028-7f883c4a6430": {"doc_hash": "4eb08cd23638864d6c8b13920fa3f4f823417fc357bcb9fb966788ddd256b678", "ref_doc_id": "fb50b1ba-bc1b-4d74-951a-22d5167d5ffb"}, "8321641d-8705-4c4f-b92b-2a5a93e98665": {"doc_hash": "777f104a4af5d3cb85ee2bbc2ad31c0a79defcbdc6ab56b2cc4cfdafd35e0ad7", "ref_doc_id": "ae82459d-41ef-4ac9-9820-3eb69e0be922"}, "f3c7a322-53a9-47b6-8a14-2c46258f7c89": {"doc_hash": "859060cfb63f4a1c868e3b3c6d063a17af5874879a3abcb3d03f948737698f3f", "ref_doc_id": "ae82459d-41ef-4ac9-9820-3eb69e0be922"}, "9f7c6bcd-56bd-411f-9e27-03d9c7050384": {"doc_hash": "ad2858902754f42daea000681269e6a00b6e492aff2bb01116ab6a1995fe6216", "ref_doc_id": "e77f0ca8-a8fa-47af-85cd-aad9ca3cc25e"}, "68088673-ea16-4f8c-bbb4-9b5e6f6fdef5": {"doc_hash": "c53f2e6e83687dbf33b5e1443e3138ac32ebf752eab8c83fba716bf23131b196", "ref_doc_id": "e77f0ca8-a8fa-47af-85cd-aad9ca3cc25e"}, "63738b65-9843-4ff1-b6a1-8ddef353cd8c": {"doc_hash": "7fa30c61984a02a847addd85deac15f56d93a5ae09656cb1d778d80da7027429", "ref_doc_id": "1dcb5cb9-fe5d-4ddc-85cd-2e85f79f60f7"}, "4ab7b75b-e6bf-46ec-b174-bfe670f1168a": {"doc_hash": "644ea2c2c137e0109637a37ddb64d891d66f4806a850d3df4bd8fe4a0d6412ae", "ref_doc_id": "1dcb5cb9-fe5d-4ddc-85cd-2e85f79f60f7"}, "bab46858-35ad-4e50-8cdf-434b6ae20645": {"doc_hash": "34bd881779db752ec2937e948a5c23f17380370515419b19772d866aa0e462a0", "ref_doc_id": "f53299ea-9b05-4449-a158-4c5ea426befc"}, "95228c59-35b1-46a1-92e6-59449d559e11": {"doc_hash": "8c5397da689efade87c6c3f0f5668d8daeccfb6654500cb0704fd7427460729c", "ref_doc_id": "f53299ea-9b05-4449-a158-4c5ea426befc"}, "7f5c8cb8-aa49-454b-8207-07e69b5210f5": {"doc_hash": "e0f1e7362d215e7d2563db6a8e5aee677ee27867bbeb68f88beb80b3c807804a", "ref_doc_id": "495e4cc2-2841-4f5f-81da-e858fa85f927"}, "7a766a69-d441-4edd-a442-d9bc9d0228d6": {"doc_hash": "37f5b16cabc14d7773d37a02b8c5a80aec9a924f352257d6244a15b3a9976a2c", "ref_doc_id": "495e4cc2-2841-4f5f-81da-e858fa85f927"}, "1553f926-2e53-4d00-87de-e94c0769dd93": {"doc_hash": "57f9c2ea68a8fdce036a9aafde6856ce7f66c43af0bfbecbb7ef933373ecb7a8", "ref_doc_id": "730f6711-eabb-4590-9fab-575896022f6c"}, "22d51c10-fe03-4329-bc30-7b011e523841": {"doc_hash": "327769e5f7bc05ff01d5f71874c43576108d68967503623fcb27bc7814c39d17", "ref_doc_id": "730f6711-eabb-4590-9fab-575896022f6c"}, "92e8d650-e363-4d70-a1c0-e748c5393256": {"doc_hash": "18417ca71c2b53898fd417ce94f1fbd456b0137e9664e9a6f394e773f96ebf2e", "ref_doc_id": "f3cb11e5-8af0-4129-9192-4b58bde83b30"}, "48de3e95-ec15-491e-865b-568f9e614cc9": {"doc_hash": "f44733c47a39bcb9a159f2952a09d84ae4068c64a4dd3ee59cf910cbfa7e299f", "ref_doc_id": "f3cb11e5-8af0-4129-9192-4b58bde83b30"}, "655edc73-5ce9-4802-9c63-0f7f4b2b79d4": {"doc_hash": "90731e741d544b73dd697485e5ec78d563cff3a410a8852f6332573539fd9bbe", "ref_doc_id": "56f26472-42eb-4429-b4c7-024d54499981"}, "eaaeb217-2fa0-4fbf-8787-a23d9f4b23a9": {"doc_hash": "160b256a10df27ca7df01c54a12bd959d0354013226a513e2c747ac690ccf2b2", "ref_doc_id": "56f26472-42eb-4429-b4c7-024d54499981"}, "83e0514c-8117-4af1-a568-e05c05cac038": {"doc_hash": 
"13894aa8a1948c7959aeec130722746bb4b1fe01f8a7df4a58f950b666c2d6c7", "ref_doc_id": "014f9f76-25c6-49c0-9684-97ff451b3a3f"}, "e9c43a6e-719e-4651-8a63-20dbd18f957c": {"doc_hash": "7b95909313a55486156fa06078c42f86de008e2298843ef9cb11250b934549b4", "ref_doc_id": "014f9f76-25c6-49c0-9684-97ff451b3a3f"}, "f15188af-1a6b-4679-97ed-ceccd73fbc4e": {"doc_hash": "69d387d9a0325b93644fa8cad5d9dcdd621731fb4868a7edcd83eb3b4fc3b545", "ref_doc_id": "14282b74-c50d-4960-bd8a-324dbd31df6c"}, "cbcff33d-dec8-4395-a7b0-3b883b03327b": {"doc_hash": "8cd76ed9abbd9e628709b4e1e49369649c5ceee86b6794e0f17e21fb33ec4aba", "ref_doc_id": "14282b74-c50d-4960-bd8a-324dbd31df6c"}, "d991ee32-0afb-40a7-b5d0-5b3e60a70557": {"doc_hash": "f2f71ccc41200004fad7bf95b574ae92a1e79d8c516c69470cd8193d5f5906e5", "ref_doc_id": "b0861860-7803-4610-8cec-2a5dd7c79879"}, "bcffad6f-fe7f-499c-b681-360725514cc2": {"doc_hash": "d9b745bd5fc9354af0c969356ac8b4bb501b83568136f6357493517b7ff65fe3", "ref_doc_id": "b0861860-7803-4610-8cec-2a5dd7c79879"}, "7cba94e3-99b9-456c-b068-f79e84d6d8cd": {"doc_hash": "81072b254b353d9b74e4673570ba06565bbd0c06f4db1aededf74661a5ae5cf5", "ref_doc_id": "290c3c9f-bcba-49f3-9af1-8e83a8b02247"}, "3680cb90-e69d-40ff-8e85-5580f194b152": {"doc_hash": "e483ed9ad19faa3501b1cf0058d3cc4265762db334539182df09e5b3ebe3888a", "ref_doc_id": "290c3c9f-bcba-49f3-9af1-8e83a8b02247"}, "db81a990-35d0-474d-ad9d-e5f4c0122776": {"doc_hash": "1a7f87cadc0fd63b3a1ea8202fa1e5a4f5c109652a07e9090eb4c0474c4dfa90", "ref_doc_id": "9d6248bd-b296-4a6d-9600-3d026b72246a"}, "fb3db90c-a561-46fe-a2db-785b9fb9cb73": {"doc_hash": "3161abe8415bc95ea8b5343645f317c8d3b19c2a3b1b619e49680a9a4b9926f4", "ref_doc_id": "9d6248bd-b296-4a6d-9600-3d026b72246a"}, "294d6aff-90f6-4f41-8d36-51e086905848": {"doc_hash": "7c5dfc6263ee2fdd4e1d29530489aee794b677009d0e38fc3ce2fc126f5c0483", "ref_doc_id": "7565958f-8cee-4307-9686-729fbd2fa991"}, "98ab4b4c-460d-4789-8dc1-9d7efa325853": {"doc_hash": "6e69af0863a9bdf465871b31be8f5db7bfce2a86419b5e1d97c3aa8d55faaffc", "ref_doc_id": "7565958f-8cee-4307-9686-729fbd2fa991"}, "2fdc5d29-c912-487d-ad1a-c2d26d3dd110": {"doc_hash": "f1802c67bef335d7f65ab00d6743e71576307b3439c072fe5ef1959202a5ed11", "ref_doc_id": "7173d059-7feb-4711-a7ed-cbaf25bd35b1"}, "e820746a-5f9b-4646-9d8c-7c72b50717be": {"doc_hash": "8510c50aac4a1935a42c61ba299d9465ffd9d42280b418c4f3e32aee2ab54968", "ref_doc_id": "7173d059-7feb-4711-a7ed-cbaf25bd35b1"}, "7dceb720-792d-43b2-9e4a-8d373b9f0b6e": {"doc_hash": "cbc0f8975699b8753d389e638557ca95cc576f222eae47a98714a4ad42f17462", "ref_doc_id": "dead2a62-5d5f-4ac8-b2e5-dfa16f691ab0"}, "49ed5d4f-fb50-4f4c-94e5-dfae17b9b7e8": {"doc_hash": "50c4ecdd5d696100d158bebbf93a6ad61ad1466dc10848128fbf82193baee283", "ref_doc_id": "dead2a62-5d5f-4ac8-b2e5-dfa16f691ab0"}, "0f0e83cd-5678-426b-b828-7f80df9c85e3": {"doc_hash": "2d330d75caa4737f89d516d0aaa8c05fac7c104138cc76c4a0b3a5ab9666f4c9", "ref_doc_id": "dead2a62-5d5f-4ac8-b2e5-dfa16f691ab0"}, "d9122d3c-7a34-416a-8b96-d071fbc97ff1": {"doc_hash": "38198c0616dce6a00fc7ee3d36f11d3fe63090f395c1019a3089fb7979b9ca07", "ref_doc_id": "cdee50c4-5767-4a9d-9c14-65222d9a2c67"}, "326af7e9-6f5b-4e17-a2ba-6101200750e2": {"doc_hash": "4c3b31fcc218114d24961bfdd3e1d329ca7ac55a19b992382ca18406dfce3815", "ref_doc_id": "cdee50c4-5767-4a9d-9c14-65222d9a2c67"}, "f8b77e70-a304-42d0-90e3-03fe2f913ff0": {"doc_hash": "1e8af07606e1fcb090b659a8eefef63afa9730bd9aa18267f5de1de99406def1", "ref_doc_id": "cdee50c4-5767-4a9d-9c14-65222d9a2c67"}, "225511cb-2445-4b55-8fba-74e2779df88d": {"doc_hash": 
"8eb89bc098cc88a1cd617ccc5ea6891f1cc20bc391a55ed1c56fe05f9e3f2b05", "ref_doc_id": "1eb867f6-2334-4615-9360-df36808375eb"}, "cbf07eca-e026-4906-964b-811831c2c3b9": {"doc_hash": "b27832d1bcd3f3d7fc3bf739b5e0b7599c1cfe810774b0a9f7798a47d4b8a967", "ref_doc_id": "1eb867f6-2334-4615-9360-df36808375eb"}, "905772c9-f508-47a1-9f3f-ab5f94a5fb57": {"doc_hash": "92464fbd6d1e8dc196d85d4ed27792dec4dfad9a12efcc13e492f2eeabbfcbaa", "ref_doc_id": "1eb867f6-2334-4615-9360-df36808375eb"}, "1e63b909-05a4-48d8-92b3-c8db6e3caeec": {"doc_hash": "28a822e278204927cccb8e0eb20083566c5171bd81b47560b01981faef56b269", "ref_doc_id": "1eb867f6-2334-4615-9360-df36808375eb"}, "01d82a0f-42e6-49ef-9a7a-3e175795b415": {"doc_hash": "def959a033d12011e58530d6cd34053416a9388271fafc9c899f25cd29cb3dbb", "ref_doc_id": "681b072c-d4ae-407a-b4d2-2e37d084b018"}, "ae8f3a67-6919-4cf1-9c7c-5ca1a07e0402": {"doc_hash": "1b1bb59ffe0e2804f9b06f8880b9159b2aea6ffd394a80ab16326e058653a702", "ref_doc_id": "681b072c-d4ae-407a-b4d2-2e37d084b018"}, "611d01f7-d6d0-41ee-ba40-7ed46668a98e": {"doc_hash": "cea31513e031df82fbc5d812c65da48e406a824cdc7ca07ed7ecb8e363480a60", "ref_doc_id": "681b072c-d4ae-407a-b4d2-2e37d084b018"}, "ccdf0301-0bc3-4355-a575-31f5cea8baf6": {"doc_hash": "f15944ec52d0509bb12c9338985646ea59ba84f639b0d3ec3d7789ec25433b54", "ref_doc_id": "681b072c-d4ae-407a-b4d2-2e37d084b018"}, "6262b8bd-2101-4d15-b3ce-a56d217b7c4f": {"doc_hash": "38d6830d511ad1f421b5e395fe0d133afa24f76aafca0a71a4d682d5a6f24318", "ref_doc_id": "859b3475-3d01-499b-9e73-8c43546ea104"}, "5bd87cc3-41e4-41fc-bf4f-24d867b3af6a": {"doc_hash": "cab86ca6821292ec51ba7986744fce1f1001afc5354e332479d7f14f331995d5", "ref_doc_id": "859b3475-3d01-499b-9e73-8c43546ea104"}, "eb6d3b55-7df3-4945-af0f-2ef1bd12223a": {"doc_hash": "42515c7003ad66d66479ef220df85cc0878119096f1c53b72d493f6b28f79bdf", "ref_doc_id": "859b3475-3d01-499b-9e73-8c43546ea104"}, "8c9b8955-6862-41f4-b727-0e4ed0876bc1": {"doc_hash": "1249f31ab9a18e116dead7b11d197698bfc1e0dfa00ee2183eaa1d7fb1465460", "ref_doc_id": "859b3475-3d01-499b-9e73-8c43546ea104"}, "48643cd6-43bc-4667-bc82-e8bd01a20fbe": {"doc_hash": "139abb98a7fbd0b1845dbe5dce70a51e7c5592e4e013b4060e1fed9771d5b80b", "ref_doc_id": "40514f02-0035-4454-bc49-ddb28d097eee"}, "fa0e90cb-9ff0-4ef5-b097-09d1e3ee43e6": {"doc_hash": "7fc1518376332e4eacfbac8512679727d105e83caf0b83de897c345f364096ee", "ref_doc_id": "40514f02-0035-4454-bc49-ddb28d097eee"}, "f516eb1f-209f-440e-90bc-fb7242626f8e": {"doc_hash": "77e43f3ce8d665a8cc3def596bbf8f11a6c86e4c19b86a44b2273c3f6d491941", "ref_doc_id": "40514f02-0035-4454-bc49-ddb28d097eee"}, "fa3ee10b-cd66-434d-8184-baeb41614874": {"doc_hash": "9515212925dd815cae0ed80b972ef0046da39d417b2e2e339bc6625ed9da2499", "ref_doc_id": "40514f02-0035-4454-bc49-ddb28d097eee"}, "e611770c-8153-4e7a-b7e4-9bb00a639a23": {"doc_hash": "4464b0a75435307c8c066ad529aae5d6ed37d25cc1182b2d0069cfc7bb0e74ae", "ref_doc_id": "610d896b-01cd-43ed-a0e6-3847712913a6"}, "ecc7d6b0-a501-4eba-856e-8986c908bc51": {"doc_hash": "e650c9aacc7b4d589bf819777a111711381969ad1592671fd34dd4f868444d87", "ref_doc_id": "610d896b-01cd-43ed-a0e6-3847712913a6"}, "901519b1-7622-49a8-9330-7c8742988373": {"doc_hash": "2b404ffc1a06cb0292ef35c9f7ec9f4db462ee3686090495b0846ab85a05b235", "ref_doc_id": "610d896b-01cd-43ed-a0e6-3847712913a6"}, "964a0eb7-9428-4dd4-9e99-7c1c305368b7": {"doc_hash": "820661108183802aacbfa5a36aec905c9716d357e8d203bc050c58a2dddd3eb0", "ref_doc_id": "610d896b-01cd-43ed-a0e6-3847712913a6"}, "e87ef8c8-9326-45ce-b9c3-6cee2f3657cc": {"doc_hash": 
"f580fe8c798ceb3aed542a5e5c00d693126e0efb6980d508421ff0098859cb2c", "ref_doc_id": "be623874-dc7e-4bcd-8475-12c168009c09"}, "8d443c3d-7a59-44d4-8805-914e661ee2f0": {"doc_hash": "8c8529ad7f2f2821e38d32d2fce11a0dfadb81776fa1eedeb00d3ce4bbcc268f", "ref_doc_id": "be623874-dc7e-4bcd-8475-12c168009c09"}, "0b714dcb-fa1b-4b76-a488-6e2e97e176d9": {"doc_hash": "845ecb807d7121426693db0218a15752d51bba0a25c694cba2663afd921c9332", "ref_doc_id": "be623874-dc7e-4bcd-8475-12c168009c09"}, "dbf9dbe8-bc64-45da-8e15-ab4ef31719e7": {"doc_hash": "69e6d6cdfda07f25dec04debd018c2bb22a2efb05eea2c7d21a7631b4d82e5c3", "ref_doc_id": "0a852b0e-e292-450d-b078-23d3d016ca1b"}, "521c0d11-6e5c-4861-a22a-2de66ae3212e": {"doc_hash": "ee7066cbd6bfba02beebc87b77b6c0146b667d26b526589919d89fe793c183c8", "ref_doc_id": "0a852b0e-e292-450d-b078-23d3d016ca1b"}, "b2d69dce-fa88-4cc8-bf2d-50a971131427": {"doc_hash": "98bdcf6ab62d30fada505ce30a6c176b65c1bf31a0bd46602915df29ed223383", "ref_doc_id": "0a852b0e-e292-450d-b078-23d3d016ca1b"}, "a9e7a5b2-baab-41cd-9e53-cf51a056b43f": {"doc_hash": "491bf4d7ff80958f5d85db48f9ae3fefa286d2670192ed8c76ca4041675eca33", "ref_doc_id": "0a852b0e-e292-450d-b078-23d3d016ca1b"}, "63113b91-c230-4616-8d00-8c8c00bdfac0": {"doc_hash": "01b1d86929a270975c514bb26f69e22de0686b4ae98c98f20137a0b2c5d86d4c", "ref_doc_id": "5fe7fedf-099f-4582-a322-79c0ef11a5a0"}, "acf4a408-80f0-4cff-81cd-43b391a84acd": {"doc_hash": "bed8a0c28543ea5a298ff1404b1bb5ae2309d0a9da96e06c1b14f1f9dbfd7109", "ref_doc_id": "5fe7fedf-099f-4582-a322-79c0ef11a5a0"}, "3fba50c3-5bfc-40bb-82eb-b3bb81ada00b": {"doc_hash": "fd6f2912fe6326a03c33d5b873179c4fffcf84abb056f15097a07c4985189fe1", "ref_doc_id": "5fe7fedf-099f-4582-a322-79c0ef11a5a0"}, "18a89535-8812-44a3-ac60-8ad99c89c186": {"doc_hash": "0ce79c8973e89dc438ace61df170850e8fc82ce6229a9c13d57208e24f095af8", "ref_doc_id": "5fe7fedf-099f-4582-a322-79c0ef11a5a0"}, "06f5ce2e-ef90-4708-8a1c-b4066c8b6de5": {"doc_hash": "612164d3ee3dc43352b5ac4a8fa95088385a29eb19257b2554cb38e0e864b2c5", "ref_doc_id": "5fe7fedf-099f-4582-a322-79c0ef11a5a0"}, "df9f28c6-950d-4668-866a-d14382a04ac3": {"doc_hash": "0ce2ebec3d0f332e998f143bfa826bbae8b454290deb455a3de4978610ffd8ee", "ref_doc_id": "ff41215a-c415-4483-93eb-95855177d748"}, "aa67b0f7-851a-4ca3-a5e4-fa42c5b4f742": {"doc_hash": "1b345b828522c6a311864cfe0dd1241fcb899ffd02753c89f75cd6488ca3e131", "ref_doc_id": "ff41215a-c415-4483-93eb-95855177d748"}, "08122aef-967b-405c-bf7a-59e00d11c3bf": {"doc_hash": "1c5077b9dd344a6b6bbc471ab6d606bac3acd6be818d475b3e8682a49c6993fb", "ref_doc_id": "ff41215a-c415-4483-93eb-95855177d748"}, "032654d2-5c3e-42e0-a60c-1b997fff46bc": {"doc_hash": "273a95ef2b0050d789b7b2658c406c2c3930add4b47b3771e6bae99b6e82af19", "ref_doc_id": "ff41215a-c415-4483-93eb-95855177d748"}, "7cfac3bc-7928-42bb-a503-9452236d429d": {"doc_hash": "2e07824ceb05973b42ac970c935187fc24bfd021f630cd1497fe061b0880bd2f", "ref_doc_id": "5a95b011-603b-4da0-8be7-a49d329ac9d4"}, "8099f264-3349-43ee-8007-914a10d6849d": {"doc_hash": "0197370ffe74549ade4352304cc6147e1458c9c73c82758e27914218c14aed03", "ref_doc_id": "5a95b011-603b-4da0-8be7-a49d329ac9d4"}, "6497cf78-b671-4e46-a376-1bda901cf237": {"doc_hash": "9563a9cc58c19b920d05e0361c4e8ab2de8d3cf6be4b1d26aa98b1a1f5379222", "ref_doc_id": "5a95b011-603b-4da0-8be7-a49d329ac9d4"}, "165c7a08-432f-4e76-b33c-17a559a71ede": {"doc_hash": "dc3dfc9e1bcf886b06a5eebf7b86294f6bd9a166c0eb7d346cd5e7fa3d9fde95", "ref_doc_id": "5a95b011-603b-4da0-8be7-a49d329ac9d4"}, "ff129a41-48b5-4357-9d1f-6e90681837a9": {"doc_hash": 
"ddc28283d402d053e251a9778812586b7d6e7a92388b2befd53b64e190da702a", "ref_doc_id": "b00f5eab-fd70-4b2e-85d3-7869e449d678"}, "9b381efd-dc5e-414c-ac91-ae92d12b437b": {"doc_hash": "095845762fcfe291a41b55b3079c3d370223bf81822b9b6a2c33a299bd79ee3c", "ref_doc_id": "b00f5eab-fd70-4b2e-85d3-7869e449d678"}, "faff713f-cb36-41d9-b962-61b6e460c171": {"doc_hash": "d330c8407f7514234df0ae6f67d5f7d6477d68b4d6825a7375f609db158b03da", "ref_doc_id": "b00f5eab-fd70-4b2e-85d3-7869e449d678"}, "59fe409e-6841-45eb-a97b-1025f3e31d5f": {"doc_hash": "409ba42ef7cca62b3312ac87caad3628acf2caaa194868578f4b0c5d159167de", "ref_doc_id": "048dd051-017e-4c75-8cc8-56155e8a7606"}, "3956a0f4-aa18-43fd-8ef9-88c022039dd9": {"doc_hash": "a82a0255864336c892b6009ed2658da9d127bd33414e2cbc329478b6e1c0120c", "ref_doc_id": "048dd051-017e-4c75-8cc8-56155e8a7606"}, "c9400c8d-2f2d-47ad-a0ba-14d368a727e0": {"doc_hash": "677a77a146186df76e5f5f63ff491ac0fc12114308a82c4f3f470e016075d581", "ref_doc_id": "048dd051-017e-4c75-8cc8-56155e8a7606"}, "501e4e69-51dd-48d6-aece-78cf440325c6": {"doc_hash": "54f6f6a43efb835c6b425d427c68832f9589cbffbc945b97e922eeef7caec282", "ref_doc_id": "048dd051-017e-4c75-8cc8-56155e8a7606"}, "1aef728f-b3e4-4b9c-9136-3a9d0953b58e": {"doc_hash": "6eec42707e0c579aba0c881f40568b35979c405ff297f1c78f3a74cea7ab0484", "ref_doc_id": "048dd051-017e-4c75-8cc8-56155e8a7606"}, "40640fb8-4915-45b5-ba23-1695511805b7": {"doc_hash": "ef4a8fb9ba8ae396a86921ab0560b3d4d1bbf4e7dc2d0d17e98be9c7ed36668f", "ref_doc_id": "048dd051-017e-4c75-8cc8-56155e8a7606"}, "3b8e3bea-cbf4-4897-aa54-54f0acf62ea7": {"doc_hash": "0dfb9466313eef6dd9cec85a62a51a2e7705f977aa66175e375a2cb0b076e721", "ref_doc_id": "eb12e17e-9db6-4386-a3eb-4a56c58432a8"}, "81353502-7a9f-4cf6-811b-958597b94420": {"doc_hash": "d195ab9fb67e775cf4a0455b991c2acfed4206a56772e2904047d1e309a02eb2", "ref_doc_id": "eb12e17e-9db6-4386-a3eb-4a56c58432a8"}, "6ea33531-c586-4e6a-afe3-de26070071eb": {"doc_hash": "d0f09d08c9f6b394184e3d0b7c6a8d40a470d97126cbd68488471601888f98b7", "ref_doc_id": "eb12e17e-9db6-4386-a3eb-4a56c58432a8"}, "7b4bec44-96e6-4c6b-97d9-1c6bce9dca9f": {"doc_hash": "52859f1f9cb6bedf2d08759a08c8cec99188140fea3422698bc798077cd3bfab", "ref_doc_id": "eb12e17e-9db6-4386-a3eb-4a56c58432a8"}, "c29bb93a-8e6f-4b72-bbd2-4152c741fa99": {"doc_hash": "433f76ba3bed8cd97966d784347529bc89afadbce96bd2a50fa3b1f27abdcee0", "ref_doc_id": "1ac83d1c-fd83-4278-8d24-a5d0959792ff"}, "05b64c8c-ca3f-4f9c-a881-823217e809ea": {"doc_hash": "5063af8a26606634d7d9a6319c5ea2396da3763389de2a5f27c4a4583ed0452f", "ref_doc_id": "1ac83d1c-fd83-4278-8d24-a5d0959792ff"}, "288a70d7-8666-4e24-81a3-3c37b3604d68": {"doc_hash": "8af4e889726f621ed011d81edd6cd127b095e62951aa622d2f6532e08350ad9c", "ref_doc_id": "1ac83d1c-fd83-4278-8d24-a5d0959792ff"}, "87e661ff-4281-47b7-a252-fd19b2b6d718": {"doc_hash": "c51af269a8585c010c128204a36dcb25fcc8c4d54b259c420c49a2731ed274dc", "ref_doc_id": "1ac83d1c-fd83-4278-8d24-a5d0959792ff"}, "e1906c45-5a5e-4047-8395-b9a6f1897795": {"doc_hash": "8a8f084856a183b161676aeaff773af8ece66e3b91f3a950959b23277499ce6c", "ref_doc_id": "1ac83d1c-fd83-4278-8d24-a5d0959792ff"}, "d6f82a48-424e-4ae2-a937-a285a9de548e": {"doc_hash": "fad213b89c8a5a3644ff36c52f803e70f15d66a38dbeca2ba2dd7e3bc2309d5a", "ref_doc_id": "1ac83d1c-fd83-4278-8d24-a5d0959792ff"}, "eaf58355-265d-48cd-bde3-53487892ef90": {"doc_hash": "08354932038598ecb132193ec024e833fd84070e56eb908e2f96c2ae4459c402", "ref_doc_id": "601d1b58-4d75-440e-94b7-fb7ca8fc8e1e"}, "b0adb19b-2692-4dde-8a37-0f7c01619d39": {"doc_hash": 
"9f3772f3873e7facbbf2daa3b35b95ea29f48fe9fdf04cbd30f67833885e5dda", "ref_doc_id": "601d1b58-4d75-440e-94b7-fb7ca8fc8e1e"}, "ef3af981-99f3-470c-af61-f34b6972dcf6": {"doc_hash": "66a9c84533e546f231095ed6cd85b50ab7a5b06675e702c1a16ff5b9110d8233", "ref_doc_id": "601d1b58-4d75-440e-94b7-fb7ca8fc8e1e"}, "f92579c1-f277-4332-b6f6-19b0b71653fd": {"doc_hash": "bd5e11acb5888d0d5cc8a108e7a9f3b109a362022de70d4a77d3463423cd10b4", "ref_doc_id": "601d1b58-4d75-440e-94b7-fb7ca8fc8e1e"}, "f836d01d-b5a3-43a1-8228-4c3545a49b74": {"doc_hash": "7baf3821ec621ee466d9d97c3bcf0fd76f010e29d4a3d5ddb39d003e6e71665f", "ref_doc_id": "70cc21b2-2872-44f5-b6d4-d263796db69a"}, "0f1e2bc5-ad7e-4e4d-8aec-456a458197e1": {"doc_hash": "895e3084e260252f906fcada63068846e6ea852021e7c81de6ac246cd472b5a4", "ref_doc_id": "70cc21b2-2872-44f5-b6d4-d263796db69a"}, "2632bd44-0cb3-4109-88f3-9a616044f28b": {"doc_hash": "80461c0def70804ff3e0216bd4e0266dfd3e6d6cf90636485e63cd89c8133945", "ref_doc_id": "70cc21b2-2872-44f5-b6d4-d263796db69a"}, "9e5dbd8f-942b-4303-9769-f5e5d7fb85c8": {"doc_hash": "2cb0d53347bc2a608df540359e726acfe961331465d964e19bce469d3871d2fc", "ref_doc_id": "70cc21b2-2872-44f5-b6d4-d263796db69a"}, "8b40f1af-d50d-45c2-9619-b970156d4678": {"doc_hash": "b9ef6848875994ed63b95605c6b43f76e0de9f289f1ae9590a494e14544e24ef", "ref_doc_id": "70cc21b2-2872-44f5-b6d4-d263796db69a"}, "5b2e1286-1dd5-4979-a759-56c63446f281": {"doc_hash": "962a1f5ef7635034d5255e8b969dabee1e85072755680f461d10af92e9187d6b", "ref_doc_id": "3e952d87-b8e5-461b-b7e8-da3c8d8a509d"}, "2232ceba-b840-4b3c-a4ad-80c015f78c3d": {"doc_hash": "f18dbbe62d9fe675ce7a9abc7585e00f3f15d5bd0271f6f8f20cc496d5fdfed2", "ref_doc_id": "3e952d87-b8e5-461b-b7e8-da3c8d8a509d"}, "6f64a7d5-e4ad-4eb6-9398-71b9582c25e8": {"doc_hash": "5958dfb7b149730bbc4e6b9b26ffc4df06ed7e07fa9c9d7b0680881cf8e16faa", "ref_doc_id": "3e952d87-b8e5-461b-b7e8-da3c8d8a509d"}, "3db7c785-149a-42e3-82e8-e41b06a93aaf": {"doc_hash": "95a251886ad2c362b2490488fa72c808e51ff8db4cde49398bb1ef348617734c", "ref_doc_id": "3e952d87-b8e5-461b-b7e8-da3c8d8a509d"}, "14762bf5-aeef-4d8e-b49c-a462f37815a2": {"doc_hash": "6a2719bfbb3fa3eabb0af3b222f8d5b2825bba8a81527e430904b614996a1d58", "ref_doc_id": "e241b9f8-0862-48b2-9782-788d8006a633"}, "78b6d842-f4b1-4173-a564-df0fb805b61c": {"doc_hash": "bbb4c6e9de28c42da00d0d1b63efdca8223145d53d5a12dbeba53cc901a6f09d", "ref_doc_id": "e241b9f8-0862-48b2-9782-788d8006a633"}}, "docstore/data": {"3e66fc81-176b-49c9-b86b-24852aa480b7": {"__data__": {"id_": "3e66fc81-176b-49c9-b86b-24852aa480b7", "embedding": null, "metadata": {"page_label": "1", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "cf01bf3c-485c-4cc7-8dee-d50e81ed1f44", "node_type": "4", "metadata": {"page_label": "1", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": 
"2023-11-30"}, "hash": "11c8a3d01644ae0e8e8d6ff334d5f6546809c9553f532379a424055b9d2180e2"}, "3": {"node_id": "e62f4e51-0196-4a3a-9ff5-7d0ac4f08d88", "node_type": "1", "metadata": {"page_label": "1", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "13a4d07c6601b199c952fa3e10f1c57ed71b110800a7ab248ab3f76896c1a92f"}}, "hash": "93f99800f617b1f2da213fdf9b225cde4e61cac098a8a97c00f2c16f00a0c6fe", "text": "1041-4347 (c) 2021 IEEE. Personal use is permitted, but republication/redistribution requires IEEE permission. See http://www.ieee.org/publications_standards/publications/rights/index.html for more information.This article has been accepted for publication in a future issue of this journal, but has not been fully edited. Content may change prior to final publication. Citation information: DOI 10.1109/TKDE.2021.3080293, IEEE\nTransactions on Knowledge and Data Engineering\n1\nCorporate Relative Valuation using\nHeterogeneous Multi-Modal\nGraph Neural Network\nY ang Y ang, Jia-Qi Y ang, Ran Bao, De-Chuan Zhan, Hengshu Zhu Senior Member, IEEE , Xiao-Ru\nGao, Hui Xiong, Fellow, IEEE and Jian Y ang Member, IEEE\nAbstract\u2014Corporate relative valuation (CRV) refers to the process of comparing a company\u2019s value from company products, core staff\nand other related information, so that we can assess the company\u2019s market value, which is critical for venture capital \ufb01rms. Traditional\nrelative valuation methods heavily rely on tedious and expensive human efforts, especially for non-publicly listed companies. However,\nthe availability of information about company\u2019s invisible assets, such as patents, talent, and investors, enables a new paradigm to learn\nand evaluate corporate relative values automatically. Indeed, in this paper, we reveal that, the companies and their core members can\nnatually be formed as a heterogeneous graph and the attributes of different nodes include semantically-rich multi-modal data, thereby\nwe are able to extract a latent embedding for each company. 
The network embeddings can re\ufb02ect domain experts\u2019 behavior and are\neffective for corporate relative valuation.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "e62f4e51-0196-4a3a-9ff5-7d0ac4f08d88": {"__data__": {"id_": "e62f4e51-0196-4a3a-9ff5-7d0ac4f08d88", "embedding": null, "metadata": {"page_label": "1", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "cf01bf3c-485c-4cc7-8dee-d50e81ed1f44", "node_type": "4", "metadata": {"page_label": "1", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "11c8a3d01644ae0e8e8d6ff334d5f6546809c9553f532379a424055b9d2180e2"}, "2": {"node_id": "3e66fc81-176b-49c9-b86b-24852aa480b7", "node_type": "1", "metadata": {"page_label": "1", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "93f99800f617b1f2da213fdf9b225cde4e61cac098a8a97c00f2c16f00a0c6fe"}, "3": {"node_id": "6f26c18a-9aca-461e-99c7-ef94b56b145d", "node_type": "1", "metadata": {"page_label": "1", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d8c22256ba38c7cae13e9ebfbfc7e61ec63653e5e3746db2f0d2c516bff1a693"}}, "hash": "13a4d07c6601b199c952fa3e10f1c57ed71b110800a7ab248ab3f76896c1a92f", "text": "Along this line, we develop a heterogeneous multi-modal graph neural network method,\nnamed HM2, which deals with embedding challenges involving modal attribute encoding, multi-modal aggregation, and valuation\nprediction modules. Speci\ufb01cally, HM2\ufb01rstly performs the representation learning for heterogeneous neighbors of the input company by\ntaking relationships among nodes into consideration, which aggregates node attributes via linkage-aware multi-head attention\nmechanism, rather than multi-instance based methods. Then, HM2adopts the self-attention network to aggregate different modal\nembeddings for \ufb01nal prediction, and employs dynamic triplet loss with embeddings of competitors as the constraint. As a result, HM2\ncan explore companies\u2019 intrinsic properties to improve the CRV performance. 
Extensive experiments on real-world data demonstrate\nthe effectiveness of the proposed HM2.\nIndex Terms\u2014Corporate Relative Valuation, Heterogeneous Graph, Multi-Modal Learning, Linkage-Aware\n!\n1 I NTRODUCTION\nRecent years, we have witnessed the increasing popular-\nity of applying machine learning models in software as a\nservice (SAAS) and various enterprise applications, which\ngreatly reduces the manual cost and improves the operat-\ning ef\ufb01ciency. For example, [1] proposed an intelligent job\ninterview system, which can be applied in human resources\nmanagement (HRM); [2] utilized the structure-aware con-\nvolution neural network for talent \ufb02ow forecast, which can\nbe introduced into enterprise resource planning (ERP); [3]\n\u2022Yang Yang and Jian Yang are with the Nanjing University of Science and\nTechnology, Nanjing 210094, China.\nE-mail: yyang,csjyang@njust.edu.cn\n\u2022Jia-Qi Yang, Ran Bao and De-Chuan Zhan are with the Nanjing Univer-\nsity, Nanjing 210023, China.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "6f26c18a-9aca-461e-99c7-ef94b56b145d": {"__data__": {"id_": "6f26c18a-9aca-461e-99c7-ef94b56b145d", "embedding": null, "metadata": {"page_label": "1", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "cf01bf3c-485c-4cc7-8dee-d50e81ed1f44", "node_type": "4", "metadata": {"page_label": "1", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "11c8a3d01644ae0e8e8d6ff334d5f6546809c9553f532379a424055b9d2180e2"}, "2": {"node_id": "e62f4e51-0196-4a3a-9ff5-7d0ac4f08d88", "node_type": "1", "metadata": {"page_label": "1", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "13a4d07c6601b199c952fa3e10f1c57ed71b110800a7ab248ab3f76896c1a92f"}, "3": {"node_id": "9fbf274d-45e6-4727-a45f-aa4ad82e3a07", "node_type": "1", "metadata": {"page_label": "1", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "72365937a9e180a8f941c929cf7102778bbddd770d6803eebd038fe44f546e21"}}, "hash": "d8c22256ba38c7cae13e9ebfbfc7e61ec63653e5e3746db2f0d2c516bff1a693", "text": "E-mail: 
yangjq@lamda.nju.edu.cn, zhandc@nju.edu.cn, baorana@163.com\n\u2022Hengshu Zhu is with Baidu Talent Intelligence Center, Baidu Inc, Beijing\n100000, China.\nE-mail:zhuhengshu@baidu.com\n\u2022Xiao-Ru Gao and Hui Xiong is with the Management Science and\nInformation Systems Department, Rutgers Business School, Rutgers Uni-\nversity, Newark, NJ 07102, USA.\nE-mail: xg89@business.rutgers.edu, hxiong@rutgers.edu\nYang Yang and Jian Yang are with PCA Lab, Key Lab of Intelligent Perception\nand Systems for High-Dimensional Information of Ministry of Education, and\nJiangsu Key Lab of Image and Video Understanding for Social Security, School\nof Computer Science and Engineering, Nanjing University of Science and\nTechnology. De-Chuan Zhan is the corresponding author.applied neural networks for user recommendation, which\ncan be practiced into customer relationship management\n(CRM), etc. Meanwhile, there also spring up many en-\nterprise service companies based on arti\ufb01cial intelligence\ntechnologies, for example, UiPath1delivers data mining\ntechniques for document management, contact center, hu-\nman resources, supply chains, etc.; Pymetrics2combines\narti\ufb01cial intelligence technology for intelligent recruitment,\ntalent matching, etc. On the other hand, corporate valuation\nplays an important role in SAAS, which is to evaluate the\nrelative value of companies, and establishes a critical basis\nfor various pricing transactions in enterprise applications.\nThere exist several sophisticated corporate absolute val-\nuation methods, for example, discounted cash \ufb02ow method\n(DCF) [4], economic value added method (EVA) [5], real op-\ntions method (ROA) [6] and price-to-sales method (PS) [7].", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "9fbf274d-45e6-4727-a45f-aa4ad82e3a07": {"__data__": {"id_": "9fbf274d-45e6-4727-a45f-aa4ad82e3a07", "embedding": null, "metadata": {"page_label": "1", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "cf01bf3c-485c-4cc7-8dee-d50e81ed1f44", "node_type": "4", "metadata": {"page_label": "1", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "11c8a3d01644ae0e8e8d6ff334d5f6546809c9553f532379a424055b9d2180e2"}, "2": {"node_id": "6f26c18a-9aca-461e-99c7-ef94b56b145d", "node_type": "1", "metadata": {"page_label": "1", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": 
"2023-11-30"}, "hash": "d8c22256ba38c7cae13e9ebfbfc7e61ec63653e5e3746db2f0d2c516bff1a693"}}, "hash": "72365937a9e180a8f941c929cf7102778bbddd770d6803eebd038fe44f546e21", "text": "While these methods always require historical \ufb01nancial\nstatements of the company, which are dif\ufb01cult to acquire,\nespecially for non-publicly listed companies. On the other\nhand, some other corporate relative valuation methods are\nadopted. These methods usually rely on professionals to\ncomprehensively consider the core resources, members, and\ncompetitors of the company, and then carry on the \ufb01nal\nvaluation. Note that this type of methods can be used to\nestimate the company\u2019s value level without detailed \ufb01nan-\n1. http://www.uipath.com\n2. https://www.pymetrics.ai/\nAuthorized licensed use limited to: Univ of Calif Santa Barbara. Downloaded on June 20,2021 at 08:36:01 UTC from IEEE Xplore. Restrictions apply.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "149632c1-0f93-4a36-aa68-a8121850fa4a": {"__data__": {"id_": "149632c1-0f93-4a36-aa68-a8121850fa4a", "embedding": null, "metadata": {"page_label": "2", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "da0f0d1b-08a7-404e-971f-d4ac86f075c1", "node_type": "4", "metadata": {"page_label": "2", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "39233ab9964ec63ed25c7b38225e24fa8dd469d5b6424a91d03b0aeb40f84b69"}, "3": {"node_id": "5ab25872-e533-42d3-a732-7008791988df", "node_type": "1", "metadata": {"page_label": "2", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "2a1ca63c0aac62f04fc93ce637cb30109dd0c5e1a422e1de8c7f7b8838a6dac9"}}, "hash": "5f778905c3cbbf686780fb0f98b5607f84dce7aed5ff294d5ac442c4ba172c9f", "text": "1041-4347 (c) 2021 IEEE. Personal use is permitted, but republication/redistribution requires IEEE permission. See http://www.ieee.org/publications_standards/publications/rights/index.html for more information.This article has been accepted for publication in a future issue of this journal, but has not been fully edited. Content may change prior to final publication. Citation information: DOI 10.1109/TKDE.2021.3080293, IEEE\nTransactions on Knowledge and Data Engineering\n2\nholding\ninvestmentmanager\nsupervisormanager\nsupervisordirector\nsupervisor\ndirectormanager\nmanagersupervisorholding\nFig. 1. 
(Best viewed in color.) Example of company structure. CRV usu-\nally considers two aspects of the company\u2019s structure: 1) af\ufb01liates, i.e.,\nthe relevant information of the company and its af\ufb01liates (for example,\nfounded, acquired, and invested subsidiaries); 2) members, i.e., the\nrelevant information of company\u2019s core member (for example, manager,\nsupervisor, etc). Considering privacy, we use cartoon characters for re-\nplacement. Note that the companies and members can be regarded as\nentities, and the connections among them can be regarded as linkages,\nthereby all the data can naturally be constructed to a heterogeneous\ngraph.\ncial statement analysis, whereas needs precise judgments\nand heavily relies on tedious and expensive manpower.\nWith the economic development, the number of companies\nhas increased dramatically, thereby it is undoubtedly dif\ufb01-\ncult for venture capital \ufb01rms to conduct large amount of\ncompany valuation screening on interested companies. In\nresult, it urgently needs automatic or semi-automatic CRV\ntechnology by applying machine learning models.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "5ab25872-e533-42d3-a732-7008791988df": {"__data__": {"id_": "5ab25872-e533-42d3-a732-7008791988df", "embedding": null, "metadata": {"page_label": "2", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "da0f0d1b-08a7-404e-971f-d4ac86f075c1", "node_type": "4", "metadata": {"page_label": "2", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "39233ab9964ec63ed25c7b38225e24fa8dd469d5b6424a91d03b0aeb40f84b69"}, "2": {"node_id": "149632c1-0f93-4a36-aa68-a8121850fa4a", "node_type": "1", "metadata": {"page_label": "2", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "5f778905c3cbbf686780fb0f98b5607f84dce7aed5ff294d5ac442c4ba172c9f"}, "3": {"node_id": "8dccc7fc-46fe-45a1-84b8-d6f1dbfcc380", "node_type": "1", "metadata": {"page_label": "2", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f2582c82ab94a469fce476c6d34834ea603c9caf326411a06582dfe68c6a3b9b"}}, 
"hash": "2a1ca63c0aac62f04fc93ce637cb30109dd0c5e1a422e1de8c7f7b8838a6dac9", "text": "As a matter of fact, relative valuation performed man-\nually without \ufb01nancial statements always considers three\nfactors: 1) the core resources of the company and its af\ufb01li-\nates, such as the basic information, business conditions, and\nintellectual properties; 2) the information of core members\nof the company, such as member\u2019s background, resume, and\nin\ufb02uence; and 3) the valuation of competitors within the\nsame industry. Naturally, as shown in Figure 1, these com-\npanies and members construct a complex heterogeneous\nmulti-modal graph. In detail, there are two types of nodes\nin the graph, i.e., companies and members. Meanwhile, at-\ntributes of nodes constitute multi-modal data, i.e., different\ntypes of nodes have various descriptions. Besides, there\nappear multiple types of graph linkages, i.e., company-\ncompany, company-member, and member-member. There-\nfore, by comprehensively modeling the corporate/personal\nattributes and the linkages among them, we can obtain new\nlatent embeddings to describe the company, which can be\nfurther utilized in corporate valuation task. This learning\nprocedure is also con\ufb01rmed with professional domain ex-\nperts\u2019 operation in reality.\nInspired by the observations above, we develop HM2,\na heterogeneous graph neural network for corporate rela-tive valuation. HM2is a deep graph network, which can\nacquire discriminative embeddings of the company node by\nencoding heterogeneous neighbors comprehensively. Differ-\nent from previous HGNNs, HM2can effectively capture\nthe relationships among nodes, and design speci\ufb01c struc-\ntural loss function to improve the \ufb01nal performance.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "8dccc7fc-46fe-45a1-84b8-d6f1dbfcc380": {"__data__": {"id_": "8dccc7fc-46fe-45a1-84b8-d6f1dbfcc380", "embedding": null, "metadata": {"page_label": "2", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "da0f0d1b-08a7-404e-971f-d4ac86f075c1", "node_type": "4", "metadata": {"page_label": "2", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "39233ab9964ec63ed25c7b38225e24fa8dd469d5b6424a91d03b0aeb40f84b69"}, "2": {"node_id": "5ab25872-e533-42d3-a732-7008791988df", "node_type": "1", "metadata": {"page_label": "2", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": 
"2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "2a1ca63c0aac62f04fc93ce637cb30109dd0c5e1a422e1de8c7f7b8838a6dac9"}, "3": {"node_id": "cf93ae05-6a0e-4a29-8c55-9a84241ae99b", "node_type": "1", "metadata": {"page_label": "2", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "b81ebf5c42b49f9c4e56ee9c5b3e54c1072c4d70c58cc23ae7dc1b999dd5f332"}}, "hash": "f2582c82ab94a469fce476c6d34834ea603c9caf326411a06582dfe68c6a3b9b", "text": "In\ndetail, based on obtained heterogeneous neighbors of the\ninput company, HM2aggregates node attributes via the\nlinkage-aware multi-head attention mechanism [8], which\neffectively incorporates the relationships into node embed-\nding. Then, HM2utilizes adaptive weighted ensemble to\naggregate multi-modal node embedding, which can capture\nmodal interactions and get more descriptive capabilities.\nMoreover, the loss function includes the extra triplet loss,\nwhich considers the structure with competitors\u2019 embed-\ndings except for normal company valuation loss, and aims\nto enhance the embedding presentation capability by multi-\ntask operator. To the best of our knowledge, we are the\n\ufb01rst to formalize the corporate relative valuation into an in-\nductive learning problem considering heterogeneous graph\nstructure. To summarize, the main contributions are:\n\u2022We formalize the corporate relative valuation as the\nheterogeneous multi-modal graph structure, which in-\ncludes heterogeneous nodes, linkages and multi-modal\nnode attributes in speci\ufb01c;\n\u2022We develop HM2, a heterogeneous multi-modal graph\nneural network, which considers heterogeneous nodes\nand linkages for node embeddings comprehensively,\nand combines speci\ufb01c structure loss for \ufb01nal prediction.\nIn result, HM2can be effectively applied to corporate\nrelative valuation;\n\u2022We conduct extensive experiments on collected real-\nworld corporate valuation dataset, and our results\ndemonstrate the effectiveness of HM2.\n2 P RELIMINARIES\nIn this section, we declare our motivation, deliver the\nde\ufb01nition of CRV with heterogeneous company-member\ngraph, and then introduce the adopted real-world data. 
\n2 PRELIMINARIES\nIn this section, we state our motivation, give the definition of CRV on a heterogeneous company-member graph, introduce the adopted real-world data, and review existing heterogeneous graph neural networks.\n2.1 Motivation\nIn real applications, as shown in Figure 1, relative valuation performed manually always considers two factors [9, 10]: 1) the company and its affiliates.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "cf93ae05-6a0e-4a29-8c55-9a84241ae99b": {"__data__": {"id_": "cf93ae05-6a0e-4a29-8c55-9a84241ae99b", "embedding": null, "metadata": {"page_label": "2", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "da0f0d1b-08a7-404e-971f-d4ac86f075c1", "node_type": "4", "metadata": {"page_label": "2", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "39233ab9964ec63ed25c7b38225e24fa8dd469d5b6424a91d03b0aeb40f84b69"}, "2": {"node_id": "8dccc7fc-46fe-45a1-84b8-d6f1dbfcc380", "node_type": "1", "metadata": {"page_label": "2", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f2582c82ab94a469fce476c6d34834ea603c9caf326411a06582dfe68c6a3b9b"}}, "hash": "b81ebf5c42b49f9c4e56ee9c5b3e54c1072c4d70c58cc23ae7dc1b999dd5f332", "text": "2) the core members of the company. Therefore, similar to a citation network (author-article) [11, 12], the companies and members can be regarded as entities, and the connections among them can be regarded as linkages. Consequently, the data naturally construct a complex heterogeneous multi-modal graph. Essentially, this is unstructured data because: 1) the graph size is arbitrary, the topological structure is complex, and there is no spatial locality as in images; 2) the graph does not have a fixed order of nodes; and 3) the graph is dynamic and contains multi-modal features. If we directly concatenate the company embedding and the member embedding as a single example, the neighbor representation and structural information of the sample cannot be considered.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "3208e4e4-6c7f-4aa7-b717-c3f544ddd70e": {"__data__": {"id_": "3208e4e4-6c7f-4aa7-b717-c3f544ddd70e", "embedding": null, "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "a1f841c7-791b-4aa2-925a-4baae7050de3", "node_type": "4", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "caf398a52bcc68240a58e796587532bd0f4a4d87fb673d1c07323be7deb7246c"}, "3": {"node_id": "fdd0a876-3845-448d-936c-2c6e66149219", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "9ae21d22ef1682ea6b13093839fecb566b1d0f71b600ba7f7272b924aa897329"}}, "hash": "986b646d1baeb6573ca4a8777f5dea57c7552d03f3d3839ff2a3e676af18a9fd", "text": "Fig. 2. (Best viewed in color.) The illustration of a heterogeneous multi-modal graph. There exist various types of nodes; we use two types here for simplicity (blue and yellow). Meanwhile, the linkages among nodes are also of multiple types, i.e., blue, black, and yellow solid lines. Different types of nodes can be represented by various attributes.\nTherefore, in this paper, we develop a deep heterogeneous graph method for CRV. The experimental comparison with traditional methods also verifies the effectiveness of the graph embedding.\n2.2 Problem Definition\nFirst, we formalize the definition of a heterogeneous graph with multi-modal information.\nDefinition 1. Heterogeneous Multi-Modal Graph (HMMG) (also known as Content-associated Heterogeneous Graph [13]). As shown in Figure 2, an HMMG is defined as a graph G = (V, E, C_V, C_E) with node set V, linkage set E, node type set C_V, and linkage type set C_E.
Different from a homogeneous graph, whose nodes belong to a single type, an HMMG has multiple node types, and different types of nodes have different linkage representations.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "fdd0a876-3845-448d-936c-2c6e66149219": {"__data__": {"id_": "fdd0a876-3845-448d-936c-2c6e66149219", "embedding": null, "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "a1f841c7-791b-4aa2-925a-4baae7050de3", "node_type": "4", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "caf398a52bcc68240a58e796587532bd0f4a4d87fb673d1c07323be7deb7246c"}, "2": {"node_id": "3208e4e4-6c7f-4aa7-b717-c3f544ddd70e", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "986b646d1baeb6573ca4a8777f5dea57c7552d03f3d3839ff2a3e676af18a9fd"}, "3": {"node_id": "9ae16e2b-7abd-417e-b390-4d50d52a7ae1", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "61f87d7b0b963778089600deab180d527d8511d837bc4b0721148aa5210e49f4"}}, "hash": "9ae21d22ef1682ea6b13093839fecb566b1d0f71b600ba7f7272b924aa897329", "text": "Moreover, the attributes of different types of nodes constitute multi-modal data, i.e., different types of nodes have attribute representations of different raw dimensions.\nWith Definition 1, we can observe that the company penetration graph is actually an HMMG with two types of nodes. In detail, the node type set C_V includes: company and member, and the linkage type set C_E includes: company-company, company-member, and member-member.
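As an illustrative aside (not from the paper), a minimal Python container for such an HMMG, with typed nodes, typed linkages, and per-type attribute dimensions, might look as follows; the field names are our assumptions, and the feature sizes anticipate the dataset details given in Section 2.3.

    from dataclasses import dataclass, field
    from typing import Dict, List, Tuple

    @dataclass
    class HMMG:
        # node type -> node id -> attribute vector (dimensions differ per type)
        node_features: Dict[str, Dict[int, List[float]]] = field(default_factory=dict)
        # linkage type as a (source type, target type) pair -> (src, dst) id pairs
        edges: Dict[Tuple[str, str], List[Tuple[int, int]]] = field(default_factory=dict)

    g = HMMG()
    g.node_features["company"] = {0: [0.0] * 132}  # 132-dim company attributes (Sec. 2.3)
    g.node_features["member"] = {7: [0.0] * 50}    # 5-dim profile + 45-dim node2vec
    g.edges[("company", "member")] = [(0, 7)]      # e.g., a CEO linkage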
 Then, we can define the corporate relative valuation.\nProblem 1. Corporate Relative Valuation (CRV). CRV aims to estimate the relative valuation or value level, i.e., a regression or classification problem, without financial statement data. CRV is widely used for startups and unlisted companies, and always considers the core resources, members, and competitors of the input company. Traditional CRV usually relies on experienced experts.\nIn summary, we now define the CRV problem with HMMG representation learning. Without loss of generality, we provide both approximate (coarse-grained) and accurate (fine-grained) valuations of the input companies in experiments.\nTABLE 1\nDatasets used in this work. ICV denotes Internet corporate valuation, CRV represents corporate relative valuation level, and BC represents business class.\nData | Node | Edge | CRV | BC\nICV | Company: 4362, Member: 6877 | Company-Company: 5106, Company-Member: 13123, Member-Member: 28224 | 4 | 7", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "9ae16e2b-7abd-417e-b390-4d50d52a7ae1": {"__data__": {"id_": "9ae16e2b-7abd-417e-b390-4d50d52a7ae1", "embedding": null, "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "a1f841c7-791b-4aa2-925a-4baae7050de3", "node_type": "4", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "caf398a52bcc68240a58e796587532bd0f4a4d87fb673d1c07323be7deb7246c"}, "2": {"node_id": "fdd0a876-3845-448d-936c-2c6e66149219", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "9ae21d22ef1682ea6b13093839fecb566b1d0f71b600ba7f7272b924aa897329"}, "3": {"node_id": "e800fd76-a7cd-4f5a-9cde-883692f09be1", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "cc6f6e2ca34d7081e35822ba57b4d88d57877a943194651c3b394d1966e4835a"}}, "hash": "61f87d7b0b963778089600deab180d527d8511d837bc4b0721148aa5210e49f4", "text": "[Figure 3(a) residue: CRV bar chart over the bins 100-200, 200-300, 300-400, and 400- (million), with counts from 0 to 1000]", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "e800fd76-a7cd-4f5a-9cde-883692f09be1": {"__data__": {"id_": "e800fd76-a7cd-4f5a-9cde-883692f09be1", "embedding": null, "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "a1f841c7-791b-4aa2-925a-4baae7050de3", "node_type": "4", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "caf398a52bcc68240a58e796587532bd0f4a4d87fb673d1c07323be7deb7246c"}, "2": {"node_id": "9ae16e2b-7abd-417e-b390-4d50d52a7ae1", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "61f87d7b0b963778089600deab180d527d8511d837bc4b0721148aa5210e49f4"}, "3": {"node_id": "b5dfda59-5a79-44a9-96b9-8abe4da3d725", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "7288d30959dcc5c0df03770a141639f20853df32a4288d744923b2bed7b0a7bd"}}, "hash": "cc6f6e2ca34d7081e35822ba57b4d88d57877a943194651c3b394d1966e4835a", "text": "[Figure 3(b) residue: business-class bar chart over software, scientific, commercial, e-retailing, financial, entertainment, and others, with counts from 0 to 1400]\nFig. 3. Data visualization. (a) is the number of each class in relative valuation level (CRV) and (b) is the number of each class in business category (BC).\nDefinition 2. Heterogeneous Multi-Modal Graph Network for Corporate Relative Valuation.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "b5dfda59-5a79-44a9-96b9-8abe4da3d725": {"__data__": {"id_": "b5dfda59-5a79-44a9-96b9-8abe4da3d725", "embedding": null, "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "a1f841c7-791b-4aa2-925a-4baae7050de3", "node_type": "4", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "caf398a52bcc68240a58e796587532bd0f4a4d87fb673d1c07323be7deb7246c"}, "2": {"node_id": "e800fd76-a7cd-4f5a-9cde-883692f09be1", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "cc6f6e2ca34d7081e35822ba57b4d88d57877a943194651c3b394d1966e4835a"}, "3": {"node_id": "7135cb16-0f14-40f0-8b27-3b0624544a2e", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous 
MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "0a7fe876d9988586b92f9ce133b4007c8588478661a05a9a8635f005d9daa779"}}, "hash": "7288d30959dcc5c0df03770a141639f20853df32a4288d744923b2bed7b0a7bd", "text": "Heterogeneous Multi-Modal Graph Network\nfor Corporate Relative Valuation. Given an HMMG\nG= (V,E,C V,CE), each corporate node v1\niinGhas\nits own attribute x1\ni, and is with two ground truth,\ni.e.,yb\ni\u2208RLbdenotes the business category, with Lb\nrepresents the dimension, and yp\ni\u2208RLpdenotes the\ncorporate relative valuation level, with Lpalso denotes\nthe dimension. Besides, each member node v2\njinGalso\nhas corresponding descriptions x2\nj. The task is to design\na modelfthat able to estimate corporate relative valua-\ntion level yp\niof these companies, and the key challenge\noffis to learn company\u2019s embedding, which encodes\nboth structural relationships and node attributes.\nNote that the ambition is to estimate the corporate value,\nthereby we concentrate on the embedding of the company\nnodes, i.e.,v1\ni, in this paper.\n2.3 Data Descriptions\nThe real-world corporate valuation dataset is provided by\nour business partner, and consists of companies in the\ninternet industry. There are several reasons to utilize the\ndata from the internet industry: 1) With the development\nof the Internet, most of the recent emerging companies are\nbelong to the internet industry, and serious data missing\nis a universal problem among companies in other different\ndomains; 2) Considering the cost of data collection, Internet\nindustry takes up the largest number of companies in the\ncollected data; and 3) The heterogeneous graphs of internet\ncompanies have the relevance island problem with the het-\nerogeneous graphs of other domains, i.e., few connections\nbetween two domains\u2019 heterogeneous graphs. 
Thereby it\nis dif\ufb01cult to consider all domains in one uni\ufb01ed graph.\nNote that these companies are mostly startups or unlisted\ncompanies, and are in need of relative valuation.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "7135cb16-0f14-40f0-8b27-3b0624544a2e": {"__data__": {"id_": "7135cb16-0f14-40f0-8b27-3b0624544a2e", "embedding": null, "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "a1f841c7-791b-4aa2-925a-4baae7050de3", "node_type": "4", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "caf398a52bcc68240a58e796587532bd0f4a4d87fb673d1c07323be7deb7246c"}, "2": {"node_id": "b5dfda59-5a79-44a9-96b9-8abe4da3d725", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "7288d30959dcc5c0df03770a141639f20853df32a4288d744923b2bed7b0a7bd"}}, "hash": "0a7fe876d9988586b92f9ce133b4007c8588478661a05a9a8635f005d9daa779", "text": "The data\nAuthorized licensed use limited to: Univ of Calif Santa Barbara. Downloaded on June 20,2021 at 08:36:01 UTC from IEEE Xplore. 
Restrictions apply.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "f503b09a-2136-4927-b4b4-4562aedd74db": {"__data__": {"id_": "f503b09a-2136-4927-b4b4-4562aedd74db", "embedding": null, "metadata": {"page_label": "4", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "3263090c-90e3-4b9d-b402-69d662046a2f", "node_type": "4", "metadata": {"page_label": "4", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "03173012074ee8c8aa5aa5c45c0597ec570c256c427f4f26aedc9cbfb4878045"}, "3": {"node_id": "75f0b359-23e9-4147-81a2-c6dd02c273b6", "node_type": "1", "metadata": {"page_label": "4", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "71566724382835eac9a42fd96426f3c26e521e046a48c0eec634b0a3b085d720"}}, "hash": "24e1310dfcb3b63821d28da7b093b2f77d9a9e1cb0f11c9d1c7aae19f09e7131", "text": "1041-4347 (c) 2021 IEEE. Personal use is permitted, but republication/redistribution requires IEEE permission. See http://www.ieee.org/publications_standards/publications/rights/index.html for more information.This article has been accepted for publication in a future issue of this journal, but has not been fully edited. Content may change prior to final publication. Citation information: DOI 10.1109/TKDE.2021.3080293, IEEE\nTransactions on Knowledge and Data Engineering\n4\ncan be represented as HMMG in De\ufb01nition 1 naturally. The\noriginal data will be published after permission.\nIn detail, the graph consists two types of nodes: com-\npany and core member, the corporate and member nodes\nare associated with their own attributes, i.e., company has\n132 dimensional features, which cover basic information,\nlegal proceedings, business conditions, intellectual property\nand so on. Member has 5 dimensional features, which are\nextracted from personal information. And member nodes\nare concatenated with 45 dimensional embeddings using\nnode2vec [14]. Besides, the linkages constitutes three types:\n1)company-company linkages have two predicates, i.e., invest-\nment, acquisition. We present the representation of linkage\nas investment ratio, and acquisition is denoted by 1. 2)\ncompany-member linkages have nine predicates, for example,\nchief executive of\ufb01cer (CEO), chief operating of\ufb01cer (COO),\nchief technology of\ufb01cer (CTO), chief \ufb01nancial of\ufb01cer (CFO)\nand related derivative positions. 
We apply one-hot encoding to the company-member linkages. 3) member-member linkages have one predicate, i.e., whether the two members belong to the same company. The main statistics of the dataset are shown in Table 1.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "75f0b359-23e9-4147-81a2-c6dd02c273b6": {"__data__": {"id_": "75f0b359-23e9-4147-81a2-c6dd02c273b6", "embedding": null, "metadata": {"page_label": "4", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "3263090c-90e3-4b9d-b402-69d662046a2f", "node_type": "4", "metadata": {"page_label": "4", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "03173012074ee8c8aa5aa5c45c0597ec570c256c427f4f26aedc9cbfb4878045"}, "2": {"node_id": "f503b09a-2136-4927-b4b4-4562aedd74db", "node_type": "1", "metadata": {"page_label": "4", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "24e1310dfcb3b63821d28da7b093b2f77d9a9e1cb0f11c9d1c7aae19f09e7131"}, "3": {"node_id": "b9f23634-baae-42a9-94a0-2c41e14a6959", "node_type": "1", "metadata": {"page_label": "4", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "9610d5ab293d8ef77282b43598950bcf78afc6c1bd2edf57616b46be18878dfe"}}, "hash": "71566724382835eac9a42fd96426f3c26e521e046a48c0eec634b0a3b085d720", "text": "The main statistics of the dataset are shown in Table 1.\nMoreover, the relative valuation level has 4 categories, i.e., 100 million to 200 million, 200 million to 300 million, 300 million to 400 million, and above 400 million, and the business class (BC) contains 7 categories, i.e., software service, scientific research and technology service, commercial service, e-retailing service, financial service, entertainment service, and others. The visualizations of CRV and BC are shown in Figure 3, and the figure reveals that the instances are distributed evenly among the valuation categories, but are unbalanced among the business categories. Therefore, the relevance of instances within each business field needs to be considered effectively. Note that we conduct experiments with the real corporate value after applying the log operator.\n2.4 Heterogeneous Graph Neural Network\nIn this section, we present a generic definition of heterogeneous graph neural networks (HGNN). HGNN is mainly based on a neighbor aggregation architecture, which emphasizes processing different types of nodes respectively [13, 15, 16]. In detail, an HGNN usually samples different types of neighbors for each input node, then encodes them respectively, and finally aggregates the different embeddings into a uniform embedding. The key idea of HGNN is to process the various types of neighbors of a node v_i, which can be commonly expressed as [13, 15]:", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "b9f23634-baae-42a9-94a0-2c41e14a6959": {"__data__": {"id_": "b9f23634-baae-42a9-94a0-2c41e14a6959", "embedding": null, "metadata": {"page_label": "4", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "3263090c-90e3-4b9d-b402-69d662046a2f", "node_type": "4", "metadata": {"page_label": "4", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "03173012074ee8c8aa5aa5c45c0597ec570c256c427f4f26aedc9cbfb4878045"}, "2": {"node_id": "75f0b359-23e9-4147-81a2-c6dd02c273b6", "node_type": "1", "metadata": {"page_label": "4", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "71566724382835eac9a42fd96426f3c26e521e046a48c0eec634b0a3b085d720"}, "3": {"node_id": "cc641909-3146-4abe-85a0-ec8f94a34287", "node_type": "1", "metadata": {"page_label": "4", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "cd09e3478febe75da8c8f405179c2767f096ec5c13bb8285c62251f648d63ca4"}}, "hash": "9610d5ab293d8ef77282b43598950bcf78afc6c1bd2edf57616b46be18878dfe", "text": "f^t(v_i) = (1 / |N_t(v_i)|) \u2211_{v\u2032 \u2208 N_t(v_i)} [ \u2192LSTM{x(v\u2032)} \u2295 \u2190LSTM{x(v\u2032)} ],   (1)\nwhere t denotes the node type, N_t(\u00b7) is the neighbor set of the input node, |N_t(\u00b7)| is the set size, x(\u00b7) represents the attribute of a node, \u2192LSTM and \u2190LSTM are the forward and backward LSTMs, and \u2295 denotes concatenation. 
The single LSTM can be formulated as:\nz_i = \u03c3(W_z x(v\u2032) + U_z h_{i-1} + b_z),\ng_i = \u03c3(W_g x(v\u2032) + U_g h_{i-1} + b_g),\no_i = \u03c3(W_o x(v\u2032) + U_o h_{i-1} + b_o),\n\u0109_i = tanh(W_c x(v\u2032) + U_c h_{i-1} + b_c),\nc_i = g_i \u2299 c_{i-1} + z_i \u2299 \u0109_i,\nh_i = tanh(c_i) \u2299 o_i,\nwhere h_i is the hidden state of the i-th node, W_j, U_j, b_j (j \u2208 {z, g, o, c}) are learnable parameters, and z_i, g_i, and o_i are the forget gate, input gate, and output gate vectors of the i-th node, respectively; \u2299 denotes the element-wise product.\nHere the LSTM module employs a Bi-LSTM [17] to capture deep feature interactions. The Bi-LSTM operates on an unordered content set, which is inspired by previous work [18] on aggregating unordered neighbors. In detail, the LSTM-based module first transforms different neighbors of the same type into a common embedding space, then employs the Bi-LSTM to accumulate deep feature representations of all neighbors, and finally utilizes a mean pooling operator over all hidden states to obtain the general content embeddings.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, 
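For intuition, a minimal sketch of this Bi-LSTM aggregator of Eq. (1), assuming PyTorch; the class name, layer sizes, and the shared projection layer are our illustrative choices, not the paper's implementation.

    import torch
    import torch.nn as nn

    class NeighborBiLSTM(nn.Module):
        """Aggregate same-type neighbors: project, Bi-LSTM, mean-pool (cf. Eq. 1)."""
        def __init__(self, in_dim: int, hid_dim: int):
            super().__init__()
            self.proj = nn.Linear(in_dim, hid_dim)  # common embedding space
            self.bilstm = nn.LSTM(hid_dim, hid_dim, batch_first=True, bidirectional=True)

        def forward(self, neighbor_attrs: torch.Tensor) -> torch.Tensor:
            # neighbor_attrs: (batch, |N_t(v_i)|, in_dim), an unordered neighbor set
            states, _ = self.bilstm(self.proj(neighbor_attrs))  # concatenated forward/backward states
            return states.mean(dim=1)  # mean pooling over all hidden states

    # Usage: aggregate ten 132-dim company neighbors into one 128-dim vector
    agg = NeighborBiLSTM(in_dim=132, hid_dim=64)
    out = agg(torch.randn(1, 10, 132))  # shape: (1, 128)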
"text": "HGNN establishes corresponding LSTM models for dif-\nferent types of nodes and fuses them to obtain the \ufb01nal\nfeature embeddings. However, it is notable that Bi-LSTM\nin Equation 1 acts as a multi-instance learning operator,\nwhich only aggregates the information of heterogeneous\nneighbors, yet has not considered the linkages among neigh-\nbors. Therefore, Bi-LSTM may lose more information during\nfeature embedding.\n3 P ROPOSED METHOD\nThe usage of HGNN for CRV task mainly faces the follow-\ning challenges: 1) Nodes in heterogeneous graph connect to\ndifferent types of neighbors, and the number of their neigh-\nbors varies, for example, member nodes usually contains\nmore neighbors than company nodes. Thus, we need to\ndesign an effective neighbor sampling method to consider\nboth the number and type of sampling comprehensively.\n2) Heterogeneous neighbors contain different modal fea-\nture descriptions, and the linkages among homogeneous\nand heterogeneous neighbors are also inconsistent. There-\nfore, we need to design corresponding fusion networks for\nheterogeneous neighbors, and consider the linkages when\nlearning embeddings. 3) Different types of neighbors con-\ntribute differently to node embeddings in heterogeneous\ngraph, thus we need to adaptively learn the weights of\nheterogeneous nodes for \ufb01nal fusion.\nBased on the considerations above, we formally present\nHM2, which consists of there modules: 1) modal attribute\nencoding module, 2) multi-modal aggregation module, and\n3) valuation loss module.\n\u2022Modal Attribute Encoding Module: This module en-\ncodes each type of neighbors respectively after neigh-\nbor sampling, i.e., single modal attribute embedding.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "cfd15aea-e645-44ec-bcf9-41c9d785616d": {"__data__": {"id_": "cfd15aea-e645-44ec-bcf9-41c9d785616d", "embedding": null, "metadata": {"page_label": "4", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "3263090c-90e3-4b9d-b402-69d662046a2f", "node_type": "4", "metadata": {"page_label": "4", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "03173012074ee8c8aa5aa5c45c0597ec570c256c427f4f26aedc9cbfb4878045"}, "2": {"node_id": "cc641909-3146-4abe-85a0-ec8f94a34287", "node_type": "1", "metadata": {"page_label": "4", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": 
"2023-11-30"}, "hash": "cd09e3478febe75da8c8f405179c2767f096ec5c13bb8285c62251f648d63ca4"}}, "hash": "e11fd43e67192c1ef152165a8d046c2fe84613463fda4bd32bb289ed9ffd539d", "text": "The key idea is to take both nodes\u2019 attributes and\ntheir relations into consideration for learning overall\nembedding;\n\u2022Multi-Modal Aggregation Module: This module adap-\ntively aggregates different types of neighbors and input\nnode itself, i.e., the multi-modal embedding weighted\naggregation. The key idea is to learn adaptive weights\nfor each modal information;\n\u2022Valuation Loss Module: This module considers the\nloss of valuation prediction, while incorporates the\nsimilarities of different corporate node embeddings in\nthe same business category, which aims to regularize\nthe consistence.\nAuthorized licensed use limited to: Univ of Calif Santa Barbara. Downloaded on June 20,2021 at 08:36:01 UTC from IEEE Xplore. Restrictions apply.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "5973208a-25fa-4746-8bea-376b878de666": {"__data__": {"id_": "5973208a-25fa-4746-8bea-376b878de666", "embedding": null, "metadata": {"page_label": "5", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "23ca55f5-7668-4c89-a498-8b346fb59b6b", "node_type": "4", "metadata": {"page_label": "5", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "25a000acde2316443d608565399296dc71d4ccafe753fc2303632501145e19f2"}, "3": {"node_id": "ef9507bd-7173-4e94-8300-74eb90b374b3", "node_type": "1", "metadata": {"page_label": "5", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "29d3fb9cb703e19c92142732908cb048af875dde00995ded4ae5bb12e1656378"}}, "hash": "6c02e7e04e388f89ca606bc185649c73bbee934f6535f03888b58faf7f04f54a", "text": "1041-4347 (c) 2021 IEEE. Personal use is permitted, but republication/redistribution requires IEEE permission. See http://www.ieee.org/publications_standards/publications/rights/index.html for more information.This article has been accepted for publication in a future issue of this journal, but has not been fully edited. Content may change prior to final publication. 
Citation information: DOI 10.1109/TKDE.2021.3080293, IEEE\nTransactions on Knowledge and Data Engineering\n5\nX\u2026\n\u2026\n\u2026\nXPrediction LossNeighbor Sampling Modal Attribute Encoding Multi -Modal Aggregation\n\ud835\udefd \u2026\u2026\u2026\\\u2a02\u2026Multi -Head \nAttentionMulti -Head \nAttention\u2026\u2026\n\u2026Multi -Head \nAttentionMulti -Head \nAttention\u2026\nFig. 4. The overall architecture of HM2. From left to right, HM2\ufb01rst samples \ufb01x sized neighbors, which include heterogeneous types. Then it encodes\neach modal attributes via deep network with multi-head attention mechanism, and aggregates multi-modal embedding through adaptive attention.\nFinally, it develops the loss via corporate valuation with structural triplet regularization.\nTABLE 2\nDescription of symbols.\nSym.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "ef9507bd-7173-4e94-8300-74eb90b374b3": {"__data__": {"id_": "ef9507bd-7173-4e94-8300-74eb90b374b3", "embedding": null, "metadata": {"page_label": "5", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "23ca55f5-7668-4c89-a498-8b346fb59b6b", "node_type": "4", "metadata": {"page_label": "5", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "25a000acde2316443d608565399296dc71d4ccafe753fc2303632501145e19f2"}, "2": {"node_id": "5973208a-25fa-4746-8bea-376b878de666", "node_type": "1", "metadata": {"page_label": "5", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "6c02e7e04e388f89ca606bc185649c73bbee934f6535f03888b58faf7f04f54a"}, "3": {"node_id": "a0415ba1-08ac-4705-83e9-b34878db7c1b", "node_type": "1", "metadata": {"page_label": "5", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "1ece6ffec5d12b6c35f00c8f3a404bcde655f8b045a08d743f0c68ae43b6ec37"}}, "hash": "29d3fb9cb703e19c92142732908cb048af875dde00995ded4ae5bb12e1656378", "text": "TABLE 2\nDescription of symbols.\nSym. 
Sym. | Definition\nV | set of nodes with different types, i.e., v^1 (company), v^2 (member)\nE | set of edges with different types\nC_V | T types of node: company and member\nC_E | M types of linkage: company-company, company-member, member-member\nx | attribute of each node, i.e., x^1 \u2208 R^{d_1} (company), x^2 \u2208 R^{d_2} (member)\ny | ground truth of each company node, i.e., y^b (business category), y^p (valuation level)\nf_1 | modal attribute encoding module\nq^l_j | the embedding of the j-th node in the l-th layer\n\u03b1^{l,h}_{i,j} | the learnable weight between nodes i and j in the l-th layer with the h-th head\nH_l | number of embedding aggregation heads in the l-th layer\np_{i,j} | embedding of the directed edge between nodes i and j\nat_{i,j} | predicate of the edge between nodes i and j\nf_2 | multi-modal aggregating module\nAn overview of HM2 is shown in Figure 4. Specifically, for the input company node (i.e., the dotted blue node), we first sample heterogeneous neighbors (blue and yellow nodes) of the node. Secondly, we develop separate deep feature learning networks to aggregate the information of the neighbors, combining a multi-head attention mechanism that considers the various types of linkages in the learning process. Finally, we comprehensively consider the embeddings of the input nodes and their neighbors using a self-attention mechanism to acquire the final embeddings, which are used for predicting the company\u2019s valuation. Table 2 provides the definition of the symbols used in this paper.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, 
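For illustration, PyTorch's stock nn.MultiheadAttention can stand in for one of the "Multi-Head Attention" blocks in Figure 4, with the input company attending over its sampled neighbors; note that the paper's linkage-aware variant additionally injects edge embeddings p_{i,j}, which this sketch omits, and all dimensions here are assumptions.

    import torch
    import torch.nn as nn

    mha = nn.MultiheadAttention(embed_dim=64, num_heads=4, batch_first=True)
    company = torch.randn(1, 1, 64)      # query: the input company embedding
    neighbors = torch.randn(1, 10, 64)   # keys/values: sampled neighbor embeddings
    aggregated, weights = mha(company, neighbors, neighbors)  # weights play the role of the alpha^{l,h}_{i,j}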
3.1 Modal Attribute Encoding Module

The most critical component of graph neural networks (GNNs) [18] is aggregating the attributes of neighbors to represent the input node. However, heterogeneous graphs have multiple types of nodes, rather than the single homogeneous type considered in previous methods. Therefore, embedding a heterogeneous graph faces two challenges: a) sampling heterogeneous neighbors for each node in the HMMG; and b) constructing a node encoder for each type of node in the HMMG.

Neighbor Construction

Neighbor construction aims to provide more useful structural auxiliary information for the input node, so that a more discriminative node embedding can be learned. The common approach to neighbor sampling is to sample the direct neighbors of each node, i.e., first-order neighbors. Nevertheless, first-order neighbors have several limitations, as mentioned in [19]: 1) Susceptibility to interference. Nodes have limited first-order neighbors, so noisy neighbors with incorrect relations or attributes may impair the embedding. 2) Information loss. The effects of non-direct neighbors are lost when aggregating the attributes of direct neighbors only, and a limited number of neighbors may lead to insufficient embedding; for example, node A may have five direct neighbors while node B has only three. 3) Aggregation difficulty. Sampling direct neighbors is unsuitable for aggregating heterogeneous information that contains different modal features; heterogeneous neighbors require different transformations to deal with the various feature types and dimensions. Therefore, sampling only direct neighbors may play a negative role.

To solve this problem, inspired by [13, 14], HM2 utilizes random walk sampling for each node. In detail, it contains two steps (a sketch follows the list):

• Step 1: Sampling a fixed number l of neighbors with random walk sampling. The sampling process starts from the input node v^1_i ∈ V (the superscript 1 denotes a company node) and iteratively either walks to a random neighbor of the current node or returns to v^1_i with some probability. The process ends once l nodes have been collected, i.e., N(v^1_i) with |N(v^1_i)| = l.

• Step 2: Grouping different types of neighbors. For each type, the top k_t nodes (t is the node type) are selected from N(v^1_i) according to visit frequency.

With the procedure above, HM2 can collect all types of neighbors for each node, and the most frequently visited neighbors are selected. Notably, the number of each type of node in N(v^1_i) is constrained, which ensures the balance of heterogeneous nodes. Note that HM2 focuses on the embedding learning of corporate nodes, but the member-member linkage helps to collect high-order member neighbors for each node during neighbor construction.
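To make the two-step procedure concrete, here is a minimal sketch. The adjacency list `adj`, the `node_type` map, and the per-type budget `k_per_type` are illustrative names of our own; only the walk length l, the return probability, and the per-type top-k_t selection come from the paper.

```python
import random
from collections import Counter

def sample_neighbors(adj, node_type, v, l=300, p_return=0.05, k_per_type=None):
    """Random-walk neighbor sampling for node v (Steps 1-2)."""
    # Step 1: walk until l (possibly repeated) neighbor visits are collected.
    visits, cur, collected = Counter(), v, 0
    while collected < l:
        if random.random() < p_return or not adj[cur]:
            cur = v                        # return to the input node
        else:
            cur = random.choice(adj[cur])  # step to a uniform random neighbor
        if cur != v:
            visits[cur] += 1
            collected += 1
    # Step 2: group by node type; keep the k_t most frequently visited per type.
    by_type = {}
    for node, cnt in visits.items():
        by_type.setdefault(node_type[node], []).append((cnt, node))
    return {t: [n for _, n in sorted(pairs, reverse=True)[:k_per_type[t]]]
            for t, pairs in by_type.items()}
```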
Restrictions apply.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "14f37a90-740e-4e46-bfb7-83c4f2a3a9d1": {"__data__": {"id_": "14f37a90-740e-4e46-bfb7-83c4f2a3a9d1", "embedding": null, "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "6a334eef-b779-483c-b985-4dc24e4a4a40", "node_type": "4", "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "21cc94a20e40b4e0507178618ddae4bf7e7561cbded605a5704620d292c463da"}, "3": {"node_id": "e48ccbf0-faff-42de-b267-937dfdc4c764", "node_type": "1", "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d42b05654c289d0a74bf17516c35f707aa81332320e4b1d0dc36f8b2ab0db24c"}}, "hash": "445144bbf3f448b5017cf083b0c9c5ddf9bd30f5e3a358f46c957dfd4a9adb66", "text": "1041-4347 (c) 2021 IEEE. Personal use is permitted, but republication/redistribution requires IEEE permission. See http://www.ieee.org/publications_standards/publications/rights/index.html for more information.This article has been accepted for publication in a future issue of this journal, but has not been fully edited. Content may change prior to final publication. Citation information: DOI 10.1109/TKDE.2021.3080293, IEEE\nTransactions on Knowledge and Data Engineering\n6\nneighbors for each node, and the most frequently visited\nneighbors are selected. Notably, the number of each type of\nnode inN(v1\ni)is constrained, which ensures the balance\nof heterogeneous nodes. Note that HM2focuses on the\nembedding learning of corporate nodes, but the member-\nmember linkage can help to collect high-order member\nneighbors for each node when constructing neighbor.\nModal Attribute Encoding\nThe majority of previous GNN models focus on homo-\ngeneous graphs [20, 21, 22], which ignore the impact of\nnode type. However in HMMG, different types of neigh-\nbors contribute differently to node embeddings. For exam-\nple, mature companies have stronger core resources, thus\nthe attributes of corporate nodes have a greater impact,\nwhereas the core members have a relatively large propor-\ntion of impact in several other companies for valuation.\nOn the other hand, different types of nodes have various\ndimensional attributes, which contain inconsistent physical\nmeanings. 
Therefore, it is unreasonable to directly aggregate\nheterogeneous neighbors as traditional GNN models. In\nother words, heterogeneous multi-modal neighbors require\ndifferent embedding transformations. To solve this prob-\nlem, [13, 15, 16] attempt to handle heterogeneous graph\nembedding with novel deep graph neural networks, in\nwhich heterogeneous multi-modal neighbors are encoded\nseparately, and aggregated for \ufb01nal embedding.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "e48ccbf0-faff-42de-b267-937dfdc4c764": {"__data__": {"id_": "e48ccbf0-faff-42de-b267-937dfdc4c764", "embedding": null, "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "6a334eef-b779-483c-b985-4dc24e4a4a40", "node_type": "4", "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "21cc94a20e40b4e0507178618ddae4bf7e7561cbded605a5704620d292c463da"}, "2": {"node_id": "14f37a90-740e-4e46-bfb7-83c4f2a3a9d1", "node_type": "1", "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "445144bbf3f448b5017cf083b0c9c5ddf9bd30f5e3a358f46c957dfd4a9adb66"}, "3": {"node_id": "d1ce3399-b5f0-4589-8650-afe723450e88", "node_type": "1", "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "3bbe42f899cd42a3b36fed1f0cd1f5959f8ab62aab004c613c81525289e36705"}}, "hash": "d42b05654c289d0a74bf17516c35f707aa81332320e4b1d0dc36f8b2ab0db24c", "text": "However,\nmost of these methods only encode heterogeneous neigh-\nbors with multi-instance based process, without considering\nthe relationships among nodes. But the correlations play an\nimportant role in traditional GNNs, i.e., a weighted metric\nin neighbor aggregation.\nTo model the relationships among neighboring nodes,\ninspired by recent work of attention mechanism [23], we\npropose a linkage-aware model f1, rather than directly em-\nbedding aggregation. Speci\ufb01cally, f1considers two factors:\n1) The relationships among nodes. 
Different relationships\nplay various roles in embedding, for example, investment\nand acquisition represent different af\ufb01liations between two\ncompanies, and acquisition indicates stronger relation; and\n2) The hierarchical embedding. Direct and non-direct neigh-\nbors have different impacts according to feature propaga-\ntion process mechanism [24], i.e., direct neighboring nodes\nplay relatively more important roles. In summary, f1can\naggregate homogeneous neighboring attributes, considering\nthe linkages among nodes comprehensively. Without any\nloss of generality, different types of nodes can have similar\nmodal attribute encoding modules, i.e., corporate and mem-\nber nodes have similar encoding structures except various\ndimensional input.\nTherefore, as shown in the second part of Figure 4, with\nsampled neighbors, t\u2212th type of neighbors of v1\ni(company)\nare denoted as Nt(v1\ni). We refer to the self-head attention\nmechanism, which performs embedding aggregation and\nattention computation simultaneously.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "d1ce3399-b5f0-4589-8650-afe723450e88": {"__data__": {"id_": "d1ce3399-b5f0-4589-8650-afe723450e88", "embedding": null, "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "6a334eef-b779-483c-b985-4dc24e4a4a40", "node_type": "4", "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "21cc94a20e40b4e0507178618ddae4bf7e7561cbded605a5704620d292c463da"}, "2": {"node_id": "e48ccbf0-faff-42de-b267-937dfdc4c764", "node_type": "1", "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d42b05654c289d0a74bf17516c35f707aa81332320e4b1d0dc36f8b2ab0db24c"}, "3": {"node_id": "c6a8b18c-7194-4e38-bf40-84052e54e7e5", "node_type": "1", "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "c38218ed217586747d53f23357e6513dfd863be98fec16c00716dda1f0909852"}}, "hash": "3bbe42f899cd42a3b36fed1f0cd1f5959f8ab62aab004c613c81525289e36705", "text": 
"Formally, the self-\nhead attention aggregation can be formulated as:\nql\nk=\u2211\nj\u2208Nt(i)\u222a{i}\u03b1l\nk,jql\u22121\nj, (2)\nwherel/kdenotes hidden layer index (l = 1,2,\u00b7\u00b7\u00b7,L) and\nnode index (k\u2208Nt(i)\u222a{i}),\u03b1l\nk,jis a learnable weight\nbetween nodes kandj.ql\u22121\nj denotes the embedding ofnodejofl\u22121-th layer output, where q0\nj= \u03a6(x j)is the\ntransformed representations in common space from raw\nattribute, i.e., q0\u2208Rd.\u03b1l\nk,jacts as self-attention operator,\nwhich is a single layer forward neural network, and can be\nformalized as:\n\u03b1l\nk,j=exp(\n(\u03c9\u22a4\nl(\u03a8(p k,j)\u2225atk,j)[ql\u22121\nk\u2299ql\u22121\nj]))\n\u2211\nn\u2208N t(i)\u222a{i}exp(\n(\u03c9\u22a4\nl(\u03a8(p k,n)\u2225atk,n)[ql\u22121\nk\u2299ql\u22121\nn])),\n(3)\nwhere\u03c9\u22a4\nlrepresents the weight matrix for l\u2212th layer, and\u2299\ndenotes the vector point multiplication. pk,j\u2208Rdpdenotes\nthe one-hot representation of directed edge between nodes\nkandj, and \u03a8(\u00b7) denotes mapping from raw linkage\nrepresentation to its embedding. atk,j\u2208Rrepresents the\nlink predicate, i.e., atk,jis the investment ratio for company-\ncompany linkages, and atk,j= 1 for company-member and\nmember-member linkages.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "c6a8b18c-7194-4e38-bf40-84052e54e7e5": {"__data__": {"id_": "c6a8b18c-7194-4e38-bf40-84052e54e7e5", "embedding": null, "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "6a334eef-b779-483c-b985-4dc24e4a4a40", "node_type": "4", "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "21cc94a20e40b4e0507178618ddae4bf7e7561cbded605a5704620d292c463da"}, "2": {"node_id": "d1ce3399-b5f0-4589-8650-afe723450e88", "node_type": "1", "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "3bbe42f899cd42a3b36fed1f0cd1f5959f8ab62aab004c613c81525289e36705"}, "3": {"node_id": "bf82ac3e-224b-429d-b939-e1097751eb94", "node_type": "1", "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": 
"2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "7b8574c8167e0b4798acf05b91d6c90d38c409534db4ca28d66a2129b4b31761"}}, "hash": "c38218ed217586747d53f23357e6513dfd863be98fec16c00716dda1f0909852", "text": "\u2225denotes concatenation opera-\ntor, andpk,j=atk,j=\u03beif there exists no direct linkage\nbetween nodes kandj.\u03beis always with a small value (i.e.,\n10\u22123in experiment).\nWe can also extend f1to a more general architecture, in\nwhich each layer contains a variable number of attribute\naggregation head. And multiple heads can promote the\nperformance and optimization stability. Therefore, Equation\n2 and Equation 3 can be reformulated as:\nql\nk=1\n|Hl|\u2211\nh\u2211\nj\u2208Nt(i)\u222a{i}\u03b1l,h\nk,jql\u22121\nj,\n\u03b1l,h\nk,j=exp(\n(\u03c9\u22a4\nl,h(\u03a8(p k,j)\u2225atk,j)[ql\u22121\nk\u2299ql\u22121\nj]))\n\u2211\nn\u2208N t(i)\u222a{i}exp(\n(\u03c9\u22a4\nl,h(\u03a8(p k,n)\u2225atk,n)[ql\u22121\nk\u2299ql\u22121\nn])),\n(4)\nwherehdenotes the h\u2212th head, and Hlis the number of\nheads inl\u2212th layer. Consequently, we can formalize the \ufb01nal\naggregated embedding output of t\u2212th type of neighboring\nnodes as:\nft\n1(v1\ni) =1\n|Nt(i)|\u2211\nk\u2208Nt(i)qL\nk.(5)\nThus,\u03b1l\nk,jcan well measure the relationships between input\nnodes and different types of neighbors, while considering\nthe impact of direct and non-direct neighbors. f1computes\nthe aggregated embeddings by performing a weighted ag-\ngregation of intermediate, and the learnable weight \u03b1can\neffectively overcome the two problems mentioned above.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "bf82ac3e-224b-429d-b939-e1097751eb94": {"__data__": {"id_": "bf82ac3e-224b-429d-b939-e1097751eb94", "embedding": null, "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "6a334eef-b779-483c-b985-4dc24e4a4a40", "node_type": "4", "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "21cc94a20e40b4e0507178618ddae4bf7e7561cbded605a5704620d292c463da"}, "2": {"node_id": "c6a8b18c-7194-4e38-bf40-84052e54e7e5", "node_type": "1", "metadata": {"page_label": "6", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "c38218ed217586747d53f23357e6513dfd863be98fec16c00716dda1f0909852"}}, "hash": 
"7b8574c8167e0b4798acf05b91d6c90d38c409534db4ca28d66a2129b4b31761", "text": "3.2 Multi-Modal Aggregating Module\nIn this section, we aim to aggregate different modal em-\nbeddings for \ufb01nal representation. As shown in the third\npart of Figure 4, different from concatenating multi-modal\nembedding directly [25], we turn to design a novel adaptive\nattention based network to capture more discriminative\nfeature capability. Formally, the \ufb01nal representation of v1\ni\ncan be computed as:\nf2(v1\ni) =\u2211\nj\u2208{T,i}\u03b2j\u02c6fj\n1(v1\ni),(6)\nAuthorized licensed use limited to: Univ of Calif Santa Barbara. Downloaded on June 20,2021 at 08:36:01 UTC from IEEE Xplore. Restrictions apply.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "30c02805-d52d-4048-8ddf-663b9e4d088c": {"__data__": {"id_": "30c02805-d52d-4048-8ddf-663b9e4d088c", "embedding": null, "metadata": {"page_label": "7", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "30c34778-fd84-4247-a8e8-951e5f41fd1b", "node_type": "4", "metadata": {"page_label": "7", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "4a573f135fd6e0a296538270df39d45736b84875c1602c0db0139321f7e539ec"}, "3": {"node_id": "a45cc988-0ef5-42ce-a33b-73c13e09396e", "node_type": "1", "metadata": {"page_label": "7", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "b98d367a32e513826ef13cf5772d9913aad4856685b0e4dbcb17fc1fca8f575f"}}, "hash": "08409887034256735afb66265b20014a2c553d2440448a499f2afead147a4179", "text": "1041-4347 (c) 2021 IEEE. Personal use is permitted, but republication/redistribution requires IEEE permission. See http://www.ieee.org/publications_standards/publications/rights/index.html for more information.This article has been accepted for publication in a future issue of this journal, but has not been fully edited. Content may change prior to final publication. Citation information: DOI 10.1109/TKDE.2021.3080293, IEEE\nTransactions on Knowledge and Data Engineering\n7\nwhere\u03b2jis the adaptive weights of each modal embedding,\nwhich aim to discover the relationships among different\nmodalities. \u03b2jcan be formulated as:\n\u03b2j=exp{LeakyReLU (u\u22a4\u02c6fj\n1)}\n\u2211\nkexp{LeakyReLU (uT\u02c6fk\n1)},\nwhere LeakyReLU denotes leaky version of a Recti\ufb01ed\nLinear Unit, and u\u2208R2d\u00d71is the parameter. 
\u02c6fj\n1\u2208R2d\ndenotes the concatenated embeddings:\n\u02c6fj\n1={\nfj\n1\u2225\u03a6(x1\ni), when j\u0338=i,\n\u03a6(x1\ni)\u2225\u03a6(x1\ni),when j =i.\nwhere \u03a6(x1\ni)is the mapping introduced in Section 3.1, and\n\u2225denotes concatenation operator.\n3.3 Model Training\nTo perform corporate relative valuation for input node, we\ntrain HM2from two aspects: 1) corporate relative valuation\nloss, and 2) heterogeneous graph representation structural\nloss. This constructs a multi-task learning approach, which\ncan learn more discriminative representation.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "a45cc988-0ef5-42ce-a33b-73c13e09396e": {"__data__": {"id_": "a45cc988-0ef5-42ce-a33b-73c13e09396e", "embedding": null, "metadata": {"page_label": "7", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "30c34778-fd84-4247-a8e8-951e5f41fd1b", "node_type": "4", "metadata": {"page_label": "7", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "4a573f135fd6e0a296538270df39d45736b84875c1602c0db0139321f7e539ec"}, "2": {"node_id": "30c02805-d52d-4048-8ddf-663b9e4d088c", "node_type": "1", "metadata": {"page_label": "7", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "08409887034256735afb66265b20014a2c553d2440448a499f2afead147a4179"}, "3": {"node_id": "4453f417-2325-426f-b119-22f38fcafaed", "node_type": "1", "metadata": {"page_label": "7", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "a6be382651453303d4cfa2d0e26e2abaa22e6430dc1900ff3609de8bc561115c"}}, "hash": "b98d367a32e513826ef13cf5772d9913aad4856685b0e4dbcb17fc1fca8f575f", "text": "This constructs a multi-task learning approach, which\ncan learn more discriminative representation. In result, the\noverall loss function is:\n\u2113=\u2113m+\u03bb\u2113b,\n\u2113m=\u2212\u2211\ni\u2208V1\u2211\nj1{yp\ni=j}logexp(\u03b8\u22a4\njf2(vi))\u2211\nkexp(\u03b8\u22a4\nkf2(vi)),\n\u2113b=\u2211\n\u2208Tmax{0,\u00b5 +d(f 2(vi),f2(vj))\u2212d(f 2(vi),f2(vk))},\ns.t. 
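A minimal sketch of the adaptive aggregation of Eq. (6) follows, assuming the per-type embeddings f^t_1 are handed over as a dictionary; the LeakyReLU slope is not reported in the paper, so the value below is a placeholder.

```python
import numpy as np

def leaky_relu(x, slope=0.2):          # slope is an assumption
    return np.where(x > 0, x, slope * x)

def multimodal_aggregate(f_modal, phi_x, u):
    """Adaptive attention over modal embeddings, Eq. (6).

    f_modal : dict type t -> f^t_1(v^1_i), each shape (d,)
    phi_x   : Phi(x^1_i), the node's own transformed attribute, shape (d,)
    u       : attention parameter, shape (2d,)
    """
    # hat f^j_1: each modality concatenated with the node's own embedding;
    # the j = i branch duplicates the node embedding.
    hats = [np.concatenate([f, phi_x]) for f in f_modal.values()]
    hats.append(np.concatenate([phi_x, phi_x]))
    scores = np.array([leaky_relu(u @ h) for h in hats])
    beta = np.exp(scores - scores.max())
    beta /= beta.sum()                               # adaptive weights beta_j
    return sum(b * h for b, h in zip(beta, hats))    # f_2(v^1_i)
```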
3.3 Model Training

To perform corporate relative valuation for the input node, we train HM2 from two aspects: 1) a corporate relative valuation loss, and 2) a heterogeneous graph representation structural loss. This constitutes a multi-task learning approach, which learns a more discriminative representation. As a result, the overall loss function is:

$$\begin{aligned}
\ell &= \ell_m + \lambda \ell_b,\\
\ell_m &= -\sum_{i \in V_1}\sum_j \mathbb{1}\{y^p_i = j\}\,\log\frac{\exp(\theta_j^\top f_2(v_i))}{\sum_k \exp(\theta_k^\top f_2(v_i))},\\
\ell_b &= \sum_{\langle i,j,k\rangle \in \mathcal{T}} \max\{0,\ \mu + d(f_2(v_i), f_2(v_j)) - d(f_2(v_i), f_2(v_k))\},\\
&\quad\text{s.t. } y^b_i = y^b_j \neq y^b_k,\quad y^p_i = y^p_j = y^p_k,
\end{aligned} \tag{7}$$

where ℓ_m denotes the corporate valuation loss, θ is the fully connected layer feeding the prediction layer, µ represents a manually defined margin, and d(·) is the distance measurement function between two node embeddings (we use the Euclidean distance for simplicity). Note that ℓ_b reflects the embedding effect between competitors as considered by traditional domain experts, and thus further regularizes the embedding structure within the same business category. Inspired by [26], we incorporate random walks on the graph to generate the triplets <i, j, k> ∈ T. In detail, we first generate a set of random walks in the HMMG. Then, for node i in a walk sequence, we collect a node j with the same business category y^b_i and the same valuation level y^p_i. Besides, we sample a node k with the same valuation level y^p_i but a business category different from y^b_i.

For optimizing HM2, we sample a mini-batch of triplets at each iteration and calculate the objective according to Equation (7). The model parameters are updated via the Adam optimizer [27], and an extra 10% of randomly sampled data is used for early stopping to improve generalization. With the learned model, we can conduct inductive corporate relative valuation.
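As a sanity check on Eq. (7), the objective can be written out directly. All argument names below are ours, and λ is left as a parameter since the paper tunes it by cross-validation.

```python
import numpy as np

def hm2_loss(emb, logits, y_p, triplets, lam, mu=1.0):
    """Overall objective l = l_m + lambda * l_b from Eq. (7).

    emb[i]    : final embedding f_2(v_i)
    logits[i] : class scores theta^T f_2(v_i) over valuation levels
    y_p[i]    : valuation-level label of company i
    triplets  : <i, j, k> with y^b_i = y^b_j != y^b_k and equal y^p
    """
    # l_m: softmax cross-entropy over valuation levels
    l_m = 0.0
    for i, y in y_p.items():
        z = logits[i] - logits[i].max()          # numerical stability
        l_m -= z[y] - np.log(np.exp(z).sum())
    # l_b: triplet margin loss with Euclidean distance d(.)
    l_b = sum(max(0.0, mu + np.linalg.norm(emb[i] - emb[j])
                        - np.linalg.norm(emb[i] - emb[k]))
              for i, j, k in triplets)
    return l_m + lam * l_b
```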
The training procedure of HM2 is summarized in Algorithm 1. Lines 4 and 5 correspond to the neighbor sampling module. Lines 6 and 7 calculate the proposed modal attribute encoding and the multi-modal aggregation results, respectively. Lines 8 and 9 calculate the classification loss and the triplet margin loss, respectively. In each epoch, HM2 samples mini-batches of company nodes v1 and updates the model parameters using gradient descent.

Algorithm 1 The pseudo code of HM2
Input:
  • Dataset: HMMG (V, E, C_V, C_E), attributes x, ground truth y;
  • Parameter: λ;
  • maxIter: T, learning rate: lr
Output:
  • Classifier: F
 1: Initialize the HM2 model parameters Θ;
 2: while the stop condition is not triggered do
 3:   for each mini-batch of company nodes v1 do
 4:     Gather neighbor nodes n for each node in the batch via random walk;
 5:     Select the neighbor company and member nodes with the highest frequency, N_1(v1) and N_2(v1) respectively;
 6:     Calculate f^t_1(v^1_i) according to Equation (5);
 7:     Calculate f_2(v^1_i) according to Equation (6);
 8:     Calculate ℓ_m;
 9:     Sample triplets T and calculate ℓ_b;
10:     Calculate the loss ℓ = ℓ_m + λℓ_b according to Equation (7);
11:     Update the model parameters using gradient descent;
12:   end for
13: end while
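For orientation, a schematic loop that mirrors Algorithm 1 line by line, wiring together the pieces sketched earlier; `step_fns` is a stand-in bundle of callables, not the authors' API.

```python
def chunks(seq, n):
    return [seq[i:i + n] for i in range(0, len(seq), n)]

def train_hm2(nodes, step_fns, lam, max_epochs, batch_size=32):
    """Schematic training loop mirroring Algorithm 1."""
    sample_nbrs, f1, f2, sample_triplets, loss_fn, update = step_fns
    for _ in range(max_epochs):                        # line 2: until stop
        for batch in chunks(nodes, batch_size):        # line 3: mini-batches
            embs = {}
            for v in batch:
                nbrs = sample_nbrs(v)                  # lines 4-5
                modal = {t: f1(v, ns) for t, ns in nbrs.items()}  # line 6
                embs[v] = f2(modal, v)                 # line 7
            trips = sample_triplets(batch)             # line 9
            loss = loss_fn(embs, trips, lam)           # lines 8-10
            update(loss)                               # line 11: grad. descent
```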
The details are:\n\u2022SVM: A linear method that considers single modal\nfeatures as input, in detail, we develop the attributes\nof corporate node as the input;\n\u2022MLP: A fully connected network that considers single\nmodal features as input, in detail, we develop the\nattributes of corporate node as the input;\n\u2022HetGNN: A heterogeneous graph neural network\nmodel that constructs two modules to aggregate feature\ninformation of heterogeneous nodes respectively, in\nwhich the \ufb01rst module learns embeddings of heteroge-\nneous contents with the LSTM module, and the second\nmodule aggregates embeddings of different neighbor-\ning types for obtaining the \ufb01nal node embedding [13];\n\u2022m2vec: A heterogeneous graph model that leverages\nmeta-path based random walks in heterogeneous net-\nworks to generate heterogeneous neighborhoods, then\nAuthorized licensed use limited to: Univ of Calif Santa Barbara. Downloaded on June 20,2021 at 08:36:01 UTC from IEEE Xplore. Restrictions apply.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "7343c44f-ee68-4b65-88aa-cd75f706a7e6": {"__data__": {"id_": "7343c44f-ee68-4b65-88aa-cd75f706a7e6", "embedding": null, "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e0d7717d-cfe0-4212-9290-dfa3afea7aae", "node_type": "4", "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "9a57af184d633ae88d28a9adf02309b9bec7408e52023bc5e2801fb60d74ef6e"}, "3": {"node_id": "bad287bc-422f-4d8c-93b2-9c2d64e8e462", "node_type": "1", "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "27a88a0c05fc44a9f06f7e4f9fee823623b521c17a026caba9c47e72e4739f06"}}, "hash": "d4ed1436d9eb53c2b63a75b5f20e4cf54fff666946a22ef7351b5625f648dc7f", "text": "1041-4347 (c) 2021 IEEE. Personal use is permitted, but republication/redistribution requires IEEE permission. See http://www.ieee.org/publications_standards/publications/rights/index.html for more information.This article has been accepted for publication in a future issue of this journal, but has not been fully edited. Content may change prior to final publication. 
Citation information: DOI 10.1109/TKDE.2021.3080293, IEEE\nTransactions on Knowledge and Data Engineering\n8\nTABLE 3\nCorporate relative valuation prediction results (y ), percentage denotes training data ratio. The best results are highlighted in bold.\nMetric SVM MLP KNN HetGNN GAT SAGE ASNE m2vec HAN GATNE FAME HM2\nAccuracy10% .273\u00b1.002 .247\u00b1.014 .326\u00b1.012 .307\u00b1.004 .313\u00b1.011 .304\u00b1.012 .289\u00b1.011 .299\u00b1.008 .310\u00b1.011 .304\u00b1.004 .322\u00b1.014 .346\u00b1.008\n30% .305\u00b1.003 .305\u00b1.047 .333\u00b1.005 .353\u00b1.009 .346\u00b1.008 .364\u00b1.011 .359\u00b1.002 .342\u00b1.010 .347\u00b1.011 .330\u00b1.004 .349\u00b1.002 .388\u00b1.004\n50% .336\u00b1.002 .340\u00b1.046 .347\u00b1.009 .377\u00b1.005 .378\u00b1.003 .395\u00b1.007 .387\u00b1.007 .360\u00b1.002 .393\u00b1.009 .346\u00b1.002 .380\u00b1.005 .410\u00b1.008\n70% .367\u00b1.002 .374\u00b1.029 .349\u00b1.010 .393\u00b1.008 .407\u00b1.009 .413\u00b1.010 .399\u00b1.003 .388\u00b1.016 .403\u00b1.009 .357\u00b1.007 .390\u00b1.010 .446\u00b1.007\nPrecision10% .286\u00b1.003 .172\u00b1.097 .331\u00b1.010 .", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "bad287bc-422f-4d8c-93b2-9c2d64e8e462": {"__data__": {"id_": "bad287bc-422f-4d8c-93b2-9c2d64e8e462", "embedding": null, "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e0d7717d-cfe0-4212-9290-dfa3afea7aae", "node_type": "4", "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "9a57af184d633ae88d28a9adf02309b9bec7408e52023bc5e2801fb60d74ef6e"}, "2": {"node_id": "7343c44f-ee68-4b65-88aa-cd75f706a7e6", "node_type": "1", "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d4ed1436d9eb53c2b63a75b5f20e4cf54fff666946a22ef7351b5625f648dc7f"}, "3": {"node_id": "7a3a29a2-2efe-42b9-83ef-68d88625774d", "node_type": "1", "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": 
"1042c875d48c52781ee5e9756be8c22d2b4a77c189140b5f5c3d9324d14a3812"}}, "hash": "27a88a0c05fc44a9f06f7e4f9fee823623b521c17a026caba9c47e72e4739f06", "text": "286\u00b1.003 .172\u00b1.097 .331\u00b1.010 .301\u00b1.009 .319\u00b1.313 .305\u00b1.013 .290\u00b1.014 .294\u00b1.010 .309\u00b1.011 .304\u00b1.003 .323\u00b1.011 .351\u00b1.006\n30% .341\u00b1.006 .266\u00b1.095 .337\u00b1.004 .355\u00b1.011 .339\u00b1.010 .354\u00b1.011 .361\u00b1.004 .335\u00b1.008 .346\u00b1.007 .329\u00b1.005 .351\u00b1.004 .395\u00b1.004\n50% .334\u00b1.005 .292\u00b1.102 .351\u00b1.009 .386\u00b1.007 .375\u00b1.003 .386\u00b1.007 .386\u00b1.013 .362\u00b1.005 .386\u00b1.014 .344\u00b1.003 .377\u00b1.007 .405\u00b1.012\n70% .358\u00b1.004 .384\u00b1.021 .355\u00b1.013 .385\u00b1.012 .404\u00b1.009 .408\u00b1.010 .400\u00b1.002 .385\u00b1.016 .398\u00b1.013 .354\u00b1.006 .383\u00b1.011 .428\u00b1.004\nRecall10% .273\u00b1.007 .247\u00b1.014 .326\u00b1.012 .307\u00b1.004 .313\u00b1.011 .304\u00b1.015 .289\u00b1.011 .299\u00b1.008 .310\u00b1.011 .304\u00b1.004 .322\u00b1.014 .346\u00b1.008\n30% .305\u00b1.004 .305\u00b1.047 .333\u00b1.005 .353\u00b1.009 .346\u00b1.010 .364\u00b1.011 .359\u00b1.002 .338\u00b1.010 .347\u00b1.011 .330\u00b1.004 .349\u00b1.002 .388\u00b1.004\n50% .336\u00b1.006 .340\u00b1.046 .346\u00b1.009 .377\u00b1.005 .378\u00b1.003 .395\u00b1.007 .384\u00b1.007 .358\u00b1.002 .393\u00b1.009 .346\u00b1.002 .380\u00b1.005 .410\u00b1.012\n70% .367\u00b1.005 .374\u00b1.029 .", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "7a3a29a2-2efe-42b9-83ef-68d88625774d": {"__data__": {"id_": "7a3a29a2-2efe-42b9-83ef-68d88625774d", "embedding": null, "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e0d7717d-cfe0-4212-9290-dfa3afea7aae", "node_type": "4", "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "9a57af184d633ae88d28a9adf02309b9bec7408e52023bc5e2801fb60d74ef6e"}, "2": {"node_id": "bad287bc-422f-4d8c-93b2-9c2d64e8e462", "node_type": "1", "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "27a88a0c05fc44a9f06f7e4f9fee823623b521c17a026caba9c47e72e4739f06"}, "3": {"node_id": "8b5e7d90-37d6-401c-aafe-ec7c32c0becc", "node_type": "1", "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using 
Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "759cd7e7054a33ecfc4d858a46df70fb86240b846dc44cecb4b79e5a8c9c881f"}}, "hash": "1042c875d48c52781ee5e9756be8c22d2b4a77c189140b5f5c3d9324d14a3812", "text": "005 .410\u00b1.012\n70% .367\u00b1.005 .374\u00b1.029 .349\u00b1.010 .393\u00b1.008 .407\u00b1.009 .413\u00b1.010 .399\u00b1.003 .388\u00b1.016 .403\u00b1.009 .357\u00b1.007 .390\u00b1.010 .446\u00b1.005\nF1-measure10% .175\u00b1.003 .122\u00b1.038 .326\u00b1.011 .301\u00b1.010 .313\u00b1.011 .302\u00b1.012 .288\u00b1.013 .292\u00b1.016 .301\u00b1.010 .297\u00b1.009 .322\u00b1.013 .340\u00b1.009\n30% .269\u00b1.004 .220\u00b1.085 .334\u00b1.004 .345\u00b1.016 .339\u00b1.008 .355\u00b1.012 .359\u00b1.003 .335\u00b1.012 .334\u00b1.018 .328\u00b1.005 .349\u00b1.004 .376\u00b1.005\n50% .327\u00b1.003 .275\u00b1.085 .347\u00b1.009 .377\u00b1.006 .376\u00b1.003 .389\u00b1.007 .381\u00b1.009 .359\u00b1.004 .385\u00b1.010 .343\u00b1.003 .375\u00b1.009 .400\u00b1.008\n70% .351\u00b1.004 .333\u00b1.053 .350\u00b1.011 .381\u00b1.015 .405\u00b1.009 .410\u00b1.010 .398\u00b1.003 .385\u00b1.017 .397\u00b1.013 .351\u00b1.007 .384\u00b1.011 .424\u00b1.007\nTABLE 4\nCorporate relative valuation results ( \u02c6y), percentage denotes training data ratio. The best results are highlighted in bold.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "8b5e7d90-37d6-401c-aafe-ec7c32c0becc": {"__data__": {"id_": "8b5e7d90-37d6-401c-aafe-ec7c32c0becc", "embedding": null, "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e0d7717d-cfe0-4212-9290-dfa3afea7aae", "node_type": "4", "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "9a57af184d633ae88d28a9adf02309b9bec7408e52023bc5e2801fb60d74ef6e"}, "2": {"node_id": "7a3a29a2-2efe-42b9-83ef-68d88625774d", "node_type": "1", "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "1042c875d48c52781ee5e9756be8c22d2b4a77c189140b5f5c3d9324d14a3812"}, "3": {"node_id": "f2886304-2409-4824-bbd0-bd1311c0ddbd", "node_type": "1", "metadata": {"page_label": 
"8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "6f4e234b60be15b535b902507d9f9da6cb1305000e641be010a7f16f0842ba17"}}, "hash": "759cd7e7054a33ecfc4d858a46df70fb86240b846dc44cecb4b79e5a8c9c881f", "text": "percentage denotes training data ratio. The best results are highlighted in bold.\nMetric SVM MLP KNN HetGNN GAT SAGE ASNE m2vec HAN GATNE FAME HM2\nMSE10% 4.277\u00b1.057 4.268\u00b1.212 4.188\u00b1.077 4.122\u00b1.033 5.247\u00b1.304 4.998\u00b1.176 3.822\u00b1.080 4.599\u00b1.081 4.496\u00b1.198 4.927\u00b1.060 4.796\u00b1.093 3.919\u00b1.077\n30% 4.029\u00b1.104 4.170\u00b1.250 4.165\u00b1.093 4.002\u00b1.046 4.225\u00b1.080 4.517\u00b1.096 3.823\u00b1.056 4.458\u00b1.024 4.207\u00b1.116 4.729\u00b1.029 4.607\u00b1.059 3.481\u00b1.051\n50% 3.818\u00b1.098 3.856\u00b1.114 4.143\u00b1.115 3.868\u00b1.072 3.872\u00b1.116 4.366\u00b1.207 3.638\u00b1.065 4.247\u00b1.051 3.970\u00b1.096 4.633\u00b1.022 4.561\u00b1.134 3.432\u00b1.071\n70% 3.523\u00b1.130 3.680\u00b1.219 4.127\u00b1.108 3.723\u00b1.039 3.453\u00b1.127 3.981\u00b1.043 3.487\u00b1.080 4.005\u00b1.045 3.858\u00b1.304 4.622\u00b1.030 4.475\u00b1.163 2.951\u00b1.084\nextends the skip-gram model to facilitate the modeling\nof connected nodes.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "f2886304-2409-4824-bbd0-bd1311c0ddbd": {"__data__": {"id_": "f2886304-2409-4824-bbd0-bd1311c0ddbd", "embedding": null, "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e0d7717d-cfe0-4212-9290-dfa3afea7aae", "node_type": "4", "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "9a57af184d633ae88d28a9adf02309b9bec7408e52023bc5e2801fb60d74ef6e"}, "2": {"node_id": "8b5e7d90-37d6-401c-aafe-ec7c32c0becc", "node_type": "1", "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "759cd7e7054a33ecfc4d858a46df70fb86240b846dc44cecb4b79e5a8c9c881f"}, "3": {"node_id": "d8ee0993-1f35-4e2a-b7df-df1d0fed634c", "node_type": "1", "metadata": {"page_label": "8", "file_name": 
"Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "59e1017bb3617e266d50079eebd1c21854507ee0459ab134930036eeb6fa048d"}}, "hash": "6f4e234b60be15b535b902507d9f9da6cb1305000e641be010a7f16f0842ba17", "text": "The model can preserve both the\nstructures and semantics of the given heterogeneous\nnetwork by maximizing the likelihood [28];\n\u2022ASNE: An attributed graph embedding method, which\nlearns representations for nodes by preserving both\nthe structural proximity (capturing the global network\nstructure) and attribute proximity [29];\n\u2022GraphSAGE (SAGE for simplicity): An inductive\ngraph neural network model that leverages node fea-\nture to ef\ufb01ciently generate node embeddings for un-\nseen data. The model generates embeddings through\nsampling and aggregating features from a node\u2019s lo-\ncal neighborhood by different neural networks, i.e.,\nLSTM [18];\n\u2022GAT: A graph network model, which aggregates neigh-\nbors\u2019 information by masked self-attentions [25];\n\u2022HAN: A novel heterogeneous graph neural network\nbased on the hierarchical attention, including node-\nlevel and semantic-level attentions;\n\u2022GATNE: A heterogeneous network that splits the over-\nall node embedding into three parts: base, edge, and\nattribute embedding. The base embedding and attribute\nembedding are shared among edges of different types,\nwhile the edge embedding is computed by aggregation\nof neighborhood information with the self-attention\nmechanism;\n\u2022FAME: A heterogeneous network that maps the units\nfrom different modalities into the same latent space,\nwhich can preserve both attribute semantics and multi-\ntype relations in the learned embeddings.HetGNN, ASNE and m2vec are unsupervised graph node\nembedding learning methods. Thereby we use the source\ncode provided by the authors, and modify our dataset to\nconform to their input formats. For m2vec, we employ three\nmeta-paths, i.e., company-company, company-member-\ncompany, and company-member-member-company respec-\ntively. 
In addition, the walk length is set to 300.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "d8ee0993-1f35-4e2a-b7df-df1d0fed634c": {"__data__": {"id_": "d8ee0993-1f35-4e2a-b7df-df1d0fed634c", "embedding": null, "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e0d7717d-cfe0-4212-9290-dfa3afea7aae", "node_type": "4", "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "9a57af184d633ae88d28a9adf02309b9bec7408e52023bc5e2801fb60d74ef6e"}, "2": {"node_id": "f2886304-2409-4824-bbd0-bd1311c0ddbd", "node_type": "1", "metadata": {"page_label": "8", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "6f4e234b60be15b535b902507d9f9da6cb1305000e641be010a7f16f0842ba17"}}, "hash": "59e1017bb3617e266d50079eebd1c21854507ee0459ab134930036eeb6fa048d", "text": "In addition, the walk length is set to 300. For ASNE, we employ the same content features of different modalities as HM2 and concatenate them as general features besides the latent features. For GraphSAGE and GAT, we use the same input features and sampled neighbor set for each node as HM2. With the learned embeddings, we train an MLP model to obtain a classifier or regressor as for HM2. GAT and SAGE are two transductive graph learning methods, so we use a mask to indicate training and test nodes. For all the compared methods, we use the same early stopping criterion.\n4.1 Implementation\nThe number of embedding aggregation heads H is 2, the dimension of the edge embedding p is set to 32, and the hidden layer of the node embedding q is set to 128 dimensions. f1 is a two-layer modal attribute encoding module, and f2 is a two-layer fully connected network. The batch size is set to 32, the triplet loss margin is 1.0 in the experiments, the random walk length is set to 300, and the probability of returning to the starting point is 0.05. The \u03bb in Equation 7 is tuned with cross validation. The validation set is randomly selected from the training set at a ratio of 10%. When the validation set loss does not decrease within 50 epochs, the training is stopped. For the compared methods, we adjust the hyper-parameters according to the original papers to acquire their optimal results.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "5c054979-9ff0-4480-abf0-d3cddbaf962a": {"__data__": {"id_": "5c054979-9ff0-4480-abf0-d3cddbaf962a", "embedding": null, "metadata": {"page_label": "9", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "4e3755a0-2c19-42b5-b693-15c44d7d49a9", "node_type": "4", "metadata": {"page_label": "9", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "7974425cbb776830c95e0300eafc863c309fde76b1a9e1f9dd231e7006b276f9"}, "3": {"node_id": "ed7f6e5e-afe7-4f68-b672-3a911d428a09", "node_type": "1", "metadata": {"page_label": "9", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "5c6cb7104a013fe28bc64f65e2aee96aad177f3d52210fdaa4696c6529a68f21"}}, "hash": "0332040847d898fd6390d58428245bec2784e27cef5bfa25b52eb8b0f11adc12", "text": "We implement HM2 on a server with GPU machines (Nvidia 2080ti). Code is available at: https://github.com/data-ming-and-application/HMM.\n4.2 Corporate Relative Valuation\nTo answer Q1, we design experiments to evaluate HM2 on the corporate relative valuation task. We give both approximate (coarse-grained range y) and accurate (fine-grained \u02c6y) valuations, in which \u02c6yp denotes the real corporate valuation value under the log operator.\nSimilar to the traditional node classification task, we first use the training data to build the model, then employ the learned model to predict the nodes in the test data. The ratio of training data is set to 10%, 30%, 50% and 70%, and the remaining nodes are used for testing. As a multi-class classification problem, we use Accuracy (Acc), Recall (Rec), Precision (Pre), and F1-measure as the evaluation metrics. Note that we use weighted average measures of Rec, Pre and F1 considering the data imbalance problem. 
In addition, duplicated companies are removed from the experiments. Table 3 reports the results of HM2 and comparison methods.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "ed7f6e5e-afe7-4f68-b672-3a911d428a09": {"__data__": {"id_": "ed7f6e5e-afe7-4f68-b672-3a911d428a09", "embedding": null, "metadata": {"page_label": "9", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "4e3755a0-2c19-42b5-b693-15c44d7d49a9", "node_type": "4", "metadata": {"page_label": "9", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "7974425cbb776830c95e0300eafc863c309fde76b1a9e1f9dd231e7006b276f9"}, "2": {"node_id": "5c054979-9ff0-4480-abf0-d3cddbaf962a", "node_type": "1", "metadata": {"page_label": "9", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "0332040847d898fd6390d58428245bec2784e27cef5bfa25b52eb8b0f11adc12"}, "3": {"node_id": "6077f29c-2e41-40d6-b4eb-4f680ba8fedf", "node_type": "1", "metadata": {"page_label": "9", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "2436131b6e16c580a4ce434fe4be3fdb01fa8f14b4ed5462e2edcdc38ffde9af"}}, "hash": "5c6cb7104a013fe28bc64f65e2aee96aad177f3d52210fdaa4696c6529a68f21", "text": "Table 3 reports the results of HM2 and comparison methods. 
The results reveal that:\n1) Graph embedding methods are superior to the traditional linear method, which considers only the information of the input node itself; 2) Most methods achieve good performance on corporate relative valuation, which reflects the effectiveness of machine learning models in simulating the judgment of domain experts; 3) HM2 achieves the best or comparable performance against the comparison methods, which shows that HM2 can encode effective node embeddings for the valuation task by comprehensively considering the heterogeneous node attributes and linkages; 4) HM2 performs better than the other attention-based graph model, GAT, because HM2 considers heterogeneous neighbors and linkages comprehensively and uses a more effective multi-head attention mechanism, which validates the effectiveness of the heterogeneous neighbor construction and fusion; 5) HM2 performs better than HetGNN, which adopts LSTM to aggregate heterogeneous neighbors, and this indicates that the attention mechanism can better employ the linkages; 6) HM2 performs better than FAME; the reason is that HM2 takes the relations as feature vectors and utilizes an extra mapping function \u03a8(\u00b7) to better learn the similarity between two nodes; and 7) With the increase of training data, the performance of HM2 improves faster than that of the other methods, because HM2 employs the triplet loss by considering the embedding structure, and can better reflect the global structure of the graph as the training data increases. Moreover, we regard the corporate relative valuation as a regression problem, i.e., \u2113m = \u2225\u02c6yp_i \u2212 \u03b8\u22a4f2(v)\u2225\u00b2. Table 4 reports the MSE results of HM2 and comparison methods.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "6077f29c-2e41-40d6-b4eb-4f680ba8fedf": {"__data__": {"id_": "6077f29c-2e41-40d6-b4eb-4f680ba8fedf", "embedding": null, "metadata": {"page_label": "9", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "4e3755a0-2c19-42b5-b693-15c44d7d49a9", "node_type": "4", "metadata": {"page_label": "9", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "7974425cbb776830c95e0300eafc863c309fde76b1a9e1f9dd231e7006b276f9"}, "2": {"node_id": "ed7f6e5e-afe7-4f68-b672-3a911d428a09", "node_type": "1", "metadata": {"page_label": "9", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", 
"last_accessed_date": "2023-11-30"}, "hash": "5c6cb7104a013fe28bc64f65e2aee96aad177f3d52210fdaa4696c6529a68f21"}, "3": {"node_id": "76a94fb2-faef-4a6c-be1c-be4983ab2606", "node_type": "1", "metadata": {"page_label": "9", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "51ecf674ada100977e93bda6442bca59aedeb795f2766d5877b57f235b07da4b"}}, "hash": "2436131b6e16c580a4ce434fe4be3fdb01fa8f14b4ed5462e2edcdc38ffde9af", "text": "Table 4\nreports the MSE results of HM2and comparison methods.\nThe results reveal that: 1) The machine learning methods\nalso have considerable performance on accurate relative\nvaluation prediction; and 2) HM2also achieves the best\nor comparable performance, which is much better than\ncomparison methods even under low training data ratio,\ni.e., HM2performs better with only 10% training data.\nA notable phenomenon is that various methods do not\nhave signi\ufb01cant performance: 1) Even the performance of\nthe best method are not signi\ufb01cant; and 2) The promotions\nbetween deep methods and linear method, and the promo-\ntions between HM2and other deep graph models are notsigni\ufb01cant. This is because: 1) Considering the data privacy\nand \ufb01eld limitations, the amount of data is relatively small,\nwhich affects the training of deep models; and 2) Consid-\nering the feature missing and information insuf\ufb01ciency, the\ninformation contained in raw multi-modal data is limited.\n5 10 15 20 25 30 35 400.4100.4150.4200.4250.4300.4350.4400.4450.450\nAccuracy\nF1\n(a) Accuracy &F1\n5 10 15 20 25 30 352.9252.9502.9753.0003.0253.0503.0753.100\nMSE\n(b) MSE\nFig. 5. 
In\ufb02uence of sampled neighbor size, x-axis denotes the neighbor\nsize and y-axis represents performance measure.\n4.3 Analysis of HM2\nTo answer Q2, we design ablation studies for evaluation.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "76a94fb2-faef-4a6c-be1c-be4983ab2606": {"__data__": {"id_": "76a94fb2-faef-4a6c-be1c-be4983ab2606", "embedding": null, "metadata": {"page_label": "9", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "4e3755a0-2c19-42b5-b693-15c44d7d49a9", "node_type": "4", "metadata": {"page_label": "9", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "7974425cbb776830c95e0300eafc863c309fde76b1a9e1f9dd231e7006b276f9"}, "2": {"node_id": "6077f29c-2e41-40d6-b4eb-4f680ba8fedf", "node_type": "1", "metadata": {"page_label": "9", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "2436131b6e16c580a4ce434fe4be3fdb01fa8f14b4ed5462e2edcdc38ffde9af"}}, "hash": "51ecf674ada100977e93bda6442bca59aedeb795f2766d5877b57f235b07da4b", "text": "Ablation Study\nTo explore the role of each module in HM2, we conduct\nextra ablation studies to evaluate performances of several\nvariants, including:\n\u2022HM2-N: The variant of HM2that only adopts the\ndirect neighbors, without considering the higher-order\nneighbors;\n\u2022HM2-L: The variant of HM2that replaces the trans-\nformer based attribute encoding module with tradi-\ntional Bi-LSTM to encode heterogeneous node;\n\u2022HM2-FC: The variant of HM2that replaces the trans-\nformer based attribute encoding module with fully\nconnected network to encode heterogeneous node;\n\u2022HM2-R: The variant of HM2that doesn\u2019t consider the\nrelation embedding in Eq. 3;\n\u2022HM2-A: The variant of HM2that replaces attention\nbased multi-modal aggregation module with directly\nconcatenating multi-modal embeddings;\n\u2022HM2-B: The variant of HM2that removes the triplet\nloss in Eq. 7.\nThe results of prediction are reported in Table 5 and\nTable 6. They reveal that: 1) HM2behaves better than HM2-\nN, which demonstrates that neighbor sampling is effective\nfor subsequent operation and embedding generalization;\n2) HM2behaves better than HM2-L and HM2-FC, which\nshows that linkage-aware multi-head attention based en-\ncoding outperforms other methods without considering the\nAuthorized licensed use limited to: Univ of Calif Santa Barbara. 
Downloaded on June 20,2021 at 08:36:01 UTC from IEEE Xplore. Restrictions apply.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "4dda1b89-e6af-4070-ae1b-7c3baaeb19dd": {"__data__": {"id_": "4dda1b89-e6af-4070-ae1b-7c3baaeb19dd", "embedding": null, "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "29fad43a-5695-4286-bc25-eff91020241f", "node_type": "4", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f48079b147206e24f1a603002ca88b8dd9ee1cedd13676fe17fa8d88d312fecf"}, "3": {"node_id": "e6091fcf-6fb9-4cc7-8d05-c11cda8d7d00", "node_type": "1", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "fd7af15ee733d5361a0448cb63a0b7f72b5e1531d6d1ac6f53ecf937af0e6390"}}, "hash": "69377b5cac3704d12eb0430d50f1cf09a4accd5538bd282ff5bf1d2f7769c53b", "text": "1041-4347 (c) 2021 IEEE. Personal use is permitted, but republication/redistribution requires IEEE permission. See http://www.ieee.org/publications_standards/publications/rights/index.html for more information.This article has been accepted for publication in a future issue of this journal, but has not been fully edited. Content may change prior to final publication. Citation information: DOI 10.1109/TKDE.2021.3080293, IEEE\nTransactions on Knowledge and Data Engineering\n10\nTABLE 5\nAblation Study (y ), percentage denotes training data ratio. 
The best results are highlighted in bold.\nMetric HM2-N HM2-L HM2-FC HM2-A HM2-B HM2-R HM2\nAccuracy10% 0.335\u00b10.003 0.329\u00b10.006 0.334\u00b10.009 0.341\u00b10.003 0.326\u00b10.006 0.334\u00b10.005 0.346\u00b10.008\n30% 0.366\u00b10.003 0.380\u00b10.006 0.347\u00b10.011 0.388\u00b10.002 0.380\u00b10.004 0.385\u00b10.005 0.388\u00b10.004\n50% 0.396\u00b10.004 0.389\u00b10.014 0.381\u00b10.016 0.381\u00b10.006 0.377\u00b10.008 0.396\u00b10.008 0.410\u00b10.008\n70% 0.427\u00b10.009 0.434\u00b10.026 0.420\u00b10.017 0.430\u00b10.011 0.424\u00b10.008 0.433\u00b10.006 0.446\u00b10.007\nPrecision10% 0.333\u00b10.003 0.329\u00b10.031 0.337\u00b10.008 0.340\u00b10.002 0.337\u00b10.007 0.334\u00b10.005 0.351\u00b10.006\n30% 0.379\u00b10.005 0.375\u00b10.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "e6091fcf-6fb9-4cc7-8d05-c11cda8d7d00": {"__data__": {"id_": "e6091fcf-6fb9-4cc7-8d05-c11cda8d7d00", "embedding": null, "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "29fad43a-5695-4286-bc25-eff91020241f", "node_type": "4", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f48079b147206e24f1a603002ca88b8dd9ee1cedd13676fe17fa8d88d312fecf"}, "2": {"node_id": "4dda1b89-e6af-4070-ae1b-7c3baaeb19dd", "node_type": "1", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "69377b5cac3704d12eb0430d50f1cf09a4accd5538bd282ff5bf1d2f7769c53b"}, "3": {"node_id": "7e36a599-5ad0-4db6-8f42-f1b923437e40", "node_type": "1", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "c32eb78c5e862eb8ae35669f039abd348dcba4b62b586b00705a1340212bf8b3"}}, "hash": "fd7af15ee733d5361a0448cb63a0b7f72b5e1531d6d1ac6f53ecf937af0e6390", "text": "006\n30% 0.379\u00b10.005 0.375\u00b10.003 0.372\u00b10.012 0.393\u00b10.004 0.379\u00b10.004 0.388\u00b10.005 0.395\u00b10.004\n50% 0.385\u00b10.006 0.377\u00b10.016 0.382\u00b10.026 0.374\u00b10.005 0.377\u00b10.008 0.390\u00b10.016 0.405\u00b10.012\n70% 
0.423\u00b10.008 0.421\u00b10.034 0.418\u00b10.023 0.432\u00b10.010 0.428\u00b10.009 0.425\u00b10.007 0.428\u00b10.004\nRecall10% 0.335\u00b10.003 0.329\u00b10.006 0.334\u00b10.009 0.341\u00b10.002 0.326\u00b10.010 0.334\u00b10.005 0.346\u00b10.008\n30% 0.366\u00b10.003 0.380\u00b10.006 0.347\u00b10.011 0.388\u00b10.004 0.380\u00b10.004 0.385\u00b10.005 0.388\u00b10.004\n50% 0.396\u00b10.004 0.389\u00b10.014 0.381\u00b10.016 0.381\u00b10.009 0.377\u00b10.008 0.396\u00b10.008 0.410\u00b10.012\n70% 0.427\u00b10.009 0.434\u00b10.025 0.420\u00b10.017 0.430\u00b10.013 0.424\u00b10.008 0.433\u00b10.006 0.446\u00b10.005\nF1-measure10% 0.331\u00b10.003 0.327\u00b10.022 0.334\u00b10.010 0.340\u00b10.003 0.329\u00b10.006 0.333\u00b10.004 0.340\u00b10.009\n30% 0.356\u00b10.004 0.371\u00b10.006 0.341\u00b10.013 0.387\u00b10.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "7e36a599-5ad0-4db6-8f42-f1b923437e40": {"__data__": {"id_": "7e36a599-5ad0-4db6-8f42-f1b923437e40", "embedding": null, "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "29fad43a-5695-4286-bc25-eff91020241f", "node_type": "4", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f48079b147206e24f1a603002ca88b8dd9ee1cedd13676fe17fa8d88d312fecf"}, "2": {"node_id": "e6091fcf-6fb9-4cc7-8d05-c11cda8d7d00", "node_type": "1", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "fd7af15ee733d5361a0448cb63a0b7f72b5e1531d6d1ac6f53ecf937af0e6390"}, "3": {"node_id": "72cf3755-afc3-4628-a3f7-a9c05803d70f", "node_type": "1", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "0a2342406048104f549d575f08dd22e211ba93b5bbd0d707320c456228a083b1"}}, "hash": "c32eb78c5e862eb8ae35669f039abd348dcba4b62b586b00705a1340212bf8b3", "text": "371\u00b10.006 0.341\u00b10.013 0.387\u00b10.002 0.376\u00b10.004 0.382\u00b10.006 0.376\u00b10.005\n50% 0.388\u00b10.004 0.368\u00b10.028 0.347\u00b10.024 0.376\u00b10.006 0.357\u00b10.009 0.387\u00b10.013 
0.400\u00b10.008\n70% 0.418\u00b10.010 0.423\u00b10.026 0.418\u00b10.023 0.425\u00b10.011 0.423\u00b10.007 0.423\u00b10.006 0.424\u00b10.007\nTABLE 6\nAblation Study ( \u02c6y), percentage denotes training data ratio. The best results are highlighted in bold.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "72cf3755-afc3-4628-a3f7-a9c05803d70f": {"__data__": {"id_": "72cf3755-afc3-4628-a3f7-a9c05803d70f", "embedding": null, "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "29fad43a-5695-4286-bc25-eff91020241f", "node_type": "4", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f48079b147206e24f1a603002ca88b8dd9ee1cedd13676fe17fa8d88d312fecf"}, "2": {"node_id": "7e36a599-5ad0-4db6-8f42-f1b923437e40", "node_type": "1", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "c32eb78c5e862eb8ae35669f039abd348dcba4b62b586b00705a1340212bf8b3"}, "3": {"node_id": "6cfd78f7-094e-4dab-ab20-db6d9ae7245e", "node_type": "1", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "72438db53e82583b0ceb63476c33c0bbf3b190bf4a9c91b28ddb59cb79017986"}}, "hash": "0a2342406048104f549d575f08dd22e211ba93b5bbd0d707320c456228a083b1", "text": "percentage denotes training data ratio. 
The best results are highlighted in bold.\nMetric HM2-N HM2-L HM2-FC HM2-A HM2-B HM2-R HM2\nMSE 10% 5.223\u00b10.231 3.921\u00b10.559 5.304\u00b10.487 4.091\u00b10.658 5.808\u00b10.269 4.013\u00b10.216 3.919\u00b10.077\n30% 3.807\u00b10.104 4.027\u00b10.905 4.022\u00b10.151 3.801\u00b10.068 3.766\u00b10.086 3.605\u00b10.120 3.481\u00b10.051\n50% 3.731\u00b10.066 3.708\u00b10.261 3.800\u00b10.138 3.505\u00b10.034 3.895\u00b10.053 3.477\u00b10.076 3.432\u00b10.071\n70% 3.267\u00b10.065 3.167\u00b10.352 3.034\u00b10.162 3.114\u00b10.031 3.251\u00b10.068 3.014\u00b10.058 2.951\u00b10.084\nlinkages, and is beneficial for learning attribute interactions; 3) HM2 behaves competitively with HM2-A, which reveals that the self-attention mechanism has a slight advantage, and that both kinds of neighbors make their respective contributions to the prediction; 4) HM2 performs better than HM2-R, which indicates the effectiveness of the linkage representation in learning node embeddings; and 5) HM2 performs better than HM2-B, which shows that the triplet loss can take the graph structure into full account to learn more discriminative embeddings.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "6cfd78f7-094e-4dab-ab20-db6d9ae7245e": {"__data__": {"id_": "6cfd78f7-094e-4dab-ab20-db6d9ae7245e", "embedding": null, "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "29fad43a-5695-4286-bc25-eff91020241f", "node_type": "4", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f48079b147206e24f1a603002ca88b8dd9ee1cedd13676fe17fa8d88d312fecf"}, "2": {"node_id": "72cf3755-afc3-4628-a3f7-a9c05803d70f", "node_type": "1", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "0a2342406048104f549d575f08dd22e211ba93b5bbd0d707320c456228a083b1"}, "3": {"node_id": "d45d3921-d8ca-4ad1-a11e-35fd0ce71045", "node_type": "1", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": 
"989aeea73e68efaf5b02b7804a58dce1eabda84a58706d7b1bf4193ba667a3f3"}}, "hash": "72438db53e82583b0ceb63476c33c0bbf3b190bf4a9c91b28ddb59cb79017986", "text": "4.4 Influence of Linkages\nIn detail, the linkage type set of the CRV graph consists of: 1) company-company linkages, 2) company-member linkages, and 3) member-member linkages. The main statistics are in Table 1 of the main body. Although HM2 only focuses on the embedding learning of company nodes in this paper, when collecting company nodes\u2019 neighbors, HM2 utilizes random walk sampling for the neighbor construction of each node following [14]. Therefore, the company-member and member-member linkages can help collect high-order member neighbors for each node.\nTo explore the influence of linkages on embedding learning, we have conducted ablation studies to evaluate the mentioned relationships: 1) w/o m-m: HM2 without member-member linkages, i.e., HM2 that only samples the company\u2019s direct member neighbors; 2) w/o c-m: HM2 without company-member linkages, i.e., HM2 that does not sample the company\u2019s member neighbors; 3) w/o e: HM2 that does not distinguish edge types, i.e., the linkage is 1 if two nodes are connected, and 0 otherwise.\nTable 7 records the results, and they reveal that: 1) the performance degradation of w/o m-m is not obvious; it\nTABLE 7\nCorporate relative valuation prediction results with ablation studies considering different linkage settings, percentage denotes training data ratio. The best results are highlighted in bold.\nMetric w/o m-m w/o c-m w/o e HM2\nAccuracy 10% .341\u00b1.006 .337\u00b1.005 .334\u00b1.005 .346\u00b1.008\n30% .383\u00b1.005 .385\u00b1.007 .385\u00b1.005 .388\u00b1.004\n50% .402\u00b1.010 .394\u00b1.006 .396\u00b1.008 .410\u00b1.008\n70% .434\u00b1.005 .", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "d45d3921-d8ca-4ad1-a11e-35fd0ce71045": {"__data__": {"id_": "d45d3921-d8ca-4ad1-a11e-35fd0ce71045", "embedding": null, "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "29fad43a-5695-4286-bc25-eff91020241f", "node_type": "4", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f48079b147206e24f1a603002ca88b8dd9ee1cedd13676fe17fa8d88d312fecf"}, "2": {"node_id": "6cfd78f7-094e-4dab-ab20-db6d9ae7245e", "node_type": "1", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": 
"2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "72438db53e82583b0ceb63476c33c0bbf3b190bf4a9c91b28ddb59cb79017986"}, "3": {"node_id": "d683c0d4-eff0-49a9-855b-8be0efaac829", "node_type": "1", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "c85556f147da60fb7396b65fb3356c2963e1e03a914486c13f9f9bf89c0d6d73"}}, "hash": "989aeea73e68efaf5b02b7804a58dce1eabda84a58706d7b1bf4193ba667a3f3", "text": "006 .396\u00b1.008 .410\u00b1.008\n70% .434\u00b1.005 .432\u00b1.005 .433\u00b1.006 .446\u00b1.007\nPrecision10% .340\u00b1.006 .339\u00b1.003 .334\u00b1.005 .351\u00b1.006\n30% .385\u00b1.008 .388\u00b1.008 .388\u00b1.005 .395\u00b1.004\n50% .399\u00b1.010 .395\u00b1.009 .390\u00b1.016 .405\u00b1.012\n70% .426\u00b1.008 .428\u00b1.006 .425\u00b1.007 .428\u00b1.004\nRecall10% .341\u00b1.006 .337\u00b1.005 .334\u00b1.005 .346\u00b1.008\n30% .383\u00b1.005 .385\u00b1.007 .385\u00b1.005 .388\u00b1.004\n50% .402\u00b1.010 .394\u00b1.006 .396\u00b1.008 .410\u00b1.012\n70% .432\u00b1.005 .432\u00b1.005 .433\u00b1.006 .446\u00b1.005\nF1-measure10% .338\u00b1.005 .337\u00b1.004 .333\u00b1.004 .340\u00b1.009\n30% .372\u00b1.007 .382\u00b1.007 .382\u00b1.006 .376\u00b1.005\n50% .395\u00b1.009 .390\u00b1.007 .387\u00b1.013 .400\u00b1.008\n70% .423\u00b1.008 .426\u00b1.006 .423\u00b1.006 .424\u00b1.007\nMSE10% 3.909\u00b10.068 3.877\u00b10.039 4.013\u00b10.216 3.919\u00b10.077\n30% 3.541\u00b10.090 3.553\u00b10.095 3.605\u00b10.120 3.481\u00b10.051\n50% 3.451\u00b10.062 3.440\u00b10.027 3.477\u00b10.076 3.432\u00b10.071\n70% 3.047\u00b10.060 3.013\u00b10.018 3.014\u00b10.058 2.951\u00b10.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "d683c0d4-eff0-49a9-855b-8be0efaac829": {"__data__": {"id_": "d683c0d4-eff0-49a9-855b-8be0efaac829", "embedding": null, "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "29fad43a-5695-4286-bc25-eff91020241f", "node_type": "4", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f48079b147206e24f1a603002ca88b8dd9ee1cedd13676fe17fa8d88d312fecf"}, "2": {"node_id": "d45d3921-d8ca-4ad1-a11e-35fd0ce71045", "node_type": "1", "metadata": {"page_label": "10", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural 
Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "989aeea73e68efaf5b02b7804a58dce1eabda84a58706d7b1bf4193ba667a3f3"}}, "hash": "c85556f147da60fb7396b65fb3356c2963e1e03a914486c13f9f9bf89c0d6d73", "text": "013\u00b10.018 3.014\u00b10.058 2.951\u00b10.084\nindicates that the effect of member-member linkage is weak,\nfor the reason that only the core members (i.e. the direct\nmembers) are necessary for company valuation, whereas the\nmember-member linkage has less contribution to company\nnode embedding learning; 2) the performance degradation\nof w/o c-m is more signi\ufb01cant, because member neighbors\nAuthorized licensed use limited to: Univ of Calif Santa Barbara. Downloaded on June 20,2021 at 08:36:01 UTC from IEEE Xplore. Restrictions apply.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "f2d8b0fe-11fa-4721-b296-d7fce33485d8": {"__data__": {"id_": "f2d8b0fe-11fa-4721-b296-d7fce33485d8", "embedding": null, "metadata": {"page_label": "11", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "35ff0f02-f8a3-4f4b-9b80-c52d809439ef", "node_type": "4", "metadata": {"page_label": "11", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "02914b5015ef0db3eb6a652f9ff2edd193274fb65eea63ef0bf624d76079328e"}, "3": {"node_id": "eed605dc-21d5-45a3-ba2a-e00cedc08614", "node_type": "1", "metadata": {"page_label": "11", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f57e4f350e6c7a8f1841459ed62f452c6396dcf59ef5e4a2d6dd51180641f3d4"}}, "hash": "722092a96de0ea9ec6a7dd859cf0608cfb61bd54e1753553b4547930678fc11a", "text": "1041-4347 (c) 2021 IEEE. Personal use is permitted, but republication/redistribution requires IEEE permission. See http://www.ieee.org/publications_standards/publications/rights/index.html for more information.This article has been accepted for publication in a future issue of this journal, but has not been fully edited. Content may change prior to final publication. 
Citation information: DOI 10.1109/TKDE.2021.3080293, IEEE\nTransactions on Knowledge and Data Engineering\n11\nare useful for learning more discriminative embedding of\ncompany node; 3) HM2is superior to w/o e, which indicates\nthat it is more meaningful to consider the type of linkages\nwhen learning node embedding.\n4.5 Hyper-parameters Study\nTo answer Q3, we also develop hyper-parameter experi-\nments to analyze the impacts of key parameters, i.e., the size\nof sampled neighbors set. We \ufb01x the ratio of training data\nto70%, with all valuation level labels. The performances\nof HM2are shown in Figure 5. Figures 5 (a) and (b) de-\nclare that, with the increase of neighbor size, all evaluation\nmetrics \ufb01rstly become better, i.e., accuracy and F1 increase\nand MSE decreases, and later turn worse after exceeding a\ncertain size, i.e., around 13, which may be caused by the\nnoise and weakly related neighbors. As is demonstrated in\nFigure 5, the best neighbor size is 10\u221215.\nFurthermore, Figure 6 shows the valuation performances\nof HM2embeddings with various dimensions, Figure 6 (a)\nre\ufb02ects the classi\ufb01cation task and Figure 6 (b) re\ufb02ects the\nregression task.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "eed605dc-21d5-45a3-ba2a-e00cedc08614": {"__data__": {"id_": "eed605dc-21d5-45a3-ba2a-e00cedc08614", "embedding": null, "metadata": {"page_label": "11", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "35ff0f02-f8a3-4f4b-9b80-c52d809439ef", "node_type": "4", "metadata": {"page_label": "11", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "02914b5015ef0db3eb6a652f9ff2edd193274fb65eea63ef0bf624d76079328e"}, "2": {"node_id": "f2d8b0fe-11fa-4721-b296-d7fce33485d8", "node_type": "1", "metadata": {"page_label": "11", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "722092a96de0ea9ec6a7dd859cf0608cfb61bd54e1753553b4547930678fc11a"}, "3": {"node_id": "ea96a8a2-4442-4ff9-b5f0-e0b8bc61cce5", "node_type": "1", "metadata": {"page_label": "11", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": 
"2023-11-30"}, "hash": "31bff821f0364b1cabbd34e8e1862c0cfa90b3c8f81eba8fa04d66c8eddb932c"}}, "hash": "f57e4f350e6c7a8f1841459ed62f452c6396dcf59ef5e4a2d6dd51180641f3d4", "text": "The dimension dvaries from 32 to 256, the\n\ufb01gures reveal that all evaluation criteria improve \ufb01rst, i.e.,\naccuracy and F1 increase and MSE decreases, since better\nembeddings can be learned. However, the performance de-\nteriorate when dfurther increases, i.e., after 128 dimension,\nthis may be because of over-\ufb01tting.\n0 100 200 300 400 5000.4100.4150.4200.4250.4300.4350.4400.4450.450\nAccuracy\nF1\n(a) Accuracy &F1\n0 100 200 300 400 5002.9002.9252.9502.9753.0003.0253.0503.075\nMSE\n(b) MSE\nFig. 6. In\ufb02uence of embedding dimension, x-axis denotes the embed-\nding dimension and y-axis represents performance measure\n4.6 Case Study\nMoreover, in order to analyze the interpretability of HM2,\nwe also give the attention visualization results of two un-\nlisted company (MiaoQu and MayiJuniu software compa-\nnies) by using HM2. The visualization results are shown\nin Figure 7, and it is notable that we only exhibit the\nrelationships between input node and the neighbors, not thetopology structure. The \ufb01rst row displays MiaoQu company,\nand the second row illustrates MayiJuniu company.\nIn the \ufb01rst row, Figure 7 (a) indicates that the relative\nvaluation is strongly related to the company neighbors, i.e.,\n\u03b1= 0.75, which is reasonable, because unlisted company\u2019s\nvalue is generally strongly related to the company\u2019s in-\ndustry information.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "ea96a8a2-4442-4ff9-b5f0-e0b8bc61cce5": {"__data__": {"id_": "ea96a8a2-4442-4ff9-b5f0-e0b8bc61cce5", "embedding": null, "metadata": {"page_label": "11", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "35ff0f02-f8a3-4f4b-9b80-c52d809439ef", "node_type": "4", "metadata": {"page_label": "11", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "02914b5015ef0db3eb6a652f9ff2edd193274fb65eea63ef0bf624d76079328e"}, "2": {"node_id": "eed605dc-21d5-45a3-ba2a-e00cedc08614", "node_type": "1", "metadata": {"page_label": "11", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f57e4f350e6c7a8f1841459ed62f452c6396dcf59ef5e4a2d6dd51180641f3d4"}, "3": {"node_id": "50701628-4ae8-4c0f-b246-07c40bcacef0", 
"node_type": "1", "metadata": {"page_label": "11", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "03821445ed54a2b2d7362731c15bf062b0c381c60c9140376d6a1a3b62749788"}}, "hash": "31bff821f0364b1cabbd34e8e1862c0cfa90b3c8f81eba8fa04d66c8eddb932c", "text": "Meanwhile, Figure 7 (b) and (c) reveal\nthat the companies \u201cXiaoChu\u201d, \u201cJingyue\u201d, \u201cMeishan\u201d (the\nweights are 0.18, 0.15, 0.13) and member \u201cJianGen Cao\u201d\n(the weight is 0.74) have great impacts on the company, as\nthese subsidiaries produced MiaoQu\u2019s main products, and\nJianGen Cao is the CEO of the company.\nIn the second row, Figure 7 (a) indicates that the rela-\ntive valuation has relatively balanced correlations with the\ncompany and member neighbors, i.e., company attention\nweight\u03b1= 0.51 and member attention weight \u03b1= 0.49,\nwhich is reasonable, since the company has several in-\n\ufb02uential members, \u201cXiaoming Hu\u201d not only owns several\nin\ufb02uential companies, but also has connections with many\ncore members of other listed company. Meanwhile, Figure 7\n(b) and (c) reveal that the companies \u201cZhejiangMayi, LLC\u201d,\n\u201cZhejiangMayi\u201d (the weights are 0.28, 0.27) and member\n\u201cXiaoming Hu\u201d (the weight is 0.75) have great impacts\non the \u201cMayiJuniu\u201d, as these companies are respectively\ninvestors and clients of the \u201cMayiJuniu\u201d, and \u201cXiaoming\nHu\u201d is the CEO of the company.\n5 R ELATED WORK\nThe related works include: 1) corporate valuation; 2) multi-\nmodal aggregation; and 3) heterogeneous graph mining.\nCorporate Valuation Corporate valuation methods can\nbe divided into two categories, i.e., relative valuation [32,\n33, 34] and absolute valuation [4, 5]. 
Relative valuation\nalways conducts a comparison with comparable companies\n(Trading Comps) or precedent transactions (Deal Comps).", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "50701628-4ae8-4c0f-b246-07c40bcacef0": {"__data__": {"id_": "50701628-4ae8-4c0f-b246-07c40bcacef0", "embedding": null, "metadata": {"page_label": "11", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "35ff0f02-f8a3-4f4b-9b80-c52d809439ef", "node_type": "4", "metadata": {"page_label": "11", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "02914b5015ef0db3eb6a652f9ff2edd193274fb65eea63ef0bf624d76079328e"}, "2": {"node_id": "ea96a8a2-4442-4ff9-b5f0-e0b8bc61cce5", "node_type": "1", "metadata": {"page_label": "11", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "31bff821f0364b1cabbd34e8e1862c0cfa90b3c8f81eba8fa04d66c8eddb932c"}}, "hash": "03821445ed54a2b2d7362731c15bf062b0c381c60c9140376d6a1a3b62749788", "text": "Absolute valuation, which concentrates on the analysis of\ncash \ufb02ow and the converting to current value, is a more\ncomplex re\ufb01ned forecast method. With the development of\ndata mining technologies, there have been some attempts\nto use related techniques for valuation [35, 36]. However,\nthese methods require full \ufb01nancial statements and stock\ninformation, which are dif\ufb01cult to obtain considering com-\nmercial privacy protection, especially for startups. Another\neffective method is to analyze the company\u2019s core resources\nand members, which are much easier to obtain from public\ninformation. But this method needs experienced experts.\nMulti-Modal Aggregation Multi-modal learning im-\nproves performance by leveraging heterogeneous multi-\nsource data, in which modal suf\ufb01ciency is one of the im-\nportant principles. Traditional methods make full use of\nmulti-modal data by directly aggregating multiple source\ninformation, i.e., early (i.e., feature-based) or late fusion\n(i.e., decision-based), for example, early fusion methods con-\ncatenated the multiple feature representations for \ufb01nal pre-\ndiction. In contrast, late fusion methods utilize max/mean\npooling to integrate multi-modal predictions. Theses ap-\nproaches are based on the assumption that each modal\ncan provide suf\ufb01cient information for prediction. 
However, the information contained in various modalities is divergent; thus researchers turn to adopting a weighted ensemble", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "f997fc05-f082-45a9-ac34-35213160ab39": {"__data__": {"id_": "f997fc05-f082-45a9-ac34-35213160ab39", "embedding": null, "metadata": {"page_label": "12", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "a99b8daa-2f23-425d-86bd-5a50f12846ec", "node_type": "4", "metadata": {"page_label": "12", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "63af798fef7b1598e4ad2b16e9e40b34c2a978d585c6f3e831a52bee98a2c97c"}, "3": {"node_id": "da9beb03-1f3e-4b25-a36a-0bb40c1d2227", "node_type": "1", "metadata": {"page_label": "12", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "4a728fd0bd70b4aad7316ddd46b3cef9cd273e1a691501707dc92dc4653eb36c"}}, "hash": "7990782086b05c8f10530ae4abd83bc50802693943917a4f7cde652b44a8e0c1", "text": "[Figure 7 graphic: heterogeneous neighbor graphs for the two example companies, \u201cMiaoQu\u201d and \u201cMayiJuniu\u201d, with company and person nodes; panel titles: (a) Overall, (b) Company Neighbors, (c) Member Neighbors.]\nFig. 7. Example of attention visualizations for two cases. 
(a) shows attention visualizations of the multi-modal aggregation module; (b) and (c) show attention visualizations of the modal attribute encoding module.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "da9beb03-1f3e-4b25-a36a-0bb40c1d2227": {"__data__": {"id_": "da9beb03-1f3e-4b25-a36a-0bb40c1d2227", "embedding": null, "metadata": {"page_label": "12", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "a99b8daa-2f23-425d-86bd-5a50f12846ec", "node_type": "4", "metadata": {"page_label": "12", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "63af798fef7b1598e4ad2b16e9e40b34c2a978d585c6f3e831a52bee98a2c97c"}, "2": {"node_id": "f997fc05-f082-45a9-ac34-35213160ab39", "node_type": "1", "metadata": {"page_label": "12", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "7990782086b05c8f10530ae4abd83bc50802693943917a4f7cde652b44a8e0c1"}, "3": {"node_id": "ce6915a9-e74e-4132-a576-43a0743acf04", "node_type": "1", "metadata": {"page_label": "12", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d15ea8360bdb9864c76cccbeb650a194246fcf1d02f73b05d0979c062c558332"}}, "hash": "4a728fd0bd70b4aad7316ddd46b3cef9cd273e1a691501707dc92dc4653eb36c", "text": "for acquiring a more reliable prediction. For example, [37] developed shot-variance and min-fusion schemes for both intra- and inter-modal fusion; [38] utilized multiple kernel learning to integrate different modal information. Recently, with the development of deep learning and attention mechanisms, many approaches have attempted to self-learn the modal weights; for example, [39] incorporated a feature-wise attention network to concatenate deep multi-modal embeddings for rumor detection; [40] combined self-attention to adaptively learn the weights for different modalities, which are further used for prediction.\nHeterogeneous Graph Mining Graph learning [41] is one of the most popular data mining topics. 
Recently, with the advent of deep learning, graph neural networks [18, 22, 41], which aggregate information from neighbors via neural networks, have been widely researched. Different from previous graph embedding models, which adopt linear methods, the key idea of graph neural networks is to aggregate feature information from a node\u2019s neighbors via neural networks. For example, [18] proposed GraphSAGE, using neural networks, i.e., an LSTM, to aggregate neighbors\u2019 feature information; [25] developed GAT to measure the impacts of different neighbors by employing an attention mechanism, and combines their impacts to obtain node embeddings. Most of these methods concentrate on homogeneous graphs. However, as introduced in Section 2.2, a company\u2019s core resources and members construct a heterogeneous graph with multi-modal attributes. To solve this problem, heterogeneous graph mining has been proposed and applied widely; for example, [42] extracted topological features and predicted citation relationships, and [43] developed a co-attention deep network to leverage meta-path based context. Besides, [44, 45] designed heterogeneous networks to automatically preserve both attribute semantics and multi-type relations.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "ce6915a9-e74e-4132-a576-43a0743acf04": {"__data__": {"id_": "ce6915a9-e74e-4132-a576-43a0743acf04", "embedding": null, "metadata": {"page_label": "12", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "a99b8daa-2f23-425d-86bd-5a50f12846ec", "node_type": "4", "metadata": {"page_label": "12", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "63af798fef7b1598e4ad2b16e9e40b34c2a978d585c6f3e831a52bee98a2c97c"}, "2": {"node_id": "da9beb03-1f3e-4b25-a36a-0bb40c1d2227", "node_type": "1", "metadata": {"page_label": "12", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "4a728fd0bd70b4aad7316ddd46b3cef9cd273e1a691501707dc92dc4653eb36c"}}, "hash": "d15ea8360bdb9864c76cccbeb650a194246fcf1d02f73b05d0979c062c558332", "text": "6 CONCLUSION\nConsidering the limited availability and deficiencies of financial statements, corporate relative valuation, based on core resources, members, and competitors, plays an important role in entertainment services. 
Traditional CRV always relies on domain experts, which inevitably brings high costs. In recent years, an increasing number of machine learning methods have been successfully applied in entertainment services. Notably, a company\u2019s structure can be represented as a heterogeneous multi-modal graph, and the attributes on different types of nodes constitute multi-modal data. Therefore, we developed HM2, an HGNN-style method, which can aggregate node attributes via a linkage-aware multi-head attention mechanism, rather than using multi-instance-based methods that do not consider relationships among nodes. Meanwhile, HM2 adopted an additional triplet loss, with the embeddings of competitors as the constraint, to learn more discriminative features. Consequently, HM2 can explore companies\u2019 intrinsic properties to improve CRV. Extensive experiments on real-world CRV data demonstrated the effectiveness of HM2.\nACKNOWLEDGMENT\nThis research was supported by NSFC (62006118, 61773198, 61632004), NSFC-NRF Joint Research Project under Grant 61861146001, CCF-Baidu Open Fund (CCF-BAIDU OF2020011), Baidu TIC Open Fund, and Natural Science Foundation of Jiangsu Province of China under Grant (BK20200460).", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "424dd8a7-d41e-4c7d-888b-60048cf12eb3": {"__data__": {"id_": "424dd8a7-d41e-4c7d-888b-60048cf12eb3", "embedding": null, "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e2a0dbaa-daf2-400a-8a52-560293fb0472", "node_type": "4", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d100d58e1005fb2c658803507071ecabcfe9ff3f9f7a07fc712974f29ac6a067"}, "3": {"node_id": "bcc2842f-dcca-44b6-96c1-a02b60ed42ec", "node_type": "1", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "79b627940452a02c917b588f29291b4ff51f6441909c1e4fa759d58e8190fe9a"}}, "hash": "bc2fce9728fa58d37f14b83ace0dbb8e179f9e35e3e0a13d2c3e8128fd4a620f", "text": "REFERENCES\n
[1] C. Qin, H. Zhu, C. Zhu, T. Xu, F. Zhuang, C. Ma, J. Zhang, and H. Xiong, \u201cDuerquiz: A personalized question recommender system for intelligent job interview,\u201d in KDD, Anchorage, AK, 2019, pp. 2165\u20132173.\n[2] Y. Sun, F. Zhuang, H. Zhu, X. Song, Q. He, and H. Xiong, \u201cThe impact of person-organization fit on talent management: A structure-aware convolutional neural network approach,\u201d in KDD, Anchorage, AK, 2019, pp. 1625\u20131633.\n[3] M. Haldar, M. Abdool, P. Ramanathan, T. Xu, S. Yang, H. Duan, Q. Zhang, N. Barrow-Williams, B. C. Turnbull, B. M. Collins, and T. Legrand, \u201cApplying deep learning to Airbnb search,\u201d in KDD, Anchorage, AK, 2019, pp. 1927\u20131935.\n[4] M. C. Jensen, \u201cAgency costs of free cash flow, corporate finance, and takeovers,\u201d The American Economic Review, vol. 76, no. 2, pp.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "bcc2842f-dcca-44b6-96c1-a02b60ed42ec": {"__data__": {"id_": "bcc2842f-dcca-44b6-96c1-a02b60ed42ec", "embedding": null, "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e2a0dbaa-daf2-400a-8a52-560293fb0472", "node_type": "4", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d100d58e1005fb2c658803507071ecabcfe9ff3f9f7a07fc712974f29ac6a067"}, "2": {"node_id": "424dd8a7-d41e-4c7d-888b-60048cf12eb3", "node_type": "1", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "bc2fce9728fa58d37f14b83ace0dbb8e179f9e35e3e0a13d2c3e8128fd4a620f"}, "3": {"node_id": "a3fc4df2-de3c-4d38-96f1-556757db1937", "node_type": "1", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", 
"last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "addc2a4de2cf0310da4f3c05e1e43aefc5d89a80f15248cd92c122b90944b9a0"}}, "hash": "79b627940452a02c917b588f29291b4ff51f6441909c1e4fa759d58e8190fe9a", "text": "76, no. 2, pp. 323\u2013329, 1986.\n[5] J. Francis, \u201cEva [r] and value-based management: a prac-\ntical guide to implementation.(book reviews),\u201d Accounting\nReview, vol. 77, no. 1, pp. 228\u2013229, 2002.\n[6] T. A. Luehrman, Investment opportunities as real options:\nGetting started on the numbers. Harvard Business Review\nBoston, 1998.\n[7] K. L. Fisher, The only three questions that count: investing by\nknowing what others don\u2019t. John Wiley & Sons, 2007, vol. 22.\n[8] A. Vaswani, N. Shazeer, N. Parmar, J. Uszkoreit, L. Jones,\nA. N. Gomez, L. Kaiser, and I. Polosukhin, \u201cAttention is all\nyou need,\u201d in NeurIPS, Long Beach, CA, 2017, pp. 5998\u2013\n6008.\n[9] T. Miloud, A. Aspelund, and M. Cabrol, \u201cStartup valuation\nby venture capitalists: an empirical study,\u201d Venture Capital,\nvol. 14, no. 2-3, pp. 151\u2013174, 2012.\n[10] A. Kohn, \u201cThe determinants of startup valuation in the\nventure capital context: a systematic review and avenues\nfor future research,\u201d Management Review Quarterly, vol. 68,\nno. 1, pp. 3\u201336, 2018.\n[11] J. Tang, J. Zhang, L. Yao, J. Li, L. Zhang, and Z. Su,\n\u201cArnetminer: extraction and mining of academic social\nnetworks,\u201d in KDD, Las Vegas, Nevada, 2008, pp. 990\u2013998.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "a3fc4df2-de3c-4d38-96f1-556757db1937": {"__data__": {"id_": "a3fc4df2-de3c-4d38-96f1-556757db1937", "embedding": null, "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e2a0dbaa-daf2-400a-8a52-560293fb0472", "node_type": "4", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d100d58e1005fb2c658803507071ecabcfe9ff3f9f7a07fc712974f29ac6a067"}, "2": {"node_id": "bcc2842f-dcca-44b6-96c1-a02b60ed42ec", "node_type": "1", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "79b627940452a02c917b588f29291b4ff51f6441909c1e4fa759d58e8190fe9a"}, "3": {"node_id": "aa4c2471-0e4b-410e-9d13-274b0c84c895", "node_type": "1", "metadata": {"page_label": "13", "file_name": 
"Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "65a8326008e5e9c93eac3d57229d6fc5e9970cad44525805ffcc19faba5b87cc"}}, "hash": "addc2a4de2cf0310da4f3c05e1e43aefc5d89a80f15248cd92c122b90944b9a0", "text": "990\u2013998.\n[12] R. He and J. J. McAuley, \u201cUps and downs: Modeling the\nvisual evolution of fashion trends with one-class collabo-\nrative \ufb01ltering,\u201d in WWW, Montreal, Canada, pp. 507\u2013517.\n[13] C. Zhang, D. Song, C. Huang, A. Swami, and N. V .\nChawla, \u201cHeterogeneous graph neural network,\u201d in KDD,\nAnchorage, AK, 2019, pp. 793\u2013803.\n[14] A. Grover and J. Leskovec, \u201cnode2vec: Scalable feature\nlearning for networks,\u201d in KDD, San Francisco, CA, 2016,\npp. 855\u2013864.\n[15] F. Zhang, X. Liu, J. Tang, Y. Dong, P . Yao, J. Zhang, X. Gu,\nY. Wang, B. Shao, R. Li, and K. Wang, \u201cOAG: toward\nlinking large-scale heterogeneous entity graphs,\u201d in KDD,\nAnchorage, AK, 2019, pp. 2585\u20132595.\n[16] J. Zhao, Z. Zhou, Z. Guan, W. Zhao, W. Ning, G. Qiu, and\nX. He, \u201cIntentgc: A scalable graph convolution framework\nfusing heterogeneous information for recommendation,\u201d\ninKDD, Anchorage, AK, 2019, pp. 2347\u20132357.\n[17] S. Hochreiter and J. Schmidhuber, \u201cLong short-term mem-\nory,\u201d Neural Computation, vol. 9, no. 8, pp. 1735\u20131780, 1997.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "aa4c2471-0e4b-410e-9d13-274b0c84c895": {"__data__": {"id_": "aa4c2471-0e4b-410e-9d13-274b0c84c895", "embedding": null, "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e2a0dbaa-daf2-400a-8a52-560293fb0472", "node_type": "4", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d100d58e1005fb2c658803507071ecabcfe9ff3f9f7a07fc712974f29ac6a067"}, "2": {"node_id": "a3fc4df2-de3c-4d38-96f1-556757db1937", "node_type": "1", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "addc2a4de2cf0310da4f3c05e1e43aefc5d89a80f15248cd92c122b90944b9a0"}, "3": {"node_id": 
"5ae76d28-2afa-443a-b517-81fd19c03ba4", "node_type": "1", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "2ead5854d221d9fc79243c4ac2e79c6abaeb0e69e82068721670d5a97f5159a8"}}, "hash": "65a8326008e5e9c93eac3d57229d6fc5e9970cad44525805ffcc19faba5b87cc", "text": "9, no. 8, pp. 1735\u20131780, 1997.\n[18] W. L. Hamilton, Z. Ying, and J. Leskovec, \u201cInductive\nrepresentation learning on large graphs,\u201d in NeurIPS, Long\nBeach, CA, 2017, pp. 1024\u20131034.\n[19] Z. Liu, C. Chen, X. Yang, J. Zhou, X. Li, and L. Song, \u201cHet-\nerogeneous graph neural networks for malicious account\ndetection,\u201d in CIKM, Torino, Italy, 2018, pp. 2077\u20132085.\n[20] T. N. Kipf and M. Welling, \u201cSemi-supervised classi\ufb01cation\nwith graph convolutional networks,\u201d in ICLR, Toulon,\nFrance, 2017.\n[21] E. B. Khalil, H. Dai, Y. Zhang, B. Dilkina, and L. Song,\n\u201cLearning combinatorial optimization algorithms overgraphs,\u201d in NeurIPS, Long Beach, CA, 2017, pp. 6348\u20136358.\n[22] J. Zhou, G. Cui, Z. Zhang, C. Yang, Z. Liu, and M. Sun,\n\u201cGraph neural networks: A review of methods and appli-\ncations,\u201d CoRR, vol. abs/1812.08434, 2018.\n[23] T. Mikolov, K. Chen, G. Corrado, and J. Dean, \u201cEf\ufb01cient\nestimation of word representations in vector space,\u201d in\nICLR Workshop Track, Scottsdale, Arizona, 2013.\n[24] B. Xiang, Z. Liu, J. Zhou, and X. Li, \u201cFeature propagation\non graph: A new perspective to graph representation\nlearning,\u201d CoRR, vol. 
abs/1804.06111, 2018.\n[25] P.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "5ae76d28-2afa-443a-b517-81fd19c03ba4": {"__data__": {"id_": "5ae76d28-2afa-443a-b517-81fd19c03ba4", "embedding": null, "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e2a0dbaa-daf2-400a-8a52-560293fb0472", "node_type": "4", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d100d58e1005fb2c658803507071ecabcfe9ff3f9f7a07fc712974f29ac6a067"}, "2": {"node_id": "aa4c2471-0e4b-410e-9d13-274b0c84c895", "node_type": "1", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "65a8326008e5e9c93eac3d57229d6fc5e9970cad44525805ffcc19faba5b87cc"}, "3": {"node_id": "fcce7b07-dfff-4be7-8002-9a29584e0517", "node_type": "1", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "ae190f858fa4d18b3c0c7626707776253e21279281e2bf422ac9ae70b80111d3"}}, "hash": "2ead5854d221d9fc79243c4ac2e79c6abaeb0e69e82068721670d5a97f5159a8", "text": "abs/1804.06111, 2018.\n[25] P. Velickovic, G. Cucurull, A. Casanova, A. Romero, P. Lio, and Y. Bengio, \u201cGraph attention networks,\u201d in ICLR, Vancouver, BC, 2018.\n[26] B. Perozzi, R. Al-Rfou, and S. Skiena, \u201cDeepwalk: online learning of social representations,\u201d in KDD, New York, NY, 2014, pp. 701\u2013710.\n[27] D. P. Kingma and J. Ba, \u201cAdam: A method for stochastic optimization,\u201d in ICLR, San Diego, CA, 2015.\n[28] Y. Dong, N. V. Chawla, and A. Swami, \u201cmetapath2vec: Scalable representation learning for heterogeneous networks,\u201d in KDD, Halifax, Canada, 2017, pp. 135\u2013144.\n[29] L. Liao, X. He, H. Zhang, and T. Chua, \u201cAttributed social network embedding,\u201d TKDE, vol. 30, no. 12, pp. 2257\u20132270, 2018.\n[30] X. Wang, H. Ji, C. Shi, B. Wang, Y. Ye, P. Cui, and P. S. Yu, \u201cHeterogeneous graph attention network,\u201d in WWW, San Francisco, CA, 2019, pp. 2022\u20132032.\n[31] Y. Cen, X. Zou, J. Zhang, H. Yang, J. 
Zhou, and J. Tang, \u201cRepresentation learning for attributed multiplex heterogeneous network,\u201d in KDD, Anchorage, AK, 2019, pp. 1358\u20131368.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "fcce7b07-dfff-4be7-8002-9a29584e0517": {"__data__": {"id_": "fcce7b07-dfff-4be7-8002-9a29584e0517", "embedding": null, "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e2a0dbaa-daf2-400a-8a52-560293fb0472", "node_type": "4", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d100d58e1005fb2c658803507071ecabcfe9ff3f9f7a07fc712974f29ac6a067"}, "2": {"node_id": "5ae76d28-2afa-443a-b517-81fd19c03ba4", "node_type": "1", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "2ead5854d221d9fc79243c4ac2e79c6abaeb0e69e82068721670d5a97f5159a8"}, "3": {"node_id": "f40faf80-60cc-4fea-8e42-6eb0f03e2cfb", "node_type": "1", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "de9920144a9e618ec266c6e715fce188b8781bfe644c4072baf944a8fe22f307"}}, "hash": "ae190f858fa4d18b3c0c7626707776253e21279281e2bf422ac9ae70b80111d3", "text": "1358\u20131368.\n[32] M. Ghaeli, \u201cPrice-to-earnings ratio: A state-of-art review,\u201d Accounting, vol. 3, no. 2, pp. 131\u2013136, 2017.\n[33] S. Agrawal, R. Monem, and M. Ariff, \u201cPrice to book ratio as a valuation model: An empirical investigation,\u201d Finance India, vol. 2, no. 10, pp. 333\u2013344, 1996.\n[34] J. P. O\u2019Shaughnessy, What works on Wall Street: A guide to the best-performing investment strategies of all time. McGraw-Hill, 1998.\n[35] E. Pagourtzi, K. Metaxiotis, K. Nikolopoulos, K. Giannelos, and V. Assimakopoulos, \u201cReal estate valuation with artificial intelligence approaches,\u201d ISTA, vol. 2, no. 1, pp. 50\u201357, 2007.\n[36] G. Karami and S. 
BeikBoshrouyeh, \u201cCorporate governance and equity valuation: the model by using artificial neural network,\u201d Journal of Accounting and Auditing Review, vol. 18, no. 64, pp. 129\u2013150, 2011.\n[37] G. Evangelopoulos, A. Zlatintsi, A. Potamianos, P. Maragos, K. Rapantzikos, G. Skoumas, and Y. Avrithis, \u201cMultimodal saliency and fusion for movie summarization based on aural, visual, and textual attention,\u201d TMM, vol. 15, no. 7, pp. 1553\u20131568, 2013.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "f40faf80-60cc-4fea-8e42-6eb0f03e2cfb": {"__data__": {"id_": "f40faf80-60cc-4fea-8e42-6eb0f03e2cfb", "embedding": null, "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e2a0dbaa-daf2-400a-8a52-560293fb0472", "node_type": "4", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d100d58e1005fb2c658803507071ecabcfe9ff3f9f7a07fc712974f29ac6a067"}, "2": {"node_id": "fcce7b07-dfff-4be7-8002-9a29584e0517", "node_type": "1", "metadata": {"page_label": "13", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "ae190f858fa4d18b3c0c7626707776253e21279281e2bf422ac9ae70b80111d3"}}, "hash": "de9920144a9e618ec266c6e715fce188b8781bfe644c4072baf944a8fe22f307", "text": "15, no. 7, pp. 1553\u20131568, 2013.\n[38] M. Gonen and E. Alpaydin, \u201cMultiple kernel learning algorithms,\u201d JMLR, vol. 12, pp. 2211\u20132268, 2011.\n[39] Z. Jin, J. Cao, H. Guo, Y. Zhang, and J. Luo, \u201cMultimodal fusion with recurrent neural networks for rumor detection on microblogs,\u201d in ACM MM, Mountain View, CA, 2017, pp. 795\u2013816.\n[40] Y. Yang, K. Wang, D. Zhan, H. Xiong, and Y. Jiang, \u201cComprehensive semi-supervised multi-modal learning,\u201d in IJCAI, Macao, China, 2019, pp. 4092\u20134098.\n[41] P. Cui, X. Wang, J. Pei, and W. Zhu, \u201cA survey on network embedding,\u201d TKDE, vol. 31, no. 5, pp. 833\u2013852, 2019.\n[42] Y. Sun, J. Han, C. C. Aggarwal, and N. V. Chawla, \u201cWhen will it happen?: relationship prediction in heterogeneous information networks,\u201d in WWW, Seattle, WA, 2012, pp. 663\u2013672.\n[43] B. Hu, C. Shi, W. X. Zhao, and P. S. 
Yu, \u201cLeveraging meta-path based context for top-N recommendation with a neural co-attention model,\u201d in KDD, London, UK, 2018,", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "10c704e9-4bac-4a12-9842-685048fb12c9": {"__data__": {"id_": "10c704e9-4bac-4a12-9842-685048fb12c9", "embedding": null, "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "fb50b1ba-bc1b-4d74-951a-22d5167d5ffb", "node_type": "4", "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "a8f60bb292a6a8a5a8797722564372a56ffb6de873685c2bc9b524a068f8aafa"}, "3": {"node_id": "32935be2-7368-48d5-a254-cea57dc8b5d5", "node_type": "1", "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "77a53e89e51b5cf3782de3e66f9b12eb3b824aeb72c47f56dc5cf3fb196214d7"}}, "hash": "bbb7ebe81a605be99f2f44d325e90348f14689f65f45db01e7e2493d8ce0ab24", "text": "pp. 1531\u20131540.\n[44] Z. Liu, C. Huang, Y. Yu, B. Fan, and J. Dong, \u201cFast attributed multiplex heterogeneous network embedding,\u201d in CIKM, Virtual Event, Ireland, 2020, pp. 995\u20131004.\n[45] B. Li, D. Pi, Y. Lin, I. A. Khan, and L. Cui, \u201cMulti-source information fusion based heterogeneous network embedding,\u201d Inf. Sci., vol. 534, pp. 53\u201371, 2020.\nYang Yang received the Ph.D. degree in computer science, Nanjing University, China in 2019. In the same year, he became a faculty member at Nanjing University of Science and Technology, China. 
He is currently a Professor with the School of Computer Science and Engineering. His research interests lie primarily in machine learning and data mining, including heterogeneous learning, model reuse, and incremental mining. He has published over 10 papers in leading international journals/conferences. He serves as a PC member for leading conferences such as IJCAI, AAAI, ICML, NIPS, etc.\nJia-Qi Yang is working towards the M.Sc.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "32935be2-7368-48d5-a254-cea57dc8b5d5": {"__data__": {"id_": "32935be2-7368-48d5-a254-cea57dc8b5d5", "embedding": null, "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "fb50b1ba-bc1b-4d74-951a-22d5167d5ffb", "node_type": "4", "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "a8f60bb292a6a8a5a8797722564372a56ffb6de873685c2bc9b524a068f8aafa"}, "2": {"node_id": "10c704e9-4bac-4a12-9842-685048fb12c9", "node_type": "1", "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "bbb7ebe81a605be99f2f44d325e90348f14689f65f45db01e7e2493d8ce0ab24"}, "3": {"node_id": "e5a73dc6-e5be-4c60-b357-6805138bb333", "node_type": "1", "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "1c61bc7678dbf46dd50f1c45ee160fddd65cc73ea40af0ca46b1a2464c3256d4"}}, "hash": "77a53e89e51b5cf3782de3e66f9b12eb3b824aeb72c47f56dc5cf3fb196214d7", "text": "Jia-Qi Yang is working towards the M.Sc. degree with the National Key Lab for Novel Software Technology, the Department of Computer Science & Technology in Nanjing University, China. His research interests lie primarily in machine learning and data mining, including multi-modal learning.\nRan Bao is working towards the M.Sc. degree with the National Key Lab for Novel Software Technology, the Department of Computer Science & Technology in Nanjing University, China. His research interests lie primarily in machine learning and data mining.\nDe-Chuan Zhan received the Ph.D. 
degree in computer science, Nanjing University, China in 2010. In the same year, he became a faculty member in the Department of Computer Science and Technology at Nanjing University, China. He is currently a Professor with the Department of Computer Science and Technology at Nanjing University. His research interests are mainly in machine learning, data mining and mobile intelligence. He has published over 20 papers in leading international journals/conferences. He serves as an editorial board member of IDA and IJAPR, and as an SPC/PC member in leading conferences such as IJCAI, AAAI, ICML, NIPS, etc.\nHengShu Zhu (SM\u201919) is currently a principal data scientist & architect at Baidu Inc. He received the Ph.D. degree in 2014 and B.E. degree in 2009, both in Computer Science from University of Science and Technology of China (USTC), China. His general area of research is data mining and machine learning, with a focus on developing advanced data analysis techniques for innovative business applications.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "e5a73dc6-e5be-4c60-b357-6805138bb333": {"__data__": {"id_": "e5a73dc6-e5be-4c60-b357-6805138bb333", "embedding": null, "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "fb50b1ba-bc1b-4d74-951a-22d5167d5ffb", "node_type": "4", "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "a8f60bb292a6a8a5a8797722564372a56ffb6de873685c2bc9b524a068f8aafa"}, "2": {"node_id": "32935be2-7368-48d5-a254-cea57dc8b5d5", "node_type": "1", "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "77a53e89e51b5cf3782de3e66f9b12eb3b824aeb72c47f56dc5cf3fb196214d7"}, "3": {"node_id": "69ed6cbc-effa-44cb-b100-ea1cace6e492", "node_type": "1", "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "17f967d82e52c7268091fdcee2fcd5500d83e0ede3cb33a0cedb0067d93f771e"}}, "hash": 
"1c61bc7678dbf46dd50f1c45ee160fddd65cc73ea40af0ca46b1a2464c3256d4", "text": "He\nhas published proli\ufb01cally in refereed journals and\nconference proceedings, including IEEE Trans-\nactions on Knowledge and Data Engineering (TKDE), IEEE Transac-\ntions on Mobile Computing (TMC), ACM Transactions on Information\nSystems (ACM TOIS), ACM Transactions on Knowledge Discovery from\nData (TKDD), ACM SIGKDD, ACM SIGIR, WWW, IJCAI, and AAAI. He\nhas served regularly on the organization and program committees of\nnumerous conferences, including as a program co-chair of the KDD\nCup-2019 Regular ML Track, and a founding co-chair of the \ufb01rst In-\nternational Workshop on Organizational Behavior and Talent Analytics\n(OBTA) and the International Workshop on Talent and Management\nComputing (TMC), in conjunction with ACM SIGKDD. He was the re-\ncipient of the Distinguished Dissertation Award of CAS (2016), the\nDistinguished Dissertation Award of CAAI (2016), the Special Prize of\nPresident Scholarship for Postgraduate Students of CAS (2014), the\nBest Student Paper Award of KSEM-2011, WAIM-2013, CCDM-2014,\nand the Best Paper Nomination of ICDM-2014. He is the senior member\nof IEEE, ACM, and CCF .\nXiao-Ru Gao is working towards the Ph.D. de-\ngree in Marketing at Rutgers, the State Univer-\nsity of New Jersey, US. Her research interests lie\nprimarily in data mining, social networking, es-\npecially in\ufb02uencer marketing, and brand image\nperception.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "69ed6cbc-effa-44cb-b100-ea1cace6e492": {"__data__": {"id_": "69ed6cbc-effa-44cb-b100-ea1cace6e492", "embedding": null, "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "fb50b1ba-bc1b-4d74-951a-22d5167d5ffb", "node_type": "4", "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "a8f60bb292a6a8a5a8797722564372a56ffb6de873685c2bc9b524a068f8aafa"}, "2": {"node_id": "e5a73dc6-e5be-4c60-b357-6805138bb333", "node_type": "1", "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "1c61bc7678dbf46dd50f1c45ee160fddd65cc73ea40af0ca46b1a2464c3256d4"}, "3": {"node_id": "c48f1ef4-0130-488b-a028-7f883c4a6430", "node_type": "1", "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using 
Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "4eb08cd23638864d6c8b13920fa3f4f823417fc357bcb9fb966788ddd256b678"}}, "hash": "17f967d82e52c7268091fdcee2fcd5500d83e0ede3cb33a0cedb0067d93f771e", "text": "Hui Xiong (Fellow) is currently a Full Professor at Rutgers, the State University of New Jersey, where he received the 2018 Ram Charan Management Practice Award as the Grand Prix winner from the Harvard Business Review, the RBS Dean\u2019s Research Professorship (2016), the Rutgers University Board of Trustees Research Fellowship for Scholarly Excellence (2009), the ICDM Best Research Paper Award (2011), and the IEEE ICDM Outstanding Service Award (2017). He received the Ph.D. degree from the University of Minnesota (UMN), USA. He is a co-Editor-in-Chief of Encyclopedia of GIS, an Associate Editor of IEEE Transactions on Big Data (TBD), ACM Transactions on Knowledge Discovery from Data (TKDD), and ACM Transactions on Management Information Systems (TMIS). He has served regularly on the organization and program committees of numerous conferences, including as a Program Co-Chair of the Industrial and Government Track for the 18th ACM SIGKDD International Conference on Knowledge Discovery and Data Mining (KDD), a Program Co-Chair for the IEEE 2013 International Conference on Data Mining (ICDM), a General Co-Chair for the IEEE 2015 International Conference on Data Mining (ICDM), and a Program Co-Chair of the Research Track for the 2018 ACM SIGKDD International Conference on Knowledge Discovery and Data Mining. He is an IEEE Fellow and an ACM Distinguished Scientist.\nJian Yang (M\u201908) received the Ph.D. degree in pattern recognition and intelligence systems from the Nanjing University of Science and Technology (NUST), Nanjing, China, in 2002. 
In 2003, he was a Post-Doctoral Researcher with the University of Zaragoza, Zaragoza, Spain.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "c48f1ef4-0130-488b-a028-7f883c4a6430": {"__data__": {"id_": "c48f1ef4-0130-488b-a028-7f883c4a6430", "embedding": null, "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "fb50b1ba-bc1b-4d74-951a-22d5167d5ffb", "node_type": "4", "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "a8f60bb292a6a8a5a8797722564372a56ffb6de873685c2bc9b524a068f8aafa"}, "2": {"node_id": "69ed6cbc-effa-44cb-b100-ea1cace6e492", "node_type": "1", "metadata": {"page_label": "14", "file_name": "Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "file_path": "docs\\Corporate Relative Valuation Using Heterogeneous MultiModal Graph Neural Network Yang et al.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "17f967d82e52c7268091fdcee2fcd5500d83e0ede3cb33a0cedb0067d93f771e"}}, "hash": "4eb08cd23638864d6c8b13920fa3f4f823417fc357bcb9fb966788ddd256b678", "text": "From 2004 to 2006, he was a Post-Doctoral Fellow with the Biometrics Centre, The Hong Kong Polytechnic University, Hong Kong. From 2006 to 2007, he was a Post-Doctoral Fellow with the Department of Computer Science, New Jersey Institute of Technology, Newark, NJ, USA. He is currently a Chang-Jiang Professor with the School of Computer Science and Engineering, NUST. He has authored more than 200 scientific papers in pattern recognition and computer vision. His papers have been cited more than 6000 times in the Web of Science and 15,000 times in Google Scholar. His current research interests include pattern recognition, computer vision, and machine learning. Dr. Yang is a Fellow of IAPR. He is currently an Associate Editor of Pattern Recognition, Pattern Recognition Letters, the IEEE TRANSACTIONS ON NEURAL NETWORKS AND LEARNING SYSTEMS, and Neurocomputing.
", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "8321641d-8705-4c4f-b92b-2a5a93e98665": {"__data__": {"id_": "8321641d-8705-4c4f-b92b-2a5a93e98665", "embedding": null, "metadata": {"page_label": "474", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "ae82459d-41ef-4ac9-9820-3eb69e0be922", "node_type": "4", "metadata": {"page_label": "474", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "aee6cf38a4e5eabdb63c856c12d762982faa68814c4c26a216e7e82966f4b677"}, "3": {"node_id": "f3c7a322-53a9-47b6-8a14-2c46258f7c89", "node_type": "1", "metadata": {"page_label": "474", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "859060cfb63f4a1c868e3b3c6d063a17af5874879a3abcb3d03f948737698f3f"}}, "hash": "777f104a4af5d3cb85ee2bbc2ad31c0a79defcbdc6ab56b2cc4cfdafd35e0ad7", "text": "From Gaps to Transformation Paths in Enterprise Architecture Planning\nPhilipp Diefenthaler (1,2) and Bernhard Bauer (2)\n1 Softplant GmbH, Munich, Germany\n2 Institute for Software & Systems Engineering, University of Augsburg, Augsburg, Germany\nphilipp.diefenthaler@softplant.de, Bernhard.Bauer@informatik.uni-augsburg.de\nAbstract. Planning changes in an enterprise and its supporting IT can be supported by enterprise architecture (EA) models. The planned changes result in gaps, which can be derived by a gap analysis. But knowing the gaps is not enough; it is also important to know in which sequence gaps are to be closed for transformation path planning. In this paper we show how gaps are identified and reused for detailing a model of the target architecture. Based on this refinement further gaps become visible. Furthermore, we describe how transformation paths towards a desired and detailed target architecture can be created with a transformation model and an action repository. Afterwards, we give a use case example and propose a technical realization of the solution.\nKeywords: Enterprise architecture planning \u00b7 Gap analysis \u00b7 Transformation model \u00b7 Graph transformation\n1 Introduction\nEnterprises nowadays face challenges like changing markets, security threats, evolving technologies and new regulations that drive the need to adapt the enterprise. Enterprise architecture management (EAM) supports this change in a structured manner. 
An enterprise architecture (EA) is the \u201cfundamental\norganization of a system [the enterprise] embodied in its components, their rela-tionships to each other, and to the environment, and the principles guiding its\ndesign and evolution\u201d [ 1].\nModels of this architecture can support decision making for planning pur-\nposes. Such EA models cover aspects from business, processes, integration, soft-\nware and technology [ 2].", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "f3c7a322-53a9-47b6-8a14-2c46258f7c89": {"__data__": {"id_": "f3c7a322-53a9-47b6-8a14-2c46258f7c89", "embedding": null, "metadata": {"page_label": "474", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "ae82459d-41ef-4ac9-9820-3eb69e0be922", "node_type": "4", "metadata": {"page_label": "474", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "aee6cf38a4e5eabdb63c856c12d762982faa68814c4c26a216e7e82966f4b677"}, "2": {"node_id": "8321641d-8705-4c4f-b92b-2a5a93e98665", "node_type": "1", "metadata": {"page_label": "474", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "777f104a4af5d3cb85ee2bbc2ad31c0a79defcbdc6ab56b2cc4cfdafd35e0ad7"}}, "hash": "859060cfb63f4a1c868e3b3c6d063a17af5874879a3abcb3d03f948737698f3f", "text": "To cope with the complexity of an EA it is crucial\nfor enterprises to use a managed approach to steer and control the redesign ofthe enterprise. The complexity arises from the level of abstraction, the num-\nber of stakeholders involved, and the change of internal and external conditions\ninherent to EAs.\nc\u20ddSpringer International Publishing Switzerland 2014\nS. Hammoudi et al. (Eds.): ICEIS 2013, LNBIP 190, pp. 
474\u2013489, 2014.DOI: 10.1007/978-3-319-09492-2\n28", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "9f7c6bcd-56bd-411f-9e27-03d9c7050384": {"__data__": {"id_": "9f7c6bcd-56bd-411f-9e27-03d9c7050384", "embedding": null, "metadata": {"page_label": "475", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e77f0ca8-a8fa-47af-85cd-aad9ca3cc25e", "node_type": "4", "metadata": {"page_label": "475", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "2f7788aa4b917d49a21e64d362ffc8d6188998d57f3ba44afd05a23057110c4d"}, "3": {"node_id": "68088673-ea16-4f8c-bbb4-9b5e6f6fdef5", "node_type": "1", "metadata": {"page_label": "475", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "c53f2e6e83687dbf33b5e1443e3138ac32ebf752eab8c83fba716bf23131b196"}}, "hash": "ad2858902754f42daea000681269e6a00b6e492aff2bb01116ab6a1995fe6216", "text": "From Gaps to Transformation Paths in Enterprise Architecture Planning 475\nTo plan the change it is necessary to have a plan basis, i.e. the current\narchitecture, and to know the goal of planning activities, i.e. the target architec-ture. According to [ 3,4] the planning activities take place at di\ufb00erent decision\nlevels. Each of them varies in detail and levels of abstraction seem to be inevitable\n[4]. The need to change and the resulting moving target are challenges for EA\nplanning, as part of the EAM, has to meet [ 5,6]. EAM and particularly EA plan-\nning is supported by tools which allow the creation of visualizations, automated\ndocumentation and analysis of EA models.\nIn this paper we describe how gaps can be derived from two EA models for\ndi\ufb00erent points in time. Furthermore, we introduce the transformation model by\nAier and Gleichauf [ 7] to connect architectural building blocks from the models\nof the current and target EA. With the results from gap analysis and the infor-\nmation contained in the transformation model we introduce an action repository\nfor the creation of di\ufb00erent transformation paths. We exemplify the solution to\nget from gaps to transformation paths based on a model of a current and target\narchitecture of an application architecture within a use case for a master dataconsolidation challenge. Furthermore, we propose a technical realization based\non semantic web technologies and graph transformations.\n2 Foundations\nThis section gives an introduction to the foundations of EA models and their\nusage for planning purposes. 
Furthermore, we introduce semantic web technolo-\ngies and graph transformations for planning purposes, as they are of relevance\nfor our proposed technical realization of the solution.\n2.1 Enterprise Architecture Models\nAccording to Buckl and Schweda [ 8] EAM follows a typical management cycle\nthat consists of the phases plan, do, check and act. The plan phase is concerned\nwith developing change proposals that are implemented in the do phase. Within\nthe check phase di\ufb00erences between intended and actually achieved results arecontrolled.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "68088673-ea16-4f8c-bbb4-9b5e6f6fdef5": {"__data__": {"id_": "68088673-ea16-4f8c-bbb4-9b5e6f6fdef5", "embedding": null, "metadata": {"page_label": "475", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "e77f0ca8-a8fa-47af-85cd-aad9ca3cc25e", "node_type": "4", "metadata": {"page_label": "475", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "2f7788aa4b917d49a21e64d362ffc8d6188998d57f3ba44afd05a23057110c4d"}, "2": {"node_id": "9f7c6bcd-56bd-411f-9e27-03d9c7050384", "node_type": "1", "metadata": {"page_label": "475", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "ad2858902754f42daea000681269e6a00b6e492aff2bb01116ab6a1995fe6216"}}, "hash": "c53f2e6e83687dbf33b5e1443e3138ac32ebf752eab8c83fba716bf23131b196", "text": "Within\nthe check phase di\ufb00erences between intended and actually achieved results arecontrolled. Based upon the results from the check phase the act phase provides\ninput to the plan phase by supplying information for the next plan phase. Models,\nas an abstraction mechanism, of an enterprise, can support the plan phase as\npart of an EAM approach [ 9,10].\nEA models can be used to describe an EA for di\ufb00erent points in time [ 8]. The\nmodel of the current architecture of the enterprise is a documented architecture\nat the present point in time and serves as a starting point for de\ufb01ning a model of\na target architecture. In contrast the model of the target architecture representsa desired architecture in the future which can be used to guide the development\nof an EA from the current towards a target architecture. The development of\na target architecture depends on the enterprises\u2019 EA goals. 
It is in\ufb02uenced bybusiness requirements, strategic goals and IT objectives like master data con-\nsolidation, improving the \ufb02exibility of IT and drive the coverage of standard\nplatforms [ 11].", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "63738b65-9843-4ff1-b6a1-8ddef353cd8c": {"__data__": {"id_": "63738b65-9843-4ff1-b6a1-8ddef353cd8c", "embedding": null, "metadata": {"page_label": "476", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "1dcb5cb9-fe5d-4ddc-85cd-2e85f79f60f7", "node_type": "4", "metadata": {"page_label": "476", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "5de25c19315462d3549f5e784b49d5fe540f2dd41be784136ea8f183d044841e"}, "3": {"node_id": "4ab7b75b-e6bf-46ec-b174-bfe670f1168a", "node_type": "1", "metadata": {"page_label": "476", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "644ea2c2c137e0109637a37ddb64d891d66f4806a850d3df4bd8fe4a0d6412ae"}}, "hash": "7fa30c61984a02a847addd85deac15f56d93a5ae09656cb1d778d80da7027429", "text": "476 P. Diefenthaler and B. Bauer\nWhich factors and how exactly they in\ufb02uence the target architecture depends\non the architecture method applied and how it is integrated into the enterprise\u2018sgovernance processes.\nA gap analysis, sometimes also referred to as delta analysis, is the comparison\nbetween two models of an EA that is used to clarify the di\ufb00erences betweenthose two architectures. Di\ufb00erent models of architectures that can be compared\nare current to target, current to planned, planned to target and planned to\nplanned [ 8].\n2.2 Semantic Web Technologies in a Nutshell\nSemantic web technologies are used to integrate heterogeneous data sets and\nformalize the underlying structure of the information to allow a machine to\nunderstand the semantics of it [ 12]. The World Wide Web Consortium (W3C)\nprovides a set of standards to describe an ontology and to query it. 
An ontology\u201cis a set of precise descriptive statements about some part of the world (usually\nreferred to as the domain of interest or the subject matter of the ontology)\u201d [ 13].\nTwo standards are of relevance for a proposed technical realization: \ufb01rstly,\nthe Web Ontology Language (OWL) [ 13] for making descriptive statements and\nsecondly, the SPARQL Query Language for RDF (SPARQL) [ 14], which allows\nquerying these statements.\nThe Resource Description Framework (RDF) [ 15] is a basis for both stan-\ndards, as OWL ontologies can be serialized as RDF graphs and can be accessed\nvia SPARQL. An RDF graph consists of triples of the form \u2018subject, predicate,object\u2019, whereas subjects and objects are nodes and predicates are relations.\nEvery resource in an ontology is identi\ufb01ed by a resource identi\ufb01er which allows\nfor example distinguishing between a bank in a \ufb01nancial context and a bank ofa river.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "4ab7b75b-e6bf-46ec-b174-bfe670f1168a": {"__data__": {"id_": "4ab7b75b-e6bf-46ec-b174-bfe670f1168a", "embedding": null, "metadata": {"page_label": "476", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "1dcb5cb9-fe5d-4ddc-85cd-2e85f79f60f7", "node_type": "4", "metadata": {"page_label": "476", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "5de25c19315462d3549f5e784b49d5fe540f2dd41be784136ea8f183d044841e"}, "2": {"node_id": "63738b65-9843-4ff1-b6a1-8ddef353cd8c", "node_type": "1", "metadata": {"page_label": "476", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "7fa30c61984a02a847addd85deac15f56d93a5ae09656cb1d778d80da7027429"}}, "hash": "644ea2c2c137e0109637a37ddb64d891d66f4806a850d3df4bd8fe4a0d6412ae", "text": "Information from the ontology is queried via SPARQL, which provides\nthe resources that match patterns speci\ufb01ed within the query.\nSemantic web technologies have already been applied to domains of interest\nthat range from semantic business process modeling [ 16] to diagnosis of embed-\nded systems [ 17]. First implementations based upon semantic web technologies\nfor EAM already exist from TopQuadrant with its TopBraid Composer\n1and\nEssential Project2.\n2.3 Graph Transformations for Planning Purposes\nSeveral di\ufb00erent approaches, techniques and representations to planning prob-\nlems have been developed over the last decades [ 18,19]. 
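For readers who want to see the triple structure of Sect. 2.2 in practice, the following is a minimal sketch using the Python rdflib library; the ea: namespace, the resource names and the dependsOn predicate are illustrative assumptions, not taken from the paper or its tooling.

```python
from rdflib import Graph, Namespace
from rdflib.namespace import RDF

EA = Namespace("http://example.org/ea#")  # hypothetical namespace

g = Graph()
# Triples of the form (subject, predicate, object)
g.add((EA.CRMSystem, RDF.type, EA.Application))
g.add((EA.Billing, RDF.type, EA.Application))
g.add((EA.CRMSystem, EA.dependsOn, EA.Billing))

# SPARQL returns the resources that match the pattern in the query
query = """
    PREFIX ea: <http://example.org/ea#>
    SELECT ?target WHERE { ea:CRMSystem ea:dependsOn ?target . }
"""
for row in g.query(query):
    print(row.target)  # -> http://example.org/ea#Billing
```

The resource identifiers make each node unambiguous, which is exactly what allows the bank/river-bank distinction mentioned above.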
These approaches range\nfrom state space model based planning to task networks, where tasks for reachinga goal are decomposed and sequenced. A state space based approach is prefer-\nable, because models of the current and target architecture are used in many\nEAM approaches [ 5,11,20,21] and are present in tools used in practice [ 22].\n1www.topquadrant.com/docs/whitepapers/WP-BuildingSemanticEASolutions\n-withTopBraid.pdf\n2www.enterprise-architecture.org/", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "bab46858-35ad-4e50-8cdf-434b6ae20645": {"__data__": {"id_": "bab46858-35ad-4e50-8cdf-434b6ae20645", "embedding": null, "metadata": {"page_label": "477", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "f53299ea-9b05-4449-a158-4c5ea426befc", "node_type": "4", "metadata": {"page_label": "477", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d167f69365feeebf08d4e5311c0293ff15b57098d968240bfde1be734a32b9a1"}, "3": {"node_id": "95228c59-35b1-46a1-92e6-59449d559e11", "node_type": "1", "metadata": {"page_label": "477", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "8c5397da689efade87c6c3f0f5668d8daeccfb6654500cb0704fd7427460729c"}}, "hash": "34bd881779db752ec2937e948a5c23f17380370515419b19772d866aa0e462a0", "text": "From Gaps to Transformation Paths in Enterprise Architecture Planning 477\nGraph transformations for AI planning purposes solve a planning problem\nby applying graph transformations to a model until a solution for the planningproblem is found. The result of such a planning process is a sequence of actions\nchanging a model into another model.\nHowever, graph transformations have the disadvantage that they provide a\nhuge state space regarding the states, which have to be examined when all states\nin the graph are computed. As a consequence this in\ufb02uences the computation\ntime of all possible worlds created through the transformations. With graphtransformations a planning problem can be solved by searching for graph pat-\nterns in a state represented by a graph and applying graph transformations to\nchange the state [ 23]. Graph transformations have the bene\ufb01t that they have a\nsound theoretical foundation [ 24].\n3 From Gaps to Transformation Paths as Sequences\nof Actions\nThe goal of the proposed approach is to deliver a more detailed model of the\ntarget architecture by making suggestions to a domain expert how a detailed\ntarget architecture could look like. 
Afterwards, we describe how these gaps are\nrelated to each other to generate a transformation path which allows to structurechange activities, which close gaps, in sequence of actions.\n3.1 Modeling Current and Target Architecture\nFirst, a current architecture is modeled and afterwards, a target architecture is\nmodeled, at the same level of detail. We reuse the model of the current architec-\nture and change it to the desired target architecture. The same level of detail is\nnecessary to ensure the comparability of the models.\nThe current architecture may be more detailed, but can be aggregated in a\nway which restores the comparability [ 25]. Business support maps, which relate\napplications to supported processes and organization units, are an example forsuch a model with the same level of detail [ 11].\nResults of the Modeling.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "95228c59-35b1-46a1-92e6-59449d559e11": {"__data__": {"id_": "95228c59-35b1-46a1-92e6-59449d559e11", "embedding": null, "metadata": {"page_label": "477", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "f53299ea-9b05-4449-a158-4c5ea426befc", "node_type": "4", "metadata": {"page_label": "477", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d167f69365feeebf08d4e5311c0293ff15b57098d968240bfde1be734a32b9a1"}, "2": {"node_id": "bab46858-35ad-4e50-8cdf-434b6ae20645", "node_type": "1", "metadata": {"page_label": "477", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "34bd881779db752ec2937e948a5c23f17380370515419b19772d866aa0e462a0"}}, "hash": "8c5397da689efade87c6c3f0f5668d8daeccfb6654500cb0704fd7427460729c", "text": "Results of the Modeling. The result of this phase are the two sets:\ncurrentArchitecture =model of the current architecture of the EA\ntargetArchitecture =model of the target architecture of the EA\nIn our solution the core of an EA model is a set which consists of three di\ufb00erent\ntypes of elements. The EA model contains the architecture building blocks (B)\nof the EA, relations between architecture building blocks (R)and attributes of\narchitecture building blocks (A). 
In this sense an EA model can be defined as:\nM := {B \u222a R \u222a A}\nB := {x | x is an architecture building block}\nR := {x | x \u2208 B \u00d7 B} and A := {x | x \u2208 B \u00d7 V}", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "7f5c8cb8-aa49-454b-8207-07e69b5210f5": {"__data__": {"id_": "7f5c8cb8-aa49-454b-8207-07e69b5210f5", "embedding": null, "metadata": {"page_label": "478", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "495e4cc2-2841-4f5f-81da-e858fa85f927", "node_type": "4", "metadata": {"page_label": "478", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "82b05d0d1fd143fc24f18c451af7e1ea6dd510b3940358ac556bdeba9575a469"}, "3": {"node_id": "7a766a69-d441-4edd-a442-d9bc9d0228d6", "node_type": "1", "metadata": {"page_label": "478", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "37f5b16cabc14d7773d37a02b8c5a80aec9a924f352257d6244a15b3a9976a2c"}}, "hash": "e0f1e7362d215e7d2563db6a8e5aee677ee27867bbeb68f88beb80b3c807804a", "text": "Architecture building blocks stand for the elements of the EA, for instance a Customer Relationship Management application within the application architecture. Relations hold between these architecture building blocks, for example when an application depends on another application the respective building blocks are connected by a dependency relation. Attributes are values associated with architecture building blocks that characterize measurable and observable characteristics of the architecture building block, e.g. the release number of an application or the uptime of a service.\n3.2 Performing Gap Analysis\nGap analysis is performed to compare the modeled current and target architecture. It compares the differences between currentArchitecture and targetArchitecture. In terms of set operations this comparison corresponds to taking the intersection and the two differences of the compared sets. As a result three subsets are identified: onlyCurrentArchitecture, onlyTargetArchitecture and stable.\nResults of Gap Analysis. 
onlyCurrentArchitecture is the set of building blocks, relations and attributes which only exist in the model of the current architecture.\nonlyCurrentArchitecture := {x | x \u2208 currentArchitecture \u2227 x \u2209 targetArchitecture}\nIn contrast, onlyTargetArchitecture is the set of building blocks, relations and attributes which only exist in the target architecture.\nonlyTargetArchitecture := {x | x \u2209 currentArchitecture \u2227 x \u2208 targetArchitecture}\nThe third set stable is the set of building blocks, relations and attributes which the current and target architecture have in common.\nstable := {x | x \u2208 currentArchitecture \u2227 x \u2208 targetArchitecture}\n3.3 Setting the Successor Relationships for Building Blocks\nThe successor relationships are modelled within a transformation model [ 7].", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "7a766a69-d441-4edd-a442-d9bc9d0228d6": {"__data__": {"id_": "7a766a69-d441-4edd-a442-d9bc9d0228d6", "embedding": null, "metadata": {"page_label": "478", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "495e4cc2-2841-4f5f-81da-e858fa85f927", "node_type": "4", "metadata": {"page_label": "478", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "82b05d0d1fd143fc24f18c451af7e1ea6dd510b3940358ac556bdeba9575a469"}, "2": {"node_id": "7f5c8cb8-aa49-454b-8207-07e69b5210f5", "node_type": "1", "metadata": {"page_label": "478", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "e0f1e7362d215e7d2563db6a8e5aee677ee27867bbeb68f88beb80b3c807804a"}}, "hash": "37f5b16cabc14d7773d37a02b8c5a80aec9a924f352257d6244a15b3a9976a2c", "text": "The transformation model is defined as follows:\ntransformationModel := {x | x \u2208 currentArchitecture \u00d7 targetArchitecture}\nWith the successor relationships at hand it is possible to identify the successor type for building blocks, which can be divided into noSuccessor, noPredecessor, oneToOne, oneToMany, manyToOne, and manyToMany. The inverse of the successor relation is the predecessor relation.\nAll building blocks in onlyCurrentArchitecture that do not have a successor belong to the set noSuccessor. All building blocks that belong to onlyTargetArchitecture and do not have a predecessor belong to the set noPredecessor. 
The set\noneToOne consists of the pairs of building blocks that have exactly one successor\nand this successor has only one predecessor.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "1553f926-2e53-4d00-87de-e94c0769dd93": {"__data__": {"id_": "1553f926-2e53-4d00-87de-e94c0769dd93", "embedding": null, "metadata": {"page_label": "479", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "730f6711-eabb-4590-9fab-575896022f6c", "node_type": "4", "metadata": {"page_label": "479", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "3f51f64cdd0021155737e7826c409d607939518f65e164e3ee555f38594170fc"}, "3": {"node_id": "22d51c10-fe03-4329-bc30-7b011e523841", "node_type": "1", "metadata": {"page_label": "479", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "327769e5f7bc05ff01d5f71874c43576108d68967503623fcb27bc7814c39d17"}}, "hash": "57f9c2ea68a8fdce036a9aafde6856ce7f66c43af0bfbecbb7ef933373ecb7a8", "text": "From Gaps to Transformation Paths in Enterprise Architecture Planning 479\noneToMany is the set of building blocks that have several successors in the\ntarget architecture whereas the set manyToOne is the set of building blocks\nwhich have the same successor in the target architecture. manyToMany is the\nset of building blocks which have common successors, which in turn have several\npredecessors. By querying the models we can determine to which set a buildingblock belongs.\nA successor relationship is part of exactly one of the above subsets. Within\nthe six di\ufb00erent sets disjoint subsets exist. For the noSuccessor and noPredeces-\nsorset each building block represents a disjoint subset and are planned inde-\npendently in contrast to the other successor sets. This is an implicit information\nof the transformation model, as we do not model self-directed relations for thisinformation.\n3.4 Creating Suggestions for a Detailed Target Architecture\nIn order to make suggestions the model of the current architecture considers\napplications, services and business building blocks. Business Building Blocks are\nin a tight relationship with the business activities of an enterprise but implemen-tation independent. 
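The gap-analysis sets of Sect. 3.2 and the successor typing of Sect. 3.3 can be made concrete with plain Python sets; the building-block names and successor pairs below are hypothetical, and only two of the six successor types are spelled out.

```python
# Hypothetical building blocks; a full EA model would also carry
# relations and attributes (see Sect. 3.1).
currentArchitecture = {"DevManager", "ProductPlanner", "DMDM"}
targetArchitecture = {"DMDM", "UnifiedPlanner"}

# Gap analysis as set operations (Sect. 3.2)
onlyCurrentArchitecture = currentArchitecture - targetArchitecture
onlyTargetArchitecture = targetArchitecture - currentArchitecture
stable = currentArchitecture & targetArchitecture

# Transformation model: (predecessor, successor) pairs (Sect. 3.3)
transformationModel = {("DevManager", "DMDM"),
                       ("ProductPlanner", "UnifiedPlanner")}

def successors(b):
    return {t for (s, t) in transformationModel if s == b}

def predecessors(b):
    return {s for (s, t) in transformationModel if t == b}

noSuccessor = {b for b in onlyCurrentArchitecture if not successors(b)}
noPredecessor = {b for b in onlyTargetArchitecture if not predecessors(b)}
oneToOne = {(s, t) for (s, t) in transformationModel
            if successors(s) == {t} and predecessors(t) == {s}}
```

The remaining types (oneToMany, manyToOne, manyToMany) follow the same pattern by comparing the sizes of successors(b) and predecessors(b).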
With the detailed information of the current architecture\nand the successor relationships at hand for applications it is possible to generate\nsuggestions how a model of a detailed target architecture could look like.\nEach application belongs to exactly one subset of the transformation model.\nDi\ufb00erent suggestions are made for the subsets how to detail the target archi-\ntecture. By following a suggestion the target is stepwise getting more detailed,as all sets of successor relationships are getting processed. A suggestion may\nbe inappropriate for a domain expert she can overrule it by modeling di\ufb00erent\ndetails. The result is a model of a detailed target architecture. At \ufb01rst all ser-vices are transferred to the model of a detailed target architecture. Then the\ndependencies can be added to the model of the detailed target architecture.\nSuggestions for Provided Services.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "22d51c10-fe03-4329-bc30-7b011e523841": {"__data__": {"id_": "22d51c10-fe03-4329-bc30-7b011e523841", "embedding": null, "metadata": {"page_label": "479", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "730f6711-eabb-4590-9fab-575896022f6c", "node_type": "4", "metadata": {"page_label": "479", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "3f51f64cdd0021155737e7826c409d607939518f65e164e3ee555f38594170fc"}, "2": {"node_id": "1553f926-2e53-4d00-87de-e94c0769dd93", "node_type": "1", "metadata": {"page_label": "479", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "57f9c2ea68a8fdce036a9aafde6856ce7f66c43af0bfbecbb7ef933373ecb7a8"}}, "hash": "327769e5f7bc05ff01d5f71874c43576108d68967503623fcb27bc7814c39d17", "text": "Suggestions for Provided Services.\n1.noSuccessor set: for each provided service in the current architecture check\nif it is used by an application that is part of the target architecture or theconsuming application has a successor relationship.\n(a) If there are any applications it is necessary to check if they still can work\nproperly without consuming the service.\n(b) Otherwise, no information from the current architecture is added to the\ntarget architecture.\n2.noPredecessor set: it is not possible to suggest a detail for the target architec-\nture as there exists no detail in the current architecture. 
A manual addition of\nprovided services and their business building blocks in the target architecture\nis necessary.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "92e8d650-e363-4d70-a1c0-e748c5393256": {"__data__": {"id_": "92e8d650-e363-4d70-a1c0-e748c5393256", "embedding": null, "metadata": {"page_label": "480", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "f3cb11e5-8af0-4129-9192-4b58bde83b30", "node_type": "4", "metadata": {"page_label": "480", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "a5553ea6ed07b8e093c44b85d94740f40d50a4c641b848483c962fff81cd2291"}, "3": {"node_id": "48de3e95-ec15-491e-865b-568f9e614cc9", "node_type": "1", "metadata": {"page_label": "480", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f44733c47a39bcb9a159f2952a09d84ae4068c64a4dd3ee59cf910cbfa7e299f"}}, "hash": "18417ca71c2b53898fd417ce94f1fbd456b0137e9664e9a6f394e773f96ebf2e", "text": "480 P. Diefenthaler and B. Bauer\n3.oneToMany set:\n(a) If the predecessor is part of onlyCurrentArchitecture all provided services\nof the predecessor, including their business building blocks, are suggested\nto be provided by one of the successor applications.\n(b) Otherwise, all provided services and business building blocks of the pre-\ndecessor are suggested to be provided by one of the successor applications\nor the remaining part of the predecessor in the target architecture.\n4.manyToOne set:\n(a) If the successor is part of onlyTargetArchitecture it is suggested to provide\neach service of its successors, but only one per business building block.\n(b) Otherwise, it is suggested that the successor provides the services already\nprovided in the current architecture, i.e. by itself, and provide all services\nof the other predecessors, but only one per business building block.\n5.manyToMany set: All provided services are suggested to be provided by one of\nthe successors. If more than one predecessor provides a service with the same\nbusiness building block the suggestion is to provide only one service in thetarget architecture with such a business building block. Further suggestions\nwere not identi\ufb01ed as this type represents a complex type of restructuring.\nNevertheless, the domain expert should be supported with information aboutapplications changing business support and assigned customer groups. 
Fur-\nthermore, information which applications belong to onlyCurrentArchitecture\nand onlyTargetArchitecture needs to be presented to the domain expert.\n6.oneToOne set: all services, including their business building blocks, provided\nby the predecessor are suggested to be provided by the successor.\n7. Furthermore, the domain expert can model additional services or let suggested\nservices be provided by an application that is not a successor of the application\nthat provided it in the current architecture.\n8. For each service information is stored if it is the successor of one or more\nservices in the current architecture. This is necessary to allow a sound trans-\nformation planning [ 9].\nAs a result all provided services have been modeled in the target architecture\nincluding their business building blocks.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "48de3e95-ec15-491e-865b-568f9e614cc9": {"__data__": {"id_": "48de3e95-ec15-491e-865b-568f9e614cc9", "embedding": null, "metadata": {"page_label": "480", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "f3cb11e5-8af0-4129-9192-4b58bde83b30", "node_type": "4", "metadata": {"page_label": "480", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "a5553ea6ed07b8e093c44b85d94740f40d50a4c641b848483c962fff81cd2291"}, "2": {"node_id": "92e8d650-e363-4d70-a1c0-e748c5393256", "node_type": "1", "metadata": {"page_label": "480", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "18417ca71c2b53898fd417ce94f1fbd456b0137e9664e9a6f394e773f96ebf2e"}}, "hash": "f44733c47a39bcb9a159f2952a09d84ae4068c64a4dd3ee59cf910cbfa7e299f", "text": "Furthermore, the information aboutsuccessor relationships of the services is available.\nSuggestions for Used Services.\n1.manyToMany set: all used services of predecessors are suggested to be used\nby at least one successor. 
The domain expert can choose if more than one\nsuccessor uses the service of a predecessor.\n2.oneToOne set: all services used by the predecessor are suggested to be used\nby the successor.\n3.manyToOne set: used services of the predecessors are suggested to be also\nused in the target architecture.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "655edc73-5ce9-4802-9c63-0f7f4b2b79d4": {"__data__": {"id_": "655edc73-5ce9-4802-9c63-0f7f4b2b79d4", "embedding": null, "metadata": {"page_label": "481", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "56f26472-42eb-4429-b4c7-024d54499981", "node_type": "4", "metadata": {"page_label": "481", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "4ad5c5d20a0766e3c50e1d4af23dfb8e23882a51c39751faad138e202e6a1fb4"}, "3": {"node_id": "eaaeb217-2fa0-4fbf-8787-a23d9f4b23a9", "node_type": "1", "metadata": {"page_label": "481", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "160b256a10df27ca7df01c54a12bd959d0354013226a513e2c747ac690ccf2b2"}}, "hash": "90731e741d544b73dd697485e5ec78d563cff3a410a8852f6332573539fd9bbe", "text": "From Gaps to Transformation Paths in Enterprise Architecture Planning 481\n4.oneToMany set:\n(a) If the predecessor is part of onlyCurrentArchitecture all used services of\nthe predecessor are suggested to be used by one of the successor applica-\ntions.\n(b) Otherwise, all used services of the predecessor are suggested to be used by\none of the successor applications or the remaining part of the predecessor\nin the target architecture.\n5.noPredecessor set: which services are used by the application need to be mod-\neled manually as no information from the model of the current architecture\nis available.\n6.noSuccessor set: as the application does not exist in the model of the target\narchitecture no information about used services needs to be added to the\ntarget architecture.\n7. Furthermore, the domain expert can model additionally used services for\nevery application.\nResults of the Guided Re\ufb01nement. The result is a model of a detailed target\narchitecture including provided and used services with related business building\nblocks. Consistency checks can be performed on the model to check whetherservices exist which are provided but no longer used by any application. 
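A sketch of the consistency check just mentioned, assuming the detailed target model exposes provided and used as sets of (application, service) pairs; this representation is an assumption for illustration.

```python
def orphaned_services(provided, used):
    """Services that some application provides but no application uses."""
    return {svc for (_, svc) in provided} - {svc for (_, svc) in used}

# Hypothetical example: MasterData v1 is provided but no longer used.
provided = {("DMDM", "MasterData v1"), ("DMDM", "MasterData v2")}
used = {("UnifiedPlanner", "MasterData v2")}
print(orphaned_services(provided, used))  # {'MasterData v1'}
```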
Gap\nanalysis can be performed again and the detailed gaps between the models of\nthe current and target architecture are available.\nWith the results of gap analysis and a detailed current architecture it is\npossible to assist a domain expert in modeling a detailed target architectureby making suggestions how to detail it based on the current architecture. The\nvariety of suggestions that can be provided is limited to the information available\nin the EA model. For example, technical information about the services canbe added to allow more sophisticated suggestions, like to prefer web service\ntechnology for services of applications that have to be build.\n3.5 Creating an Action Repository\nBefore the transformation path from the current to the target architecture can\nbe created, it is necessary to describe possible changes in a way which allows the\nsequencing of actions. This is realized with an action repository where abstractactions are modeled. An abstract action consists of two parts.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "eaaeb217-2fa0-4fbf-8787-a23d9f4b23a9": {"__data__": {"id_": "eaaeb217-2fa0-4fbf-8787-a23d9f4b23a9", "embedding": null, "metadata": {"page_label": "481", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "56f26472-42eb-4429-b4c7-024d54499981", "node_type": "4", "metadata": {"page_label": "481", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "4ad5c5d20a0766e3c50e1d4af23dfb8e23882a51c39751faad138e202e6a1fb4"}, "2": {"node_id": "655edc73-5ce9-4802-9c63-0f7f4b2b79d4", "node_type": "1", "metadata": {"page_label": "481", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "90731e741d544b73dd697485e5ec78d563cff3a410a8852f6332573539fd9bbe"}}, "hash": "160b256a10df27ca7df01c54a12bd959d0354013226a513e2c747ac690ccf2b2", "text": "An abstract action consists of two parts. One part speci\ufb01es\nthe preconditions for an action to be applicable. The other part is the e\ufb00ect part,\nwhich speci\ufb01es the changes to an architecture model if an (abstract) action is\napplied to it.\nThe creation of the action repository is only done once as the actions are\ndescribed on an abstract level. However, if the meta-model of the EA changes\nthe actions in the action repository need to be checked if they are impacted by\nthese changes.\nIn a technical sense the abstract action matches via a graph pattern into the\nconcrete model of the di\ufb00erent states. 
Concrete actions relate to concrete entities", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "83e0514c-8117-4af1-a568-e05c05cac038": {"__data__": {"id_": "83e0514c-8117-4af1-a568-e05c05cac038", "embedding": null, "metadata": {"page_label": "482", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "014f9f76-25c6-49c0-9684-97ff451b3a3f", "node_type": "4", "metadata": {"page_label": "482", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "15672c9e516b1d378e4664126899f48bc6dc97265e02e44fa6195f7c530592fd"}, "3": {"node_id": "e9c43a6e-719e-4651-8a63-20dbd18f957c", "node_type": "1", "metadata": {"page_label": "482", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "7b95909313a55486156fa06078c42f86de008e2298843ef9cb11250b934549b4"}}, "hash": "13894aa8a1948c7959aeec130722746bb4b1fe01f8a7df4a58f950b666c2d6c7", "text": "482 P. Diefenthaler and B. Bauer\nand relationships in an architecture model and concrete changes to the state of\narchitecture models. The application of a concrete action to an architecturemodel, may enable the application of several other concrete actions.\nAbstract actions are either atomic or composed. An atomic action changes\nexactly one element of either currentArchitecture ortargetArchitecture . Com-\nposed actions are a composition of other actions, regardless if atomic or com-\nposed. To create a transformation path it is necessary to model at least abstract\nactions for shutting down and developing building blocks and abstract actionsthat take care of the relationships between the building blocks and the attributes\nof the building blocks.\nLogical Order of Abstract Actions. The abstract actions are modeled in\na logical order, which means that it is only possible to apply the action if the\npreceding actions were already applied. For example, it is not possible to changethe dependencies from a service to its successor service if it has not yet been\nbuilt. Furthermore, it may be necessary to build the application \ufb01rst to allow the\ncreation of a new service. After the dependencies of a service have been changed\nto a successor it is possible to shutdown the service.\nIf all services of an application have been shutdown it is possible to shutdown\nthe application. The logical order prevents the creation of loops in the transfor-\nmation path, i.e. to shutdown and create the same application several times. 
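A minimal sketch of an action repository entry as just described: an abstract action bundling a precondition part and an effect part over a planning state. The encoding of the state as a set of facts and the two action definitions are assumptions for illustration, not the authors' implementation.

```python
from dataclasses import dataclass
from typing import Callable

State = set  # a state: set of facts such as ("exists", "DMDM")

@dataclass(frozen=True)
class AbstractAction:
    name: str
    precondition: Callable[[State, str], bool]
    effect: Callable[[State, str], State]

develop_application = AbstractAction(
    name="develop application",
    # applicable only if the building block does not yet exist
    precondition=lambda state, b: ("exists", b) not in state,
    # the effect adds the building block to the architecture
    effect=lambda state, b: state | {("exists", b)},
)

shutdown_application = AbstractAction(
    name="shutdown application",
    precondition=lambda state, b: ("exists", b) in state,
    effect=lambda state, b: state - {("exists", b)},
)
```

The logical order described above would be enforced by strengthening the preconditions, e.g. making the shutdown of an application applicable only once none of its services remains in the state.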
It may be the case that it is not necessary to enact the develop application action. For example, a service may have to be developed for an application that already exists. In this case it is not necessary to develop that application again, since it already exists in the current architecture. The logical order prevents the shutdown of the predecessor services until the successor service is developed.\n3.6 Creating the Transformation Path\nWith the action repository, the transformation model, and the models of the current and target architecture at hand it is possible to start the creation of possible transformation paths.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "e9c43a6e-719e-4651-8a63-20dbd18f957c": {"__data__": {"id_": "e9c43a6e-719e-4651-8a63-20dbd18f957c", "embedding": null, "metadata": {"page_label": "482", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "014f9f76-25c6-49c0-9684-97ff451b3a3f", "node_type": "4", "metadata": {"page_label": "482", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "15672c9e516b1d378e4664126899f48bc6dc97265e02e44fa6195f7c530592fd"}, "2": {"node_id": "83e0514c-8117-4af1-a568-e05c05cac038", "node_type": "1", "metadata": {"page_label": "482", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "13894aa8a1948c7959aeec130722746bb4b1fe01f8a7df4a58f950b666c2d6c7"}}, "hash": "7b95909313a55486156fa06078c42f86de008e2298843ef9cb11250b934549b4", "text": "We derive all applicable concrete actions by checking which preconditions of abstract actions match in\nplanningKnowledgeBase := {transformationModel \u222a currentArchitecture \u222a targetArchitecture}\nThis corresponds to a breadth search of applicable actions for a possible change from the current towards the target architecture. If a concrete action is applied to planningKnowledgeBase it changes the state of the planningKnowledgeBase. In contrast, if we apply a depth search we receive a transformation path changing the EA in a sequence of concrete actions from the current to the target architecture. If no such transformation path exists the more exhaustive breadth search can be omitted and we are informed that no transformation path was found. 
By\napplying the breadth search on each state recursively and we get the whole state\nspace.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "f15188af-1a6b-4679-97ed-ceccd73fbc4e": {"__data__": {"id_": "f15188af-1a6b-4679-97ed-ceccd73fbc4e", "embedding": null, "metadata": {"page_label": "483", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "14282b74-c50d-4960-bd8a-324dbd31df6c", "node_type": "4", "metadata": {"page_label": "483", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "44b70d9dd3875a1574c410efe32c278ab0e1181c77c9ec481e2b18c5e3b978a0"}, "3": {"node_id": "cbcff33d-dec8-4395-a7b0-3b883b03327b", "node_type": "1", "metadata": {"page_label": "483", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "8cd76ed9abbd9e628709b4e1e49369649c5ceee86b6794e0f17e21fb33ec4aba"}}, "hash": "69d387d9a0325b93644fa8cad5d9dcdd621731fb4868a7edcd83eb3b4fc3b545", "text": "From Gaps to Transformation Paths in Enterprise Architecture Planning 483\nWith the state space it is possible to determine all possible transformation\npaths from the current to the target architecture. By selecting concrete actionswe create the transformation path, change the planningKnowledgeBase and get\neach time a list of concrete actions which we now can apply. When the transfor-\nmation path is complete, i.e. all necessary changes have been applied, no furtheractions are applicable and the transformation path is saved. If gaps are not to\nbe closed it is possible to stop the creation of the transformation path.\nThe selection process for choosing concrete actions can be enhanced by pro-\nviding development costs for proposed applications and services, and mainte-\nnance costs for applications and services which are to be retired. Furthermore,\nthe consideration of desired bene\ufb01ts, anticipated risks and resource constraintscould be considered if available to allow for a weighting of favorable sequences\nof actions.\n4 Use Case - Development Master Data Management\nIn the past, applications were often developed to address the speci\ufb01c business\nneeds that a part of the organization had at a certain moment. However, consid-ering the whole enterprise it is not e\ufb00ective to store redundant data in several\napplications as this increases the risk of outdated and inconsistent data. This\nis the basis for the master data management (MDM) challenge [ 26]. 
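Before turning to the use case, a compact sketch of the depth search just described, under an assumed encoding of states as sets of facts and concrete actions as (name, precondition, effect) triples; this is an illustration, not the authors' implementation.

```python
# Depth search for one transformation path (cf. Sect. 3.6): apply
# applicable concrete actions recursively until all target facts hold.
def find_path(state, goal, actions, path=(), seen=None):
    seen = set() if seen is None else seen
    if goal <= state:                 # all target facts hold: path done
        return list(path)
    key = frozenset(state)
    if key in seen:
        return None                   # this state was already explored
    seen.add(key)
    for name, pre, eff in actions:    # concrete actions from the repository
        if pre(state):
            result = find_path(eff(state), goal, actions,
                               path + (name,), seen)
            if result is not None:
                return result
    return None                       # no transformation path exists
```

Returning None corresponds to the case above where the search reports that no transformation path was found; enumerating all paths instead of the first one yields the full state space.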
In our use case we show a typical (and simplified) example for the introduction of master data management in the research and development division of an organization. Figure 1 shows a part of the model of the current architecture of the organization\u2019s IT landscape. A development master data management (DMDM) system has already been put in place in the organization, which provides services (MasterData v1 and v2) to other applications. However, not all existing applications use the master data provided by DMDM: the application DevManager provides similar data that is still used by existing applications such as the product planning tool and the quality tests planning tool.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "cbcff33d-dec8-4395-a7b0-3b883b03327b": {"__data__": {"id_": "cbcff33d-dec8-4395-a7b0-3b883b03327b", "embedding": null, "metadata": {"page_label": "483", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "14282b74-c50d-4960-bd8a-324dbd31df6c", "node_type": "4", "metadata": {"page_label": "483", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "44b70d9dd3875a1574c410efe32c278ab0e1181c77c9ec481e2b18c5e3b978a0"}, "2": {"node_id": "f15188af-1a6b-4679-97ed-ceccd73fbc4e", "node_type": "1", "metadata": {"page_label": "483", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "69d387d9a0325b93644fa8cad5d9dcdd621731fb4868a7edcd83eb3b4fc3b545"}}, "hash": "8cd76ed9abbd9e628709b4e1e49369649c5ceee86b6794e0f17e21fb33ec4aba", "text": "Other applications such as the virtual quality test result database store the master data themselves and are not connected to DMDM. For the modification of products (from one test to another) there exist two applications for the different product classes the organization provides to its customers. Additionally, there exist applications to plan the product and the quality tests, and to store the results that have been gathered during the (physical or virtual) quality tests. In the model of the target architecture the functionality in the different applications shall be united and all other tools will use the data provided by DMDM. There will be only one planning tool that includes planning for the product as well as the quality tests. All quality tests (including the results) will be managed by one quality test assistance and result management tool (cf. Figure 2).\nPlease note that Figs. 
1 and 2 already contain the services, which may not be considered in the first place for planning purposes.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "d991ee32-0afb-40a7-b5d0-5b3e60a70557": {"__data__": {"id_": "d991ee32-0afb-40a7-b5d0-5b3e60a70557", "embedding": null, "metadata": {"page_label": "484", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "b0861860-7803-4610-8cec-2a5dd7c79879", "node_type": "4", "metadata": {"page_label": "484", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "ecc43c6e256acb254beef7108c49b128ae23445c3a0e57659376c374f0094682"}, "3": {"node_id": "bcffad6f-fe7f-499c-b681-360725514cc2", "node_type": "1", "metadata": {"page_label": "484", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "d9b745bd5fc9354af0c969356ac8b4bb501b83568136f6357493517b7ff65fe3"}}, "hash": "f2f71ccc41200004fad7bf95b574ae92a1e79d8c516c69470cd8193d5f5906e5", "text": "[Figure] Fig. 1. Master data management: current architecture.\n[Figure] Fig. 2. Master data management: target architecture.\nSolution Applied to the Use Case\nAt first, currentArchitecture and targetArchitecture are created by modelling both architectures. 
Applying gap analysis it is possible to derive that onlyCurrentArchitecture contains: DevManager, Product planning tool, Quality tests planning tool, Physical quality test assistance tool, Physical quality test result database, Virtual quality test result database, Product class A assistance database, Product class B assistance database, QueryDev v1, MasterData v1 and v2.\nThe set stable contains only the Development master data management system (DMDM), whereas onlyTargetArchitecture contains Product and Quality test planning tool, Quality test assistance and result management tool, Product modification assistance database, MasterData v3 and PlanningData v1.\nWithin the transformation model, information on the successor relationships is kept: Product planning tool and Quality tests planning tool have the same successor (Product and Quality test planning tool). Physical quality test assistance tool, Physical quality test result database and Virtual quality test result database have the Quality test assistance and result management tool as a common successor. DMDM is a successor of itself, which is in accordance with [ 7], and DevManager has no successor. Product modification assistance database is the successor of Product class A assistance database and Product class B assistance database.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "bcffad6f-fe7f-499c-b681-360725514cc2": {"__data__": {"id_": "bcffad6f-fe7f-499c-b681-360725514cc2", "embedding": null, "metadata": {"page_label": "484", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "b0861860-7803-4610-8cec-2a5dd7c79879", "node_type": "4", "metadata": {"page_label": "484", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "ecc43c6e256acb254beef7108c49b128ae23445c3a0e57659376c374f0094682"}, "2": {"node_id": "d991ee32-0afb-40a7-b5d0-5b3e60a70557", "node_type": "1", "metadata": {"page_label": "484", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f2f71ccc41200004fad7bf95b574ae92a1e79d8c516c69470cd8193d5f5906e5"}}, "hash": "d9b745bd5fc9354af0c969356ac8b4bb501b83568136f6357493517b7ff65fe3", "text": "Regarding the services, the following successor relationships are contained in the transformation model: MasterData v3 is a successor of MasterData v1 and v2. 
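The set-theoretic gap derivation used here can be illustrated with plain set operations. A minimal sketch under the assumption that both architectures are given as sets of element names; the variable names are ours, not the paper's.

    # Hedged sketch: gap analysis as set differences over element names,
    # mirroring the use-case sets above.
    current_architecture = {
        "DevManager", "Product planning tool", "Quality tests planning tool",
        "Physical quality test assistance tool", "Physical quality test result database",
        "Virtual quality test result database", "Product class A assistance database",
        "Product class B assistance database", "DMDM",
        "QueryDev v1", "MasterData v1", "MasterData v2",
    }
    target_architecture = {
        "DMDM", "Product and Quality test planning tool",
        "Quality test assistance and result management tool",
        "Product modification assistance database",
        "MasterData v3", "PlanningData v1",
    }

    only_current = current_architecture - target_architecture   # to be retired
    stable = current_architecture & target_architecture          # kept as-is
    only_target = target_architecture - current_architecture     # to be developed

    # Successor relationships kept in the transformation model
    # (predecessor -> successor), as listed in the use case.
    successor = {
        "Product planning tool": "Product and Quality test planning tool",
        "Quality tests planning tool": "Product and Quality test planning tool",
        "Physical quality test assistance tool": "Quality test assistance and result management tool",
        "Physical quality test result database": "Quality test assistance and result management tool",
        "Virtual quality test result database": "Quality test assistance and result management tool",
        "Product class A assistance database": "Product modification assistance database",
        "Product class B assistance database": "Product modification assistance database",
        "DMDM": "DMDM",            # successor of itself, cf. [ 7]
        "MasterData v1": "MasterData v3",
        "MasterData v2": "MasterData v3",
        # DevManager has no successor; PlanningData v1 has no predecessor.
    }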
The QueryDev v1 has no successor and PlanningData v1 has no predecessor.\nBased upon this information the action repository can show that it is possible to develop MasterData v3 in the first place, or one of the successor applications.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "7cba94e3-99b9-456c-b068-f79e84d6d8cd": {"__data__": {"id_": "7cba94e3-99b9-456c-b068-f79e84d6d8cd", "embedding": null, "metadata": {"page_label": "485", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "290c3c9f-bcba-49f3-9af1-8e83a8b02247", "node_type": "4", "metadata": {"page_label": "485", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "0243dbf30e08de942f6fc080f3e64eedaecc3d1d0c734f189cb27f92d1ef18c6"}, "3": {"node_id": "3680cb90-e69d-40ff-8e85-5580f194b152", "node_type": "1", "metadata": {"page_label": "485", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "e483ed9ad19faa3501b1cf0058d3cc4265762db334539182df09e5b3ebe3888a"}}, "hash": "81072b254b353d9b74e4673570ba06565bbd0c06f4db1aededf74661a5ae5cf5", "text": "If, for example, the development of MasterData v3 is selected as the first action, it is possible to take care of the dependencies of applications to the predecessors of the service. After removing the dependencies and creating the new ones to the successor service, it is possible to shut down the predecessors. The development of the new applications is to be selected as the next steps in the transformation path. The remaining actions are not described in detail; however, their sequence is constrained by the logical order of the abstract actions.\n5 Discussion\nThe discussion is divided into two parts. At first we discuss the results of the solution and its application to the use case. After that, the limitations of the solution are presented.\nThe solution describes how it is possible to derive gaps between the models of a current and target architecture for planning purposes using a set-theoretic description. With the gaps at hand and information regarding the successor relationships of elements, the solution reuses existing information to aid in detailing the model of the target architecture. Afterwards, an action repository aids in the creation of possible transformation paths, which are sequences of actions. 
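The logical order sketched for the use case (develop the successor service, re-wire dependencies, then shut down the predecessors) could be encoded as precondition/effect actions. An illustrative sketch reusing the hypothetical Action type from the earlier sketch; the fact strings are our own shorthand, not the paper's notation.

    # Hedged illustration of the logical order for replacing MasterData v1
    # by MasterData v3, expressed with the Action type introduced earlier.
    develop_v3 = Action(
        name="develop MasterData v3",
        preconditions=frozenset(),
        add=frozenset({"exists(MasterData v3)"}),
        delete=frozenset(),
    )
    rewire_planning_tool = Action(
        name="move Product planning tool to MasterData v3",
        preconditions=frozenset({"exists(MasterData v3)",
                                 "uses(Product planning tool, MasterData v1)"}),
        add=frozenset({"uses(Product planning tool, MasterData v3)"}),
        delete=frozenset({"uses(Product planning tool, MasterData v1)"}),
    )
    shutdown_v1 = Action(
        name="shut down MasterData v1",
        # Only applicable once no application depends on the predecessor anymore;
        # a full model would quantify over all remaining dependencies.
        preconditions=frozenset({"exists(MasterData v3)"}),
        add=frozenset(),
        delete=frozenset({"exists(MasterData v1)"}),
    )

Because shutdown_v1 deletes facts that rewire actions require, the search from Sect. 3.6 can only order it after the dependencies have been moved, which is exactly the constraint described above.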
Overall, the solution considers a domain expert as an important part of the activities and assists her in the decision-making process.\nCreating suggestions for detailing the model of a target architecture is only possible if business building blocks are available. However, the mechanism of gap analysis, the transformation model and the creation of transformation paths using the action repository are not impacted by this limitation.\nFurthermore, the solution poses requirements regarding the meta-model. If the EAM approach does not concern application architectures, and as a consequence the models of applications and their dependencies to services, the solution would not be suitable in its current shape. However, the mechanisms as described in the solution can be adapted to aid in the modelling and creation of transformation paths which address the concerns of the stakeholders. From our point of view, applications and their provided services are an important part of an EA.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "3680cb90-e69d-40ff-8e85-5580f194b152": {"__data__": {"id_": "3680cb90-e69d-40ff-8e85-5580f194b152", "embedding": null, "metadata": {"page_label": "485", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "290c3c9f-bcba-49f3-9af1-8e83a8b02247", "node_type": "4", "metadata": {"page_label": "485", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "0243dbf30e08de942f6fc080f3e64eedaecc3d1d0c734f189cb27f92d1ef18c6"}, "2": {"node_id": "7cba94e3-99b9-456c-b068-f79e84d6d8cd", "node_type": "1", "metadata": {"page_label": "485", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "81072b254b353d9b74e4673570ba06565bbd0c06f4db1aededf74661a5ae5cf5"}}, "hash": "e483ed9ad19faa3501b1cf0058d3cc4265762db334539182df09e5b3ebe3888a", "text": "Currently, we create the connection between the models of the current and target architecture manually, which is error-prone and time-consuming. The model of the target architecture does currently not consider information about which transformation paths are possible when technology architecture aspects are taken into account.\n6 Proposed Technical Realization\nUsing semantic web technologies for formalizing information sources yields a number of advantages, ranging from having a formal, unambiguous model to the possibilities of reasoning and consistency checking. 
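To make the semantic-web realization concrete: the knowledge base could be held as RDF and queried with SPARQL, as the following hedged rdflib sketch illustrates. The namespace, class and property names (ea:Application, ea:providesService, ea:usesService) are invented for illustration; the paper prescribes no concrete vocabulary.

    # Minimal sketch, assuming the EA models are formalized as RDF triples.
    from rdflib import Graph, Namespace, RDF

    EA = Namespace("http://example.org/ea#")
    kb = Graph()
    kb.bind("ea", EA)

    # A fragment of the current architecture from the use case.
    kb.add((EA.DevManager, RDF.type, EA.Application))
    kb.add((EA.DMDM, RDF.type, EA.Application))
    kb.add((EA.DMDM, EA.providesService, EA.MasterData_v1))
    kb.add((EA.DevManager, EA.providesService, EA.QueryDev_v1))
    kb.add((EA.ProductPlanningTool, EA.usesService, EA.QueryDev_v1))

    # SPARQL query: which applications still depend on services provided by
    # an element scheduled for retirement (here: DevManager)?
    q = """
    PREFIX ea: <http://example.org/ea#>
    SELECT ?app ?service WHERE {
        ?app ea:usesService ?service .
        ea:DevManager ea:providesService ?service .
    }
    """
    for app, service in kb.query(q):
        print(f"{app} depends on {service}")

SPARQL's INSERT/CONSTRUCT forms would cover the semi-automated adding of information mentioned below, while the state-changing transformation steps exceed what a query language comfortably expresses, which motivates the graph transformation tooling discussed next.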
The knowledge base", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "db81a990-35d0-474d-ad9d-e5f4c0122776": {"__data__": {"id_": "db81a990-35d0-474d-ad9d-e5f4c0122776", "embedding": null, "metadata": {"page_label": "486", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "9d6248bd-b296-4a6d-9600-3d026b72246a", "node_type": "4", "metadata": {"page_label": "486", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "62c283b3781b3eb1252f949a458c78ef86493d1e5524328b881d9b326d7a80ac"}, "3": {"node_id": "fb3db90c-a561-46fe-a2db-785b9fb9cb73", "node_type": "1", "metadata": {"page_label": "486", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "3161abe8415bc95ea8b5343645f317c8d3b19c2a3b1b619e49680a9a4b9926f4"}}, "hash": "1a7f87cadc0fd63b3a1ea8202fa1e5a4f5c109652a07e9090eb4c0474c4dfa90", "text": "containing the current and target EA models, as well as the transformation model, can be consulted at run time by humans as well as by applications.\nIdentifying gaps can be realized using standard tools like Prot\u00e9g\u00e93 for modeling and OWLDiff4 for comparing the modeled EAs. For detailing the model of the target architecture we suggest the usage of SPARQL, as it allows querying and adding information in a semi-automated manner.\nRegarding the creation of transformation paths we suggest using a more sophisticated graph transformation approach, as it provides the expressiveness necessary for the creation of transformation paths. This requirement exceeds the current capabilities of SPARQL. A promising World Wide Web Consortium standard is the Rule Interchange Format5 (RIF), whose initial purpose was the exchange of rules. The second edition of RIF provides an action language which can be used to express the actions necessary for transformation path planning. However, we were not able to test the proposed solution as no free implementations are available yet. Therefore, we propose to use a mature graph transformation tool like GROOVE6.\nHowever, this proposed technical realization requires a model-to-model (M2M) transformation of the ontologies to a model which is interpretable by a graph transformation approach.\n7 Related Work\nIn this section related work is introduced. 
As a starting point the technical report \u2018On the State of the Art in Enterprise Architecture Management Literature\u2019 [ 8] was taken, as it considers gap (delta) analysis as part of the different EAM approaches. Besides the approaches listed in the technical report, an approach from the University of Oldenburg and a technical standard from The Open Group were identified as related work.\n7.1 University of Oldenburg\nThe Institute for Information Technology of the University of Oldenburg presents a tool-supported approach for performing a gap analysis on a current and ideal landscape [ 27].", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "fb3db90c-a561-46fe-a2db-785b9fb9cb73": {"__data__": {"id_": "fb3db90c-a561-46fe-a2db-785b9fb9cb73", "embedding": null, "metadata": {"page_label": "486", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "9d6248bd-b296-4a6d-9600-3d026b72246a", "node_type": "4", "metadata": {"page_label": "486", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "62c283b3781b3eb1252f949a458c78ef86493d1e5524328b881d9b326d7a80ac"}, "2": {"node_id": "db81a990-35d0-474d-ad9d-e5f4c0122776", "node_type": "1", "metadata": {"page_label": "486", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "1a7f87cadc0fd63b3a1ea8202fa1e5a4f5c109652a07e9090eb4c0474c4dfa90"}}, "hash": "3161abe8415bc95ea8b5343645f317c8d3b19c2a3b1b619e49680a9a4b9926f4", "text": "The approach is tightly coupled to the Quasar Enterprise approach, which can be used to develop service-oriented application landscapes.\nIn order to be able to perform their gap analysis it is necessary to model the current application landscape, consisting of current components, current services, current operations and business objects. The ideal landscape is modeled with ideal components, ideal services, ideal operations and domains. Based on these two models the tool is capable of generating a list of actions that would, if all were applied, result in the ideal landscape. 
Within the paper the suggested procedure\n3 http://protege.stanford.edu/\n4 http://krizik.felk.cvut.cz/km/owldiff/\n5 http://www.w3.org/TR/2013/NOTE-rif-overview-20130205/\n6 http://groove.cs.utwente.nl/", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "294d6aff-90f6-4f41-8d36-51e086905848": {"__data__": {"id_": "294d6aff-90f6-4f41-8d36-51e086905848", "embedding": null, "metadata": {"page_label": "487", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "7565958f-8cee-4307-9686-729fbd2fa991", "node_type": "4", "metadata": {"page_label": "487", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "06a251c9de1a6d9955e088165ab5bd81d8f6f9a71b1e68acf034c38b64d05878"}, "3": {"node_id": "98ab4b4c-460d-4789-8dc1-9d7efa325853", "node_type": "1", "metadata": {"page_label": "487", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "6e69af0863a9bdf465871b31be8f5db7bfce2a86419b5e1d97c3aa8d55faaffc"}}, "hash": "7c5dfc6263ee2fdd4e1d29530489aee794b677009d0e38fc3ce2fc126f5c0483", "text": "for selecting actions is to allow an architect to select certain actions that result in a target. Furthermore, the tool is capable of providing metrics for quantitative analysis of the application landscape.\nGringel and Postina state that gap analysis needs a \u201cdetailed level of description when it comes to modeling both landscapes\u201d ([ 27], p. 283) and as a result the \u201cdata necessary to perform gap analysis on the entire application landscape on a detailed level considering operations is overwhelming\u201d ([ 27], p. 291). How the different actions interfere with each other is not considered, and actions can only be provided if an ideal landscape with all details has been modeled.\n7.2 Strategic IT Management by Hanschke\nThe \u2018Strategic IT Management\u2019 [ 11] approach is intended to serve as a toolkit for EAM by providing best practices derived from work experience. After a target architecture has been modeled and agreed upon, gap analysis is used to detect differences between the current and target architecture. Gap analysis is performed on the basis of process support maps visualizing which applications support which business processes (x-axis) and which customer group (y-axis) the applications are assigned to. 
For a more fine-grained gap analysis Hanschke suggests additionally adding information about services and information objects of the applications. Afterwards, for each gap possible actions to close the gap are considered.\nThe actions range from introducing a new application, adding or reducing functionality of an existing application, and changing or adding services, to the shutdown of applications and services. Based upon the results of gap analysis and the derivation of appropriate actions it is necessary to clarify dependencies between the actions, bundle the actions and create planned architectures as recommendations for change. As far as we were able to verify the limitations of the tool and approach, it is not possible to create suggestions for a detailed target architecture.\n7.3 ArchiMate\nArchiMate ([ 21], Chap.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "98ab4b4c-460d-4789-8dc1-9d7efa325853": {"__data__": {"id_": "98ab4b4c-460d-4789-8dc1-9d7efa325853", "embedding": null, "metadata": {"page_label": "487", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "7565958f-8cee-4307-9686-729fbd2fa991", "node_type": "4", "metadata": {"page_label": "487", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "06a251c9de1a6d9955e088165ab5bd81d8f6f9a71b1e68acf034c38b64d05878"}, "2": {"node_id": "294d6aff-90f6-4f41-8d36-51e086905848", "node_type": "1", "metadata": {"page_label": "487", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "7c5dfc6263ee2fdd4e1d29530489aee794b677009d0e38fc3ce2fc126f5c0483"}}, "hash": "6e69af0863a9bdf465871b31be8f5db7bfce2a86419b5e1d97c3aa8d55faaffc", "text": "7.3 ArchiMate\nArchiMate ([ 21], Chap. 11) introduces an Implementation and Migration Extension including a Gap element. A gap can be associated with any core element of the ArchiMate meta-models, except for the Value and Meaning elements. In general, a gap links several elements of two EA models and contains elements to be removed (retired) and to be added (developed). The linkage of the differences between the EA models and the resulting gap is not described.\n8 Future Work\nCreating transition architectures as plateaus (see [ 21]) between the current and target architecture should be supported by actions. A plateau is a stable state of the EA. The current and target architecture are also plateaus according to ArchiMate. 
However, we first need to identify in which situations actions are", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "2fdc5d29-c912-487d-ad1a-c2d26d3dd110": {"__data__": {"id_": "2fdc5d29-c912-487d-ad1a-c2d26d3dd110", "embedding": null, "metadata": {"page_label": "488", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "7173d059-7feb-4711-a7ed-cbaf25bd35b1", "node_type": "4", "metadata": {"page_label": "488", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "b0385e450f438088394742c5e283c6ae66d5d576d1d4abf3491473344c886b57"}, "3": {"node_id": "e820746a-5f9b-4646-9d8c-7c72b50717be", "node_type": "1", "metadata": {"page_label": "488", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "8510c50aac4a1935a42c61ba299d9465ffd9d42280b418c4f3e32aee2ab54968"}}, "hash": "f1802c67bef335d7f65ab00d6743e71576307b3439c072fe5ef1959202a5ed11", "text": "of relevance for transition architecture creation and whether it is possible to provide meaningful support for a domain expert.\nA value-based weighting for different transformation paths is currently being elaborated to support a domain expert with information about which paths seem to be more promising than others. This ranking will take into account different factors relevant for transformation planning.\nThe methodology for how to create, use and maintain the action repository is currently being extended to cope with different EA models and different concerns which need to be addressed during transformation planning.\n9 Conclusions\nWe have shown how it is possible to get from identified gaps to transformation paths by creating a transformation model, detailing a target architecture and using an action repository to create possible sequences of actions for transformation paths.\nA use case for parts of an application architecture was presented and the solution was applied to it. Furthermore, we presented a proposition for a technical realisation to allow for tool support.\nWe discussed the results and limitations of the solution and clarified its connection to related work. Future work to be addressed was also presented.\nReferences\n1. International Organization for Standardization. ISO/IEC 42010:2007 Standard for systems and software engineering - recommended practice for architectural description of software-intensive systems (2007)\n2. 
Winter, R., Fischer, R.: Essential layers, artifacts, and dependencies of enterprise\narchitecture. In: 2006 10th IEEE International Enterprise Distributed Object Com-\nputing Conference Workshops (EDOCW\u201906), IEEE, p. 30 (2006)\n3. Pulkkinen, M., Hirvonen, A.: Ea planning, development and management process\nfor agile enterprise development. In: Proceedings of the 38th Annual Hawaii Inter-\nnational Conference on System Sciences, IEEE, p. 223c (2005)\n4.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "e820746a-5f9b-4646-9d8c-7c72b50717be": {"__data__": {"id_": "e820746a-5f9b-4646-9d8c-7c72b50717be", "embedding": null, "metadata": {"page_label": "488", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "7173d059-7feb-4711-a7ed-cbaf25bd35b1", "node_type": "4", "metadata": {"page_label": "488", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "b0385e450f438088394742c5e283c6ae66d5d576d1d4abf3491473344c886b57"}, "2": {"node_id": "2fdc5d29-c912-487d-ad1a-c2d26d3dd110", "node_type": "1", "metadata": {"page_label": "488", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f1802c67bef335d7f65ab00d6743e71576307b3439c072fe5ef1959202a5ed11"}}, "hash": "8510c50aac4a1935a42c61ba299d9465ffd9d42280b418c4f3e32aee2ab54968", "text": "223c (2005)\n4. Pulkkinen, M.: Systemic management of architectural decisions in enterprise archi-\ntecture planning, four dimensions and three abstraction levels. In: Proceedings ofthe 39th Annual Hawaii International Conference on System Sciences (HICSS\u201906),\nIEEE, p. 179a (2006)\n5. Niemann, K.D.: From Enterprise Architecture to IT Governance: Elements of E\ufb00ec-\ntive IT Management. Vieweg, Wiesbaden (2006)\n6. Aier, S., Gleichauf, B., Saat, J., Winter, R.: Complexity levels of representing\ndynamics in EA planning. In: Albani, A., Barjis, J., Dietz, J.L.G. (eds.) Advances\nin Enterprise Engineering III. LNBIP, vol. 34, pp. 55\u201369. Springer, Heidelberg\n(2009)\n7. Aier, S., Gleichauf, B.: Towards a systematic approach for capturing dynamic\ntransformation in enterprise models. In: Sprague, R.H. (ed.) Proceedings of the\n43rd Hawaii International Conference on System Sciences 2010 (HICSS-43). 
Los\nAlamitos, IEEE Computer Society (2010)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "7dceb720-792d-43b2-9e4a-8d373b9f0b6e": {"__data__": {"id_": "7dceb720-792d-43b2-9e4a-8d373b9f0b6e", "embedding": null, "metadata": {"page_label": "489", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "dead2a62-5d5f-4ac8-b2e5-dfa16f691ab0", "node_type": "4", "metadata": {"page_label": "489", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "710cb5ff4959b9c99a37e3173bbab0cd10febb03f71cae910fd6e6acecc99a53"}, "3": {"node_id": "49ed5d4f-fb50-4f4c-94e5-dfae17b9b7e8", "node_type": "1", "metadata": {"page_label": "489", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "50c4ecdd5d696100d158bebbf93a6ad61ad1466dc10848128fbf82193baee283"}}, "hash": "cbc0f8975699b8753d389e638557ca95cc576f222eae47a98714a4ad42f17462", "text": "From Gaps to Transformation Paths in Enterprise Architecture Planning 489\n8. Buckl, S., Schweda, C.M.: On the State-of-the-Art in Enterprise Architecture Man-\nagement Literature (2011)\n9. Aier, S., Gleichauf, B.: Application of enterprise models for engineering enterprise\ntransformation. Enterp. Model. Inf. Syst. Archit. 5, 56\u201372 (2010)\n10. Buckl, S., Ernst, A.M., Matthes, F., Schweda, C.M.: An information model captur-\ning the managed evolution of application landscapes. J. Enterp. Archit. 5, 12\u201326\n(2009)\n11. Hanschke, I.: Strategisches Management der IT-Landschaft: Ein praktischer Leit-\nfaden f\u00a8 ur das Enterprise Architecture Management, 1st edn. Hanser, M\u00a8 unchen\n(2009)\n12. Shadbolt, N., Hall, W., Berners-Lee, T.: The semantic web revisited. IEEE Intell.\nSyst. 21(3), 96\u2013101 (2006). (IEEE Computer Society)\n13. Motik, B., Patel-Schneider, P.F., Horrocks, I.: Owl 2 web ontology language: Struc-\ntural speci\ufb01cation and functional-style syntax (2009)\n14. Prud\u2019hommeaux, E., Seaborne, A.: SPARQL Query Language for RDF. World\nWide Web Consortium (2008)\n15. Manola, F., Miller, E., McBride, B.: RDF Primer. World Wide Web Consortium\n(2004)\n16. Lautenbacher, F.: Semantic Business Process Modeling: Principles, Design Support\nand Realization. 
Shaker, Aachen (2010)\n17.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "49ed5d4f-fb50-4f4c-94e5-dfae17b9b7e8": {"__data__": {"id_": "49ed5d4f-fb50-4f4c-94e5-dfae17b9b7e8", "embedding": null, "metadata": {"page_label": "489", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "dead2a62-5d5f-4ac8-b2e5-dfa16f691ab0", "node_type": "4", "metadata": {"page_label": "489", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "710cb5ff4959b9c99a37e3173bbab0cd10febb03f71cae910fd6e6acecc99a53"}, "2": {"node_id": "7dceb720-792d-43b2-9e4a-8d373b9f0b6e", "node_type": "1", "metadata": {"page_label": "489", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "cbc0f8975699b8753d389e638557ca95cc576f222eae47a98714a4ad42f17462"}, "3": {"node_id": "0f0e83cd-5678-426b-b828-7f80df9c85e3", "node_type": "1", "metadata": {"page_label": "489", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "2d330d75caa4737f89d516d0aaa8c05fac7c104138cc76c4a0b3a5ab9666f4c9"}}, "hash": "50c4ecdd5d696100d158bebbf93a6ad61ad1466dc10848128fbf82193baee283", "text": "Shaker, Aachen (2010)\n17. Grimm, S., Watzke, M., Hubauer, T., Cescolini, F.: Embedded EL+reasoning\non programmable logic controllers. In: Cudr\u00b4 e-Mauroux, P., He\ufb02in, J., Sirin, E.,\nTudorache, T., Euzenat, J., Hauswirth, M., Parreira, J.X., Hendler, J., Schreiber,G., Bernstein, A., Blomqvist, E. (eds.) ISWC 2012, Part II. LNCS, vol. 7650, pp.\n66\u201381. Springer, Heidelberg (2012)\n18. Russell, S.J., Norvig, P.: Arti\ufb01cial Intelligence: A Modern Approach, 3rd edn. Pren-\ntice Hall, Upper Saddle River (2010)\n19. Ghallab, M., Nau, D.S., Traverso, P.: Automated Planning: Theory & Practice.\nMorgan Kaufmann/Elsevier Science, San Francisco/Oxford (2004)\n20. The Open Group: TOGAF Version 9.1. TOGAF Series, 1st edn. Van Haren Pub-\nlishing, Zaltbommel (2011)\n21. The Open Group: Archimate 2.0 Speci\ufb01cation. Van Haren Publishing, Zaltbommel\n(2012)\n22. Matthes, F., Buckl, S., Leitel, J., Schweda, C.M.: Enterprise Architecture Manage-\nment Tool Survey 2008. Technische Universit\u00a8 at M\u00a8unchen, M\u00a8 unchen (2008)\n23. Edelkamp, S., Rensink, A.: Graph transformation and AI Planning. In: Edelkamp,\nS., Frank, J. (eds.) 
Knowledge Engineering Competition (ICKEPS), Rhode Island,\nUSA (2007)\n24.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "0f0e83cd-5678-426b-b828-7f80df9c85e3": {"__data__": {"id_": "0f0e83cd-5678-426b-b828-7f80df9c85e3", "embedding": null, "metadata": {"page_label": "489", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "dead2a62-5d5f-4ac8-b2e5-dfa16f691ab0", "node_type": "4", "metadata": {"page_label": "489", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "710cb5ff4959b9c99a37e3173bbab0cd10febb03f71cae910fd6e6acecc99a53"}, "2": {"node_id": "49ed5d4f-fb50-4f4c-94e5-dfae17b9b7e8", "node_type": "1", "metadata": {"page_label": "489", "file_name": "From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "file_path": "docs\\From Gaps to Transformation Paths in Enterprise Architecture Planning.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "50c4ecdd5d696100d158bebbf93a6ad61ad1466dc10848128fbf82193baee283"}}, "hash": "2d330d75caa4737f89d516d0aaa8c05fac7c104138cc76c4a0b3a5ab9666f4c9", "text": "Rozenberg, G.: Handbook of Graph Grammars and Computing by Graph Trans-\nformation, vol. 1. World Scienti\ufb01c River Edge, NJ, USA (1997)\n25. Binz, T., Leymann, F., Nowak, A., Schumm, D.: Improving the manageability of\nenterprise topologies through segmentation, graph transformation, and analysis\nstrategies. In: 2012 16th IEEE International Enterprise Distributed Object Com-puting Conference (EDOC 2012), pp. 61\u201370 (2012)\n26. Loshin, D.: Master Data Management. Elsevier/Morgan Kaufmann, Amster-\ndam/Boston (2009)\n27. Gringel, P., Postina, M.: I-pattern for gap analysis. In: Engels, G., Luckey, M.,\nPretschner, A., Reussner, R. (eds.) Software Engineering 2010. Lecture Notes in\nInformatics, pp. 281\u2013292. 
Gesellschaft f\u00a8 ur Informatik, Bonn (2010)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "d9122d3c-7a34-416a-8b96-d071fbc97ff1": {"__data__": {"id_": "d9122d3c-7a34-416a-8b96-d071fbc97ff1", "embedding": null, "metadata": {"page_label": "1", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "cdee50c4-5767-4a9d-9c14-65222d9a2c67", "node_type": "4", "metadata": {"page_label": "1", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "53ef4c20e509492cc140d37adee597ff6cd68aed6522a0a503af2eee93b5d6db"}, "3": {"node_id": "326af7e9-6f5b-4e17-a2ba-6101200750e2", "node_type": "1", "metadata": {"page_label": "1", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "4c3b31fcc218114d24961bfdd3e1d329ca7ac55a19b992382ca18406dfce3815"}}, "hash": "38198c0616dce6a00fc7ee3d36f11d3fe63090f395c1019a3089fb7979b9ca07", "text": "Heterogeneous Graph Transformer\nZiniu Hu\u2217\nUniversity of California, Los Angeles\nbull@cs.ucla.eduYuxiao Dong\nMicrosoft Research, Redmond\nyuxdong@microsoft.com\nKuansan Wang\nMicrosoft Research, Redmond\nkuansanw@microsoft.comYizhou Sun\nUniversity of California, Los Angeles\nyzsun@cs.ucla.edu\nABSTRACT\nRecent years have witnessed the emerging success of graph neu-\nral networks (GNNs) for modeling structured data. However, most\nGNNs are designed for homogeneous graphs, in which all nodes\nand edges belong to the same types, making them infeasible to\nrepresent heterogeneous structures. In this paper, we present the\nHeterogeneous Graph Transformer (HGT) architecture for mod-\neling Web-scale heterogeneous graphs. To model heterogeneity,\nwe design node- and edge-type dependent parameters to charac-\nterize the heterogeneous attention over each edge, empowering\nHGT to maintain dedicated representations for different types of\nnodes and edges. To handle dynamic heterogeneous graphs, we in-\ntroduce the relative temporal encoding technique into HGT, which\nis able to capture the dynamic structural dependency with arbitrary\ndurations. To handle Web-scale graph data, we design the hetero-\ngeneous mini-batch graph sampling algorithm\u2014HGSampling\u2014for\nefficient and scalable training. Extensive experiments on the Open\nAcademic Graph of 179 million nodes and 2 billion edges show\nthat the proposed HGT model consistently outperforms all the\nstate-of-the-art GNN baselines by 9 %\u201321%on various downstream\ntasks. The dataset and source code of HGT are publicly available at\nhttps://github.com/acbull/pyHGT.\nKEYWORDS\nGraph Neural Networks; Heterogeneous Information Networks;\nRepresentation Learning; Graph Embedding; Graph Attention\nACM Reference Format:\nZiniu Hu, Yuxiao Dong, Kuansan Wang, and Yizhou Sun. 2020. 
Hetero-\ngeneous Graph Transformer.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "326af7e9-6f5b-4e17-a2ba-6101200750e2": {"__data__": {"id_": "326af7e9-6f5b-4e17-a2ba-6101200750e2", "embedding": null, "metadata": {"page_label": "1", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "cdee50c4-5767-4a9d-9c14-65222d9a2c67", "node_type": "4", "metadata": {"page_label": "1", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "53ef4c20e509492cc140d37adee597ff6cd68aed6522a0a503af2eee93b5d6db"}, "2": {"node_id": "d9122d3c-7a34-416a-8b96-d071fbc97ff1", "node_type": "1", "metadata": {"page_label": "1", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "38198c0616dce6a00fc7ee3d36f11d3fe63090f395c1019a3089fb7979b9ca07"}, "3": {"node_id": "f8b77e70-a304-42d0-90e3-03fe2f913ff0", "node_type": "1", "metadata": {"page_label": "1", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "1e8af07606e1fcb090b659a8eefef63afa9730bd9aa18267f5de1de99406def1"}}, "hash": "4c3b31fcc218114d24961bfdd3e1d329ca7ac55a19b992382ca18406dfce3815", "text": "2020. Hetero-\ngeneous Graph Transformer. In Proceedings of The Web Conference 2020\n(WWW \u201920), April 20\u201324, 2020, Taipei, Taiwan. ACM, New York, NY, USA,\n11 pages. https://doi.org/10.1145/3366423.3380027\n1 INTRODUCTION\nHeterogeneous graphs have been commonly used for abstracting\nand modeling complex systems, in which objects of different types\n\u2217This work was done when Ziniu was an intern at Microsoft Research.\nPermission to make digital or hard copies of all or part of this work for personal or\nclassroom use is granted without fee provided that copies are not made or distributed\nfor profit or commercial advantage and that copies bear this notice and the full citation\non the first page. Copyrights for components of this work owned by others than ACM\nmust be honored. Abstracting with credit is permitted. To copy otherwise, or republish,\nto post on servers or to redistribute to lists, requires prior specific permission and/or a\nfee. Request permissions from permissions@acm.org.\nWWW \u201920, April 20\u201324, 2020, Taipei, Taiwan\n\u00a92020 Association for Computing Machinery.\nACM ISBN 978-1-4503-7023-3/20/04.\nhttps://doi.org/10.1145/3366423.3380027\nFigure 1: The schema and meta relations of Open Academic\nGraph (OAG). Given a Web-scale heterogeneous graph, e.g., an\nacademic network, HGT takes only its one-hop edges as input\nwithout manually designing meta paths.\ninteract with each other in various ways. 
Some prevalent instances\nof such systems include academic graphs, Facebook entity graph,\nLinkedIn economic graph, and broadly the Internet of Things net-\nwork. For example, the Open Academic Graph (OAG) [ 28] in Figure\n1 contains five types of nodes: papers, authors, institutions, venues\n(journal, conference, or preprint), and fields, as well as different\ntypes of relationships between them.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "f8b77e70-a304-42d0-90e3-03fe2f913ff0": {"__data__": {"id_": "f8b77e70-a304-42d0-90e3-03fe2f913ff0", "embedding": null, "metadata": {"page_label": "1", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "cdee50c4-5767-4a9d-9c14-65222d9a2c67", "node_type": "4", "metadata": {"page_label": "1", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "53ef4c20e509492cc140d37adee597ff6cd68aed6522a0a503af2eee93b5d6db"}, "2": {"node_id": "326af7e9-6f5b-4e17-a2ba-6101200750e2", "node_type": "1", "metadata": {"page_label": "1", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "4c3b31fcc218114d24961bfdd3e1d329ca7ac55a19b992382ca18406dfce3815"}}, "hash": "1e8af07606e1fcb090b659a8eefef63afa9730bd9aa18267f5de1de99406def1", "text": "Over the past decade, a significant line of research has been ex-\nplored for mining heterogeneous graphs [ 17]. One of the classical\nparadigms is to define and use meta paths to model heterogeneous\nstructures, such as PathSim [ 18] and metapath2vec [ 3]. Recently,\nin view of graph neural networks\u2019 (GNNs) success [ 7,9,22], there\nare several attempts to adopt GNNs to learn with heterogeneous\nnetworks [ 14,23,26,27]. 
However, these works face several issues:\nFirst, most of them involve the design of meta paths for each type of\nheterogeneous graphs, requiring specific domain knowledge; Sec-\nond, they either simply assume that different types of nodes/edges\nshare the same feature and representation space or keep distinct\nnon-sharing weights for either node type or edge type alone, mak-\ning them insufficient to capture heterogeneous graphs\u2019 properties;\nThird, most of them ignore the dynamic nature of every (hetero-\ngeneous) graph; Finally, their intrinsic design and implementation\nmake them incapable of modeling Web-scale heterogeneous graphs.\nTake OAG for example: First, the nodes and edges in OAG could\nhave different feature distributions, e.g., papers have text features\nwhereas institutions may have features from affiliated scholars, and\ncoauthorships obviously differ from citation links; Second, OAG\nhas been consistently evolving, e.g., 1) the volume of publications\ndoubles every 12 years [ 4], and 2) the KDD conference was more\nrelated to database in the 1990s whereas more to machine learning\nin recent years; Finally, OAG contains hundreds of millions of nodesarXiv:2003.01332v1 [cs.LG] 3 Mar 2020", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "225511cb-2445-4b55-8fba-74e2779df88d": {"__data__": {"id_": "225511cb-2445-4b55-8fba-74e2779df88d", "embedding": null, "metadata": {"page_label": "2", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "1eb867f6-2334-4615-9360-df36808375eb", "node_type": "4", "metadata": {"page_label": "2", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "0165fd3447625d17d4424f764da78f21b9aab377a01e12e0ae99a474bdcca254"}, "3": {"node_id": "cbf07eca-e026-4906-964b-811831c2c3b9", "node_type": "1", "metadata": {"page_label": "2", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "b27832d1bcd3f3d7fc3bf739b5e0b7599c1cfe810774b0a9f7798a47d4b8a967"}}, "hash": "8eb89bc098cc88a1cd617ccc5ea6891f1cc20bc391a55ed1c56fe05f9e3f2b05", "text": "WWW \u201920, April 20\u201324, 2020, Taipei, Taiwan Ziniu Hu, Yuxiao Dong, Kuansan Wang, and Yizhou Sun\nand billions of relationships, leaving existing heterogeneous GNNs\nnot scalable for handling it.\nIn light of these limitations and challenges, we propose to study\nheterogeneous graph neural networks with the goal of maintaining\nnode- and edge-type dependent representations, capturing network\ndynamics, avoiding customized meta paths, and being scalable to\nWeb-scale graphs. 
In this work, we present the Heterogeneous Graph Transformer (HGT) architecture to deal with all these issues.\nTo handle graph heterogeneity, we introduce a node- and edge-type dependent attention mechanism. Instead of parameterizing each edge type, the heterogeneous mutual attention in HGT is defined by breaking down each edge e = (s, t) based on its meta relation triplet, i.e., ⟨node type of s, type of edge e between s & t, node type of t⟩. Figure 1 illustrates the meta relations of heterogeneous academic graphs. Specifically, we use these meta relations to parameterize the weight matrices for calculating attention over each edge. As a result, nodes and edges of different types are allowed to maintain their specific representation spaces. Meanwhile, connected nodes of different types can still interact, pass, and aggregate messages without being restricted by their distribution gaps. Due to the nature of its architecture, HGT can incorporate information from high-order neighbors of different types through message passing across layers, which can be regarded as \u201csoft\u201d meta paths. That said, even if HGT takes only its one-hop edges as input, without manually designed meta paths, the proposed attention mechanism can automatically and implicitly learn and extract the \u201cmeta paths\u201d that are important for different downstream tasks.\nTo handle graph dynamics, we enhance HGT by proposing the relative temporal encoding (RTE) strategy.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "cbf07eca-e026-4906-964b-811831c2c3b9": {"__data__": {"id_": "cbf07eca-e026-4906-964b-811831c2c3b9", "embedding": null, "metadata": {"page_label": "2", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "1eb867f6-2334-4615-9360-df36808375eb", "node_type": "4", "metadata": {"page_label": "2", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "0165fd3447625d17d4424f764da78f21b9aab377a01e12e0ae99a474bdcca254"}, "2": {"node_id": "225511cb-2445-4b55-8fba-74e2779df88d", "node_type": "1", "metadata": {"page_label": "2", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "8eb89bc098cc88a1cd617ccc5ea6891f1cc20bc391a55ed1c56fe05f9e3f2b05"}, "3": {"node_id": "905772c9-f508-47a1-9f3f-ab5f94a5fb57", "node_type": "1", "metadata": {"page_label": "2", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "92464fbd6d1e8dc196d85d4ed27792dec4dfad9a12efcc13e492f2eeabbfcbaa"}}, "hash": "b27832d1bcd3f3d7fc3bf739b5e0b7599c1cfe810774b0a9f7798a47d4b8a967", "text": "Instead of 
slicing the input graph into different timestamps, we propose to maintain all the edges happening at different times as a whole, and we design the RTE strategy to model structural temporal dependencies with any duration length, even with unseen and future timestamps. By end-to-end training, RTE enables HGT to automatically learn the temporal dependency and evolution of heterogeneous graphs.\nTo handle Web-scale graph data, we design the first heterogeneous sub-graph sampling algorithm—HGSampling—for mini-batch GNN training. Its main idea is to sample heterogeneous sub-graphs in which the different types of nodes appear in similar proportions, since directly using existing (homogeneous) GNN sampling methods, such as GraphSage [7], FastGCN [1], and LADIES [29], results in sub-graphs that are highly imbalanced with regard to both node and edge types. In addition, it is also designed to keep the sampled sub-graphs dense so as to minimize the loss of information. With HGSampling, all GNN models, including our proposed HGT, can train and infer on heterogeneous graphs of arbitrary size.\nWe demonstrate the effectiveness and efficiency of the proposed Heterogeneous Graph Transformer on the Web-scale Open Academic Graph, comprising 179 million nodes and 2 billion edges spanning from 1900 to 2019, making this the largest-scale and longest-spanning representation learning yet performed on heterogeneous graphs. Additionally, we also examine it on domain-specific graphs: the computer science and medicine academic graphs. Experimental results suggest that HGT can significantly improve various downstream tasks over state-of-the-art GNNs as well as dedicated heterogeneous models by 9\u201321%. We further conduct case studies to show that the proposed method can indeed automatically capture the importance of implicit meta paths for different tasks.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "905772c9-f508-47a1-9f3f-ab5f94a5fb57": {"__data__": {"id_": "905772c9-f508-47a1-9f3f-ab5f94a5fb57", "embedding": null, "metadata": {"page_label": "2", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "1eb867f6-2334-4615-9360-df36808375eb", "node_type": "4", "metadata": {"page_label": "2", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "0165fd3447625d17d4424f764da78f21b9aab377a01e12e0ae99a474bdcca254"}, "2": {"node_id": "cbf07eca-e026-4906-964b-811831c2c3b9", "node_type": "1", "metadata": {"page_label": "2", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "b27832d1bcd3f3d7fc3bf739b5e0b7599c1cfe810774b0a9f7798a47d4b8a967"}, "3": {"node_id": "1e63b909-05a4-48d8-92b3-c8db6e3caeec", "node_type": "1", "metadata": {"page_label": "2", 
"file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "28a822e278204927cccb8e0eb20083566c5171bd81b47560b01981faef56b269"}}, "hash": "92464fbd6d1e8dc196d85d4ed27792dec4dfad9a12efcc13e492f2eeabbfcbaa", "text": "2 PRELIMINARIES AND RELATED WORK\nIn this section, we introduce the basic definition of heteroge-\nneous graphs with network dynamics and review the recent devel-\nopment on graph neural networks (GNNs) and their heterogeneous\nvariants. We also highlight the difference between HGT and existing\nattempts on heterogeneous graph neural networks.\n2.1 Heterogeneous Graph Mining\nHeterogeneous graphs [ 17] (a.k.a., heterogeneous information\nnetworks) are an important abstraction for modeling relational data\nfor many real-world complex systems. Formally, it is defined as:\nDefinition 1. Heterogeneous Graph: A heterogeneous graph\nis defined as a directed graph G=(V,E,A,R)where each node\nv\u2208V and each edge e\u2208Eare associated with their type mapping\nfunctions\u03c4(v):V\u2192A and\u03d5(e):E\u2192R , respectively.\nMeta Relation. For an edge e=(s,t)linked from source node sto\ntarget node t, its meta relation is denoted as \u27e8\u03c4(s),\u03d5(e),\u03c4(t)\u27e9. Natu-\nrally,\u03d5(e)\u22121represents the inverse of \u03d5(e). The classical meta path\nparadigm [17\u201319] is defined as a sequence of such meta relation.\nNotice that, to better model real-world heterogeneous networks,\nwe assume that there may exist multiple types of relations between\ndifferent types of nodes. For example, in OAG there are different\ntypes of relations between the author andpaper nodes by consid-\nering the authorship order, i.e., \u201cthe first author of\u201d, \u201cthe second\nauthor of\u201d, and so on.\nDynamic Heterogeneous Graph. 
To model the dynamic nature of real-world (heterogeneous) graphs, we assign each edge e = (s, t) a timestamp T when node s connects to node t at time T.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "1e63b909-05a4-48d8-92b3-c8db6e3caeec": {"__data__": {"id_": "1e63b909-05a4-48d8-92b3-c8db6e3caeec", "embedding": null, "metadata": {"page_label": "2", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "1eb867f6-2334-4615-9360-df36808375eb", "node_type": "4", "metadata": {"page_label": "2", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "0165fd3447625d17d4424f764da78f21b9aab377a01e12e0ae99a474bdcca254"}, "2": {"node_id": "905772c9-f508-47a1-9f3f-ab5f94a5fb57", "node_type": "1", "metadata": {"page_label": "2", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "92464fbd6d1e8dc196d85d4ed27792dec4dfad9a12efcc13e492f2eeabbfcbaa"}}, "hash": "28a822e278204927cccb8e0eb20083566c5171bd81b47560b01981faef56b269", "text": "If s appears for the first time, T is also assigned to s; s can be associated with multiple timestamps if it builds connections over time.\nIn other words, we assume that the timestamp of an edge is unchanged, denoting the time at which it is created. For example, when a paper is published at a conference at time T, T will be assigned to the edge between the paper and conference nodes. On the contrary, different timestamps can be assigned to a node accordingly. For example, the conference node \u201cWWW\u201d can be assigned any year: WWW@1994 means that we are considering the first edition of WWW, which focused more on internet protocols and Web infrastructure, while WWW@2020 means the upcoming WWW, which expands its research topics to social analysis, ubiquitous computing, search & IR, privacy and society, etc.\nThere have been significant lines of research on mining heterogeneous graphs, such as node classification, clustering, ranking and representation learning [3, 17\u201319], while the dynamic perspective of heterogeneous graphs has not been extensively explored and studied.\n2.2 Graph Neural Networks\nRecent years have witnessed the success of graph neural networks for relational data [7, 9, 22]. 
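To make Definition 1 and the timestamp assignment above concrete, here is a minimal Python sketch of typed, timestamped edges and their meta relation triplets. The Edge class, node_type map, and meta_relation helper are illustrative names of ours, not identifiers from the paper or its released code:

```python
from dataclasses import dataclass

@dataclass(frozen=True)
class Edge:
    source: str      # id of source node s
    target: str      # id of target node t
    edge_type: str   # phi(e), e.g. "first_author_of"
    timestamp: int   # T, assigned when s connects to t

# tau(v): the node-type mapping function, here just a dict (toy example)
node_type = {"a1": "author", "p1": "paper", "v1": "venue"}

def meta_relation(e: Edge) -> tuple:
    """Return the meta relation triplet <tau(s), phi(e), tau(t)> of edge e."""
    return (node_type[e.source], e.edge_type, node_type[e.target])

e = Edge("a1", "p1", "first_author_of", timestamp=2020)
assert meta_relation(e) == ("author", "first_author_of", "paper")
```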
Generally, a GNN can be regarded as using the input graph structure as the computation graph for", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "01d82a0f-42e6-49ef-9a7a-3e175795b415": {"__data__": {"id_": "01d82a0f-42e6-49ef-9a7a-3e175795b415", "embedding": null, "metadata": {"page_label": "3", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "681b072c-d4ae-407a-b4d2-2e37d084b018", "node_type": "4", "metadata": {"page_label": "3", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "22cd93d4a5ea32e00f61c8b2fc56465b6622bbb9cc3219da94235742c5c249dc"}, "3": {"node_id": "ae8f3a67-6919-4cf1-9c7c-5ca1a07e0402", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "1b1bb59ffe0e2804f9b06f8880b9159b2aea6ffd394a80ab16326e058653a702"}}, "hash": "def959a033d12011e58530d6cd34053416a9388271fafc9c899f25cd29cb3dbb", "text": "message passing [6], during which the local neighborhood information is aggregated to get a more contextual representation. Formally, it has the following form:\nDefinition 2. General GNN Framework: Suppose H^l[t] is the node representation of node t at the l-th GNN layer; the update procedure from the (l-1)-th layer to the l-th layer is:\nH^l[t] ← Aggregate_{∀s∈N(t), ∀e∈E(s,t)}( Extract( H^{l-1}[s]; H^{l-1}[t], e ) )   (1)\nwhere N(t) denotes all the source nodes of node t and E(s,t) denotes all the edges from node s to t.\nThe most important GNN operators are Extract(·) and Aggregate(·). Extract(·) is the neighbor information extractor: it extracts useful information from the source node\u2019s representation H^{l-1}[s], using the target node\u2019s representation H^{l-1}[t] and the edge e between the two nodes as the query. Aggregate(·) gathers the neighborhood information of the source nodes via aggregation operators such as mean, sum, and max, while more sophisticated pooling and normalization functions can also be designed.\nVarious (homogeneous) GNN architectures have been proposed following this framework. Kipf et al. [9] propose the graph convolutional network (GCN), which averages the one-hop neighbors of each node in the graph, followed by a linear projection and a non-linear activation. Hamilton et al. propose GraphSAGE, which generalizes GCN\u2019s aggregation operation from average to sum, max, and an RNN unit. 
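Definition 2's update rule can be read as a small loop over incoming edges. The following sketch, with hypothetical gnn_layer/extract/aggregate names and toy scalar "representations", shows one layer under those assumptions:

```python
def gnn_layer(H_prev, in_edges, extract, aggregate):
    """One generic GNN layer per Definition 2:
    H[t] <- Aggregate over all s in N(t), e in E(s,t) of Extract(H[s]; H[t], e)."""
    H = {}
    for t in H_prev:
        msgs = [extract(H_prev[s], H_prev[t], e) for s, e in in_edges.get(t, [])]
        H[t] = aggregate(msgs) if msgs else H_prev[t]  # isolated nodes keep their state
    return H

# Toy run with mean aggregation:
H0 = {"a": 1.0, "b": 3.0, "t": 0.0}
in_edges = {"t": [("a", "writes"), ("b", "writes")]}
H1 = gnn_layer(H0, in_edges,
               extract=lambda h_s, h_t, e: h_s,             # pass source state through
               aggregate=lambda msgs: sum(msgs) / len(msgs))
print(H1["t"])  # 2.0
```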
Velickovi et al.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "ae8f3a67-6919-4cf1-9c7c-5ca1a07e0402": {"__data__": {"id_": "ae8f3a67-6919-4cf1-9c7c-5ca1a07e0402", "embedding": null, "metadata": {"page_label": "3", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "681b072c-d4ae-407a-b4d2-2e37d084b018", "node_type": "4", "metadata": {"page_label": "3", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "22cd93d4a5ea32e00f61c8b2fc56465b6622bbb9cc3219da94235742c5c249dc"}, "2": {"node_id": "01d82a0f-42e6-49ef-9a7a-3e175795b415", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "def959a033d12011e58530d6cd34053416a9388271fafc9c899f25cd29cb3dbb"}, "3": {"node_id": "611d01f7-d6d0-41ee-ba40-7ed46668a98e", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "cea31513e031df82fbc5d812c65da48e406a824cdc7ca07ed7ecb8e363480a60"}}, "hash": "1b1bb59ffe0e2804f9b06f8880b9159b2aea6ffd394a80ab16326e058653a702", "text": "Velickovi et al. propose graph attention network\n(GAT) [ 22] by introducing the attention mechanism into GNNs,\nwhich allows GAT to assign different importance to nodes within\nthe same neighborhood.\n2.3 Heterogeneous GNNs\nRecently, studies have attempted to extend GNNs for modeling\nheterogeneous graphs. Schlichtkrull et al. [14] propose the rela-\ntional graph convolutional networks (RGCN) to model knowledge\ngraphs. RGCN keeps a distinct linear projection weight for each\nedge type. Zhang et al. [27] present the heterogeneous graph neural\nnetworks (HetGNN) that adopts different RNNs for different node\ntypes to integrate multi-modal features. Wang et al. [23] extend\ngraph attention networks by maintaining different weights for dif-\nferent meta-path-defined edges. They also use high-level semantic\nattention to differentiate and aggregate information from different\nmeta paths.\nThough these methods have shown to be empirically better than\nthe vanilla GCN and GAT models, they have not fully utilized the\nheterogeneous graphs\u2019 properties. All of them use either node type\nor edge type alone to determine GNN weight matrices. However,\nthe node or edge counts of different types can vary greatly. For\nrelations that don\u2019t have sufficient occurrences, it\u2019s hard to learn\naccurate relation-specific weights. To address this, we propose to\nconsider parameter sharing for a better generalization. 
Given an edge e = (s, t) with meta relation ⟨τ(s), ϕ(e), τ(t)⟩, if we use three interaction matrices to model the three corresponding elements τ(s), ϕ(e), and τ(t) of the meta relation, then the majority of the weights can be shared. For example, in the \u201cthe first author of\u201d and \u201cthe second author of\u201d relationships, the source and target node types are author and paper, respectively, for both.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "611d01f7-d6d0-41ee-ba40-7ed46668a98e": {"__data__": {"id_": "611d01f7-d6d0-41ee-ba40-7ed46668a98e", "embedding": null, "metadata": {"page_label": "3", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "681b072c-d4ae-407a-b4d2-2e37d084b018", "node_type": "4", "metadata": {"page_label": "3", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "22cd93d4a5ea32e00f61c8b2fc56465b6622bbb9cc3219da94235742c5c249dc"}, "2": {"node_id": "ae8f3a67-6919-4cf1-9c7c-5ca1a07e0402", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "1b1bb59ffe0e2804f9b06f8880b9159b2aea6ffd394a80ab16326e058653a702"}, "3": {"node_id": "ccdf0301-0bc3-4355-a575-31f5cea8baf6", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "f15944ec52d0509bb12c9338985646ea59ba84f639b0d3ec3d7789ec25433b54"}}, "hash": "cea31513e031df82fbc5d812c65da48e406a824cdc7ca07ed7ecb8e363480a60", "text": "In other words, the knowledge about author and paper learned from one relation can be quickly transferred and adapted to the other. 
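A rough sketch of this triplet factorization (illustrative numpy, our own names and shapes, not the paper's implementation): both authorship relations reuse the author and paper projections and differ only in the edge matrix, which is how most weights get shared:

```python
import numpy as np

d = 8
node_types = ["author", "paper"]
edge_types = ["first_author_of", "second_author_of"]

# One matrix per node type and per edge type, instead of one per full triplet:
W_src  = {a: np.random.randn(d, d) for a in node_types}
W_edge = {r: np.random.randn(d, d) for r in edge_types}
W_tgt  = {a: np.random.randn(d, d) for a in node_types}

def interaction_score(h_s, tau_s, phi_e, tau_t, h_t):
    """Compose the three per-element matrices for the triplet <tau_s, phi_e, tau_t>."""
    return float((W_src[tau_s] @ h_s) @ W_edge[phi_e] @ (W_tgt[tau_t] @ h_t))

# Both relations reuse W_src["author"] and W_tgt["paper"]; only W_edge differs,
# so what is learned about author/paper transfers across them.
s1 = interaction_score(np.ones(d), "author", "first_author_of",  "paper", np.ones(d))
s2 = interaction_score(np.ones(d), "author", "second_author_of", "paper", np.ones(d))
```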
Therefore, we integrate this idea with the powerful Transformer-like attention architecture and propose the Heterogeneous Graph Transformer.\nTo summarize, the key differences between HGT and existing attempts include:\n(1) Instead of attending on node or edge type alone, we use the meta relation ⟨τ(s), ϕ(e), τ(t)⟩ to decompose the interaction and transform matrices, enabling HGT to capture both the common and specific patterns of different relationships using equal or even fewer parameters.\n(2) Different from most existing works that rely on customized meta paths, we rely on the nature of the neural architecture to incorporate high-order heterogeneous neighbor information, which automatically learns the importance of implicit meta paths.\n(3) Most previous works do not take the dynamic nature of (heterogeneous) graphs into consideration, while we propose the relative temporal encoding technique to incorporate temporal information using limited computational resources.\n(4) None of the existing heterogeneous GNNs are designed for, or experimented with, Web-scale graphs; we therefore propose the heterogeneous Mini-Batch graph sampling algorithm designed for Web-scale graph training, enabling experiments on the billion-scale Open Academic Graph.\n3 HETEROGENEOUS GRAPH TRANSFORMER\nIn this section, we present the Heterogeneous Graph Transformer (HGT). Its idea is to use the meta relations of heterogeneous graphs to parameterize the weight matrices for the heterogeneous mutual attention, message passing, and propagation steps. To further incorporate network dynamics, we introduce a relative temporal encoding mechanism into the model.\n3.1 Overall HGT Architecture\nFigure 2 shows the overall architecture of the Heterogeneous Graph Transformer. 
Given a sampled heterogeneous sub-graph (Cf.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "ccdf0301-0bc3-4355-a575-31f5cea8baf6": {"__data__": {"id_": "ccdf0301-0bc3-4355-a575-31f5cea8baf6", "embedding": null, "metadata": {"page_label": "3", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "681b072c-d4ae-407a-b4d2-2e37d084b018", "node_type": "4", "metadata": {"page_label": "3", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "22cd93d4a5ea32e00f61c8b2fc56465b6622bbb9cc3219da94235742c5c249dc"}, "2": {"node_id": "611d01f7-d6d0-41ee-ba40-7ed46668a98e", "node_type": "1", "metadata": {"page_label": "3", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "cea31513e031df82fbc5d812c65da48e406a824cdc7ca07ed7ecb8e363480a60"}}, "hash": "f15944ec52d0509bb12c9338985646ea59ba84f639b0d3ec3d7789ec25433b54", "text": "Given a sampled heterogeneous sub-graph (cf. Section 4), HGT extracts all linked node pairs, where target node t is linked to by source node s via edge e. The goal of HGT is to aggregate information from the source nodes to get a contextualized representation for target node t. This process can be decomposed into three components: Heterogeneous Mutual Attention, Heterogeneous Message Passing and Target-Specific Aggregation.\nWe denote the output of the l-th HGT layer as H^(l), which is also the input of the (l+1)-th layer. By stacking L layers, we can get the node representations of the whole graph, H^(L), which can be used for end-to-end training or fed into downstream tasks.\n3.2 Heterogeneous Mutual Attention\nThe first step is to calculate the mutual attention between source node s and target node t. 
We first give a brief introduction to general attention-based GNNs as follows:\nH^l[t] ← Aggregate_{∀s∈N(t), ∀e∈E(s,t)}( Attention(s,t) · Message(s) )   (2)", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "6262b8bd-2101-4d15-b3ce-a56d217b7c4f": {"__data__": {"id_": "6262b8bd-2101-4d15-b3ce-a56d217b7c4f", "embedding": null, "metadata": {"page_label": "4", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "859b3475-3d01-499b-9e73-8c43546ea104", "node_type": "4", "metadata": {"page_label": "4", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "8277539a9bb2bc291202182cd75f19bcdff308635b52308fc69e837d85013297"}, "3": {"node_id": "5bd87cc3-41e4-41fc-bf4f-24d867b3af6a", "node_type": "1", "metadata": {"page_label": "4", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "cab86ca6821292ec51ba7986744fce1f1001afc5354e332479d7f14f331995d5"}}, "hash": "38d6830d511ad1f421b5e395fe0d133afa24f76aafca0a71a4d682d5a6f24318", "text": "Figure 2: The overall architecture of the Heterogeneous Graph Transformer. Given a sampled heterogeneous sub-graph with t as the target node and s1 & s2 as source nodes, the HGT model takes the edges e1 = (s1, t) & e2 = (s2, t) and their corresponding meta relations ⟨τ(s1), ϕ(e1), τ(t)⟩ & ⟨τ(s2), ϕ(e2), τ(t)⟩ as input to learn a contextualized representation H^(L) for each node, which can be used for downstream tasks. Color denotes the node type. 
HGT includes three components: (1) meta-relation-aware heterogeneous mutual attention, (2) heterogeneous message passing from source nodes, and (3) target-specific heterogeneous message aggregation.\nwhere there are three basic operators: Attention, which estimates the importance of each source node; Message, which extracts the message using only the source node s; and Aggregate, which aggregates the neighborhood messages using the attention weights.\nFor example, the Graph Attention Network (GAT) [22] adopts an additive mechanism as Attention, uses the same weight for calculating Message, and leverages a simple average followed by a nonlinear activation for the Aggregate step.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "5bd87cc3-41e4-41fc-bf4f-24d867b3af6a": {"__data__": {"id_": "5bd87cc3-41e4-41fc-bf4f-24d867b3af6a", "embedding": null, "metadata": {"page_label": "4", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "859b3475-3d01-499b-9e73-8c43546ea104", "node_type": "4", "metadata": {"page_label": "4", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "8277539a9bb2bc291202182cd75f19bcdff308635b52308fc69e837d85013297"}, "2": {"node_id": "6262b8bd-2101-4d15-b3ce-a56d217b7c4f", "node_type": "1", "metadata": {"page_label": "4", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "38d6830d511ad1f421b5e395fe0d133afa24f76aafca0a71a4d682d5a6f24318"}, "3": {"node_id": "eb6d3b55-7df3-4945-af0f-2ef1bd12223a", "node_type": "1", "metadata": {"page_label": "4", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "42515c7003ad66d66479ef220df85cc0878119096f1c53b72d493f6b28f79bdf"}}, "hash": "cab86ca6821292ec51ba7986744fce1f1001afc5354e332479d7f14f331995d5", "text": "Formally, GAT has:\nAttention_GAT(s,t) = Softmax_{∀s∈N(t)}( a^T ( W H^{l-1}[t] ∥ W H^{l-1}[s] ) )\nMessage_GAT(s) = W H^{l-1}[s]\nAggregate_GAT(·) = σ( Mean(·) )\nThough GAT is effective at giving high attention values to important nodes, it assumes that s and t have the same feature distributions by using a single weight matrix W. Such an assumption, as discussed in Section 1, is usually incorrect for heterogeneous graphs, where each type of node can have its own feature distribution.\nIn view of this limitation, we design the Heterogeneous Mutual Attention mechanism. 
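For contrast, a compact numpy sketch of the GAT operators just listed (single head, LeakyReLU omitted for brevity; names are ours): note how one shared W projects every neighbor regardless of its type, which is exactly the homogeneity assumption criticized above:

```python
import numpy as np

d = 4
W = np.random.randn(d, d)    # the single shared projection for ALL node types
a = np.random.randn(2 * d)   # additive attention vector

def gat_attention(h_t, neighbors):
    # Softmax over s in N(t) of a^T (W h_t || W h_s)
    logits = np.array([a @ np.concatenate([W @ h_t, W @ h_s]) for h_s in neighbors])
    exp = np.exp(logits - logits.max())
    return exp / exp.sum()

# Every neighbor, whatever its type, passes through the same W:
alphas = gat_attention(np.random.randn(d), [np.random.randn(d) for _ in range(3)])
```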
Given a target node t and all its neighbors s ∈ N(t), which might belong to different distributions, we want to calculate their mutual attention grounded in their meta relations, i.e., the ⟨τ(s), ϕ(e), τ(t)⟩ triplets.\nInspired by the architecture design of the Transformer [21], we map target node t into a Query vector and source node s into a Key vector, and calculate their dot product as attention. The key difference is that the vanilla Transformer uses a single set of projections for all words, while in our case each meta relation should have a distinct set of projection weights. To maximize parameter sharing while still maintaining the specific characteristics of different relations, we propose to parameterize the weight matrices of the interaction operators into a source node projection, an edge projection, and a target node projection.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "eb6d3b55-7df3-4945-af0f-2ef1bd12223a": {"__data__": {"id_": "eb6d3b55-7df3-4945-af0f-2ef1bd12223a", "embedding": null, "metadata": {"page_label": "4", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "859b3475-3d01-499b-9e73-8c43546ea104", "node_type": "4", "metadata": {"page_label": "4", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "8277539a9bb2bc291202182cd75f19bcdff308635b52308fc69e837d85013297"}, "2": {"node_id": "5bd87cc3-41e4-41fc-bf4f-24d867b3af6a", "node_type": "1", "metadata": {"page_label": "4", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "cab86ca6821292ec51ba7986744fce1f1001afc5354e332479d7f14f331995d5"}, "3": {"node_id": "8c9b8955-6862-41f4-b727-0e4ed0876bc1", "node_type": "1", "metadata": {"page_label": "4", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "1249f31ab9a18e116dead7b11d197698bfc1e0dfa00ee2183eaa1d7fb1465460"}}, "hash": "42515c7003ad66d66479ef220df85cc0878119096f1c53b72d493f6b28f79bdf", "text": "Specifically, we calculate the h-head attention for each edge e = (s, t) (see Figure 2 (1)) by:\nAttention_HGT(s,e,t) = Softmax_{∀s∈N(t)}( ∥_{i∈[1,h]} ATT-head^i(s,e,t) )   (3)\nATT-head^i(s,e,t) = ( K^i(s) W^{ATT}_{ϕ(e)} Q^i(t)^T ) · µ_{⟨τ(s),ϕ(e),τ(t)⟩} / √d\nK^i(s) = K-Linear^i_{τ(s)}( H^{(l-1)}[s] )\nQ^i(t) = Q-Linear^i_{τ(t)}( H^{(l-1)}[t] )\nFirst, for the i-th attention head ATT-head^i(s,e,t), we project the τ(s)-type source node s into the i-th Key vector K^i(s) with a linear projection K-Linear^i_{τ(s)}: R^d → R^{d/h}, where 
h is the number of attention heads and d/h is the vector dimension per head. Note that K-Linear^i_{τ(s)} is indexed by the source node s\u2019s type τ(s), meaning that each type of node has a unique linear projection to maximally model the distribution differences. Similarly, we also project the target node t with a linear projection Q-Linear^i_{τ(t)} into the i-th Query vector.\nNext, we need to calculate the similarity between the Query vector Q^i(t) and the Key vector K^i(s). One unique characteristic of heterogeneous graphs is that there may exist different edge types (relations) between a node type pair, e.g., τ(s) and τ(t).", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "8c9b8955-6862-41f4-b727-0e4ed0876bc1": {"__data__": {"id_": "8c9b8955-6862-41f4-b727-0e4ed0876bc1", "embedding": null, "metadata": {"page_label": "4", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "859b3475-3d01-499b-9e73-8c43546ea104", "node_type": "4", "metadata": {"page_label": "4", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "8277539a9bb2bc291202182cd75f19bcdff308635b52308fc69e837d85013297"}, "2": {"node_id": "eb6d3b55-7df3-4945-af0f-2ef1bd12223a", "node_type": "1", "metadata": {"page_label": "4", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "42515c7003ad66d66479ef220df85cc0878119096f1c53b72d493f6b28f79bdf"}}, "hash": "1249f31ab9a18e116dead7b11d197698bfc1e0dfa00ee2183eaa1d7fb1465460", "text": "Therefore, unlike the vanilla Transformer, which directly calculates the dot product between the Query and Key vectors, we keep a distinct edge-based matrix W^{ATT}_{ϕ(e)} ∈ R^{(d/h)×(d/h)} for each edge type ϕ(e). 
In doing so, the model can capture different semantic relations even between", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "48643cd6-43bc-4667-bc82-e8bd01a20fbe": {"__data__": {"id_": "48643cd6-43bc-4667-bc82-e8bd01a20fbe", "embedding": null, "metadata": {"page_label": "5", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "40514f02-0035-4454-bc49-ddb28d097eee", "node_type": "4", "metadata": {"page_label": "5", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "864598e743cc8b86a13d888c7f1fe9f382b30358a7cb9f69741e3d9ed7e533bf"}, "3": {"node_id": "fa0e90cb-9ff0-4ef5-b097-09d1e3ee43e6", "node_type": "1", "metadata": {"page_label": "5", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "7fc1518376332e4eacfbac8512679727d105e83caf0b83de897c345f364096ee"}}, "hash": "139abb98a7fbd0b1845dbe5dce70a51e7c5592e4e013b4060e1fed9771d5b80b", "text": "the same node type pairs. Moreover, since not all relationships contribute equally to the target nodes, we add a prior tensor µ ∈ R^{|A|×|R|×|A|} to denote the general significance of each meta relation triplet, serving as an adaptive scaling of the attention.\nFinally, we concatenate the h attention heads to get the attention vector for each node pair. Then, for each target node t, we gather all attention vectors from its neighbors N(t) and conduct softmax, making them fulfill Σ_{∀s∈N(t)} Attention_HGT(s,e,t) = 1_{h×1}.\n3.3 Heterogeneous Message Passing\nParallel to the calculation of mutual attention, we pass information from source nodes to target nodes (see Figure 2 (2)). Similar to the attention process, we would like to incorporate the meta relations of edges into the message passing process to alleviate the distribution differences of nodes and edges of different types. For a pair of nodes e = (s, t), we calculate its multi-head Message by:\nMessage_HGT(s,e,t) = ∥_{i∈[1,h]} MSG-head^i(s,e,t)   (4)\nMSG-head^i(s,e,t) = M-Linear^i_{τ(s)}( H^{(l-1)}[s] ) W^{MSG}_{ϕ(e)}\nTo get the i-th message head MSG-head^i(s,e,t), we first project the τ(s)-type source node s into the i-th message vector with a linear projection M-Linear^i_{τ(s)}: R^d → R^{d/h}. 
It is then followed by a matrix W^{MSG}_{ϕ(e)} ∈ R^{(d/h)×(d/h)} that incorporates the edge dependency.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "fa0e90cb-9ff0-4ef5-b097-09d1e3ee43e6": {"__data__": {"id_": "fa0e90cb-9ff0-4ef5-b097-09d1e3ee43e6", "embedding": null, "metadata": {"page_label": "5", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "40514f02-0035-4454-bc49-ddb28d097eee", "node_type": "4", "metadata": {"page_label": "5", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "864598e743cc8b86a13d888c7f1fe9f382b30358a7cb9f69741e3d9ed7e533bf"}, "2": {"node_id": "48643cd6-43bc-4667-bc82-e8bd01a20fbe", "node_type": "1", "metadata": {"page_label": "5", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "139abb98a7fbd0b1845dbe5dce70a51e7c5592e4e013b4060e1fed9771d5b80b"}, "3": {"node_id": "f516eb1f-209f-440e-90bc-fb7242626f8e", "node_type": "1", "metadata": {"page_label": "5", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "77e43f3ce8d665a8cc3def596bbf8f11a6c86e4c19b86a44b2273c3f6d491941"}}, "hash": "7fc1518376332e4eacfbac8512679727d105e83caf0b83de897c345f364096ee", "text": "The final step is to concatenate all h message heads to get Message_HGT(s,e,t) for each node pair.\n3.4 Target-Specific Aggregation\nWith the heterogeneous multi-head attention and messages calculated, we need to aggregate them from the source nodes to the target node (see Figure 2 (3)). Since the softmax procedure in Eq. 3 makes each target node t\u2019s attention vectors sum to one, we can simply use the attention vector as the weight to average the corresponding messages from the source nodes and get the updated vector H̃^(l)[t] as:\nH̃^(l)[t] = ⊕_{∀s∈N(t)}( Attention_HGT(s,e,t) · Message_HGT(s,e,t) ).\nThis aggregates information to the target node t from all its neighbors (source nodes) of different feature distributions.\nThe final step is to map target node t\u2019s vector back to its type-specific distribution, indexed by its node type τ(t). To do so, we apply a linear projection A-Linear_{τ(t)} to the updated vector H̃^(l)[t], followed by a residual connection [8]:\nH^(l)[t] = A-Linear_{τ(t)}( σ( H̃^(l)[t] ) ) + H^(l-1)[t].   (5)\nIn this way, we get the l-th HGT layer\u2019s output H^(l)[t] for the target node t. 
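Putting Eqs. (3)-(5) together, here is a hedged single-head PyTorch sketch of one HGT layer (h = 1 so the concatenation over heads disappears; all class and variable names are ours, not the authors' released code, and torch.relu stands in for the paper's unspecified activation σ):

```python
import math
import torch
import torch.nn as nn

class HGTLayerSketch(nn.Module):
    """One HGT layer per Eqs. (3)-(5), single head (h=1) for readability."""
    def __init__(self, d, node_types, edge_types, meta_relations):
        super().__init__()
        self.d = d
        # Node-type-indexed projections (K-Linear, Q-Linear, M-Linear, A-Linear):
        self.k_lin = nn.ModuleDict({a: nn.Linear(d, d) for a in node_types})
        self.q_lin = nn.ModuleDict({a: nn.Linear(d, d) for a in node_types})
        self.m_lin = nn.ModuleDict({a: nn.Linear(d, d) for a in node_types})
        self.a_lin = nn.ModuleDict({a: nn.Linear(d, d) for a in node_types})
        # Edge-type-indexed matrices W_ATT and W_MSG:
        self.w_att = nn.ParameterDict({r: nn.Parameter(torch.eye(d)) for r in edge_types})
        self.w_msg = nn.ParameterDict({r: nn.Parameter(torch.eye(d)) for r in edge_types})
        # Prior mu: one learnable scalar per meta relation triplet <tau_s, phi_e, tau_t>:
        self.mu = nn.ParameterDict({"|".join(m): nn.Parameter(torch.ones(1))
                                    for m in meta_relations})

    def forward(self, h, tau, t, in_edges):
        """h: {node: (d,) tensor}; tau: {node: type}; in_edges: [(s, phi_e), ...] into t."""
        q = self.q_lin[tau[t]](h[t])                              # Query of t
        scores, msgs = [], []
        for s, phi_e in in_edges:
            k = self.k_lin[tau[s]](h[s])                          # Key of s
            mu = self.mu["|".join((tau[s], phi_e, tau[t]))]
            scores.append((k @ self.w_att[phi_e] @ q) * mu / math.sqrt(self.d))
            msgs.append(self.m_lin[tau[s]](h[s]) @ self.w_msg[phi_e])
        att = torch.softmax(torch.cat(scores), dim=0)             # Eq. (3)
        agg = (att.unsqueeze(-1) * torch.stack(msgs)).sum(0)      # weighted sum (Sec. 3.4)
        return self.a_lin[tau[t]](torch.relu(agg)) + h[t]         # Eq. (5): sigma + residual

d = 8
layer = HGTLayerSketch(d, ["author", "paper"], ["writes"],
                       [("author", "writes", "paper")])
h = {"a1": torch.randn(d), "a2": torch.randn(d), "p1": torch.randn(d)}
tau = {"a1": "author", "a2": "author", "p1": "paper"}
h_p1 = layer(h, tau, "p1", [("a1", "writes"), ("a2", "writes")])
```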
Due to the \u201csmall-world\u201d property of real-world graphs, stacking the HGT blocks for L layers (L being a small value) enables each node to reach a large proportion of the nodes—of different types and relations—in the full graph. That is, HGT generates a highly contextualized representation H^(L) for each node, which can be fed into any model to conduct downstream heterogeneous network tasks, such as node classification and link prediction.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "f516eb1f-209f-440e-90bc-fb7242626f8e": {"__data__": {"id_": "f516eb1f-209f-440e-90bc-fb7242626f8e", "embedding": null, "metadata": {"page_label": "5", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "40514f02-0035-4454-bc49-ddb28d097eee", "node_type": "4", "metadata": {"page_label": "5", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "864598e743cc8b86a13d888c7f1fe9f382b30358a7cb9f69741e3d9ed7e533bf"}, "2": {"node_id": "fa0e90cb-9ff0-4ef5-b097-09d1e3ee43e6", "node_type": "1", "metadata": {"page_label": "5", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "7fc1518376332e4eacfbac8512679727d105e83caf0b83de897c345f364096ee"}, "3": {"node_id": "fa3ee10b-cd66-434d-8184-baeb41614874", "node_type": "1", "metadata": {"page_label": "5", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "9515212925dd815cae0ed80b972ef0046da39d417b2e2e339bc6625ed9da2499"}}, "hash": "77e43f3ce8d665a8cc3def596bbf8f11a6c86e4c19b86a44b2273c3f6d491941", "text": "Figure 3: Relative Temporal Encoding (RTE) to model graph dynamics. Nodes are associated with timestamps T(·). After the RTE process, the temporally augmented representations are fed to the HGT model.\nThroughout the whole model architecture, we rely heavily on using the meta relation ⟨τ(s), ϕ(e), τ(t)⟩ to parameterize the weight matrices separately. This can be interpreted as a trade-off between model capacity and efficiency. Compared with the vanilla Transformer, our model distinguishes the operators for different relations and is thus more capable of handling the distribution differences in heterogeneous graphs. Compared with existing models that keep a distinct matrix for each meta relation as a whole, HGT\u2019s triplet parameterization can better leverage the heterogeneous graph schema to achieve parameter sharing. On one hand, relations with few occurrences can benefit from such parameter sharing for fast adaptation and generalization. 
On the other hand, the operators of different relationships can still maintain their specific characteristics while using a much smaller parameter set.\n3.5 Relative Temporal Encoding\nSo far, we have presented HGT—a graph neural network for modeling heterogeneous graphs. Next, we introduce the Relative Temporal Encoding (RTE) technique, which enables HGT to handle graph dynamics.\nThe traditional way to incorporate temporal information is to construct a separate graph for each time slot. However, such a procedure may lose a large portion of the structural dependencies across different time slots. Meanwhile, the representation of a node at time t might rely on edges that happen at other time slots. Therefore, a proper way to model dynamic graphs is to maintain all the edges happening at different times and to allow nodes and edges with different timestamps to interact with each other.\nIn light of this, we propose the Relative Temporal Encoding (RTE) mechanism to model the dynamic dependencies in heterogeneous graphs.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "fa3ee10b-cd66-434d-8184-baeb41614874": {"__data__": {"id_": "fa3ee10b-cd66-434d-8184-baeb41614874", "embedding": null, "metadata": {"page_label": "5", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "40514f02-0035-4454-bc49-ddb28d097eee", "node_type": "4", "metadata": {"page_label": "5", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "864598e743cc8b86a13d888c7f1fe9f382b30358a7cb9f69741e3d9ed7e533bf"}, "2": {"node_id": "f516eb1f-209f-440e-90bc-fb7242626f8e", "node_type": "1", "metadata": {"page_label": "5", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "77e43f3ce8d665a8cc3def596bbf8f11a6c86e4c19b86a44b2273c3f6d491941"}}, "hash": "9515212925dd815cae0ed80b972ef0046da39d417b2e2e339bc6625ed9da2499", "text": "RTE is inspired by the Transformer\u2019s positional encoding method [15, 21], which has been shown to successfully capture the sequential dependencies of words in long texts.\nSpecifically, given a source node s and a target node t, along with their corresponding timestamps T(s) and T(t), we denote the relative time gap ∆T(t,s) = T(t) − T(s) as an index to get a relative temporal encoding RTE(∆T(t,s)). 
Note that the training dataset", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "e611770c-8153-4e7a-b7e4-9bb00a639a23": {"__data__": {"id_": "e611770c-8153-4e7a-b7e4-9bb00a639a23", "embedding": null, "metadata": {"page_label": "6", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "610d896b-01cd-43ed-a0e6-3847712913a6", "node_type": "4", "metadata": {"page_label": "6", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "c14210eb722eadddb07bb607a0a10354c9147de1afbed69ef7eebfb20d641da7"}, "3": {"node_id": "ecc7d6b0-a501-4eba-856e-8986c908bc51", "node_type": "1", "metadata": {"page_label": "6", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "e650c9aacc7b4d589bf819777a111711381969ad1592671fd34dd4f868444d87"}}, "hash": "4464b0a75435307c8c066ad529aae5d6ed37d25cc1182b2d0069cfc7bb0e74ae", "text": "will not cover all possible time gaps, and thus RTE should be capable of generalizing to unseen times and time gaps. Therefore, we adopt a fixed set of sinusoid functions as the basis, with a tunable linear projection T-Linear*: R^d → R^d, as RTE:\nBase(∆T(t,s), 2i) = sin( ∆T(t,s) / 10000^{2i/d} )   (6)\nBase(∆T(t,s), 2i+1) = cos( ∆T(t,s) / 10000^{(2i+1)/d} )   (7)\nRTE(∆T(t,s)) = T-Linear( Base(∆T(t,s)) )   (8)\nFinally, the temporal encoding relative to the target node t is added to the source node s\u2019s representation as follows:\nĤ^(l-1)[s] = H^(l-1)[s] + RTE(∆T(t,s))   (9)\nIn this way, the temporally augmented representation Ĥ^(l-1) will capture the relative temporal information of source node s and target node t. The RTE procedure is illustrated in Figure 3.\n4 WEB-SCALE HGT TRAINING\nIn this section, we present HGT\u2019s strategies for training on Web-scale heterogeneous graphs with dynamic information, including an efficient Heterogeneous Mini-Batch Graph Sampling algorithm—HGSampling—and an inductive timestamp assignment method.\n4.1 HGSampling\nFull-batch GNN [9] training requires the calculation of all node representations per layer, making it not scalable for Web-scale graphs. 
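A small PyTorch sketch of Eqs. (6)-(9), assuming integer timestamps and a hypothetical RTESketch class of ours: even positions of the basis use sin, odd positions cos, and a learnable T-Linear maps the basis to R^d before it is added to the source representation:

```python
import torch
import torch.nn as nn

class RTESketch(nn.Module):
    """Relative Temporal Encoding per Eqs. (6)-(9): fixed sinusoidal basis over
    the time gap Delta T(t,s), followed by the tunable projection T-Linear."""
    def __init__(self, d):
        super().__init__()
        self.d = d
        self.t_linear = nn.Linear(d, d)                  # T-Linear: R^d -> R^d

    def basis(self, delta_t):
        pos = torch.arange(self.d, dtype=torch.float32)  # even -> sin, odd -> cos
        angle = delta_t / torch.pow(10000.0, pos / self.d)
        return torch.where(pos % 2 == 0, torch.sin(angle), torch.cos(angle))

    def forward(self, h_s, T_t, T_s):
        delta = float(T_t - T_s)                         # Delta T(t,s) = T(t) - T(s)
        return h_s + self.t_linear(self.basis(delta))    # Eq. (9)

rte = RTESketch(d=16)
h_hat = rte(torch.randn(16), T_t=2019, T_s=1994)         # works for unseen gaps too
```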
To address this issue, different sampling-based methods [1, 2, 7, 29] have been proposed to train GNNs on a subset of nodes.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"}, "ecc7d6b0-a501-4eba-856e-8986c908bc51": {"__data__": {"id_": "ecc7d6b0-a501-4eba-856e-8986c908bc51", "embedding": null, "metadata": {"page_label": "6", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "610d896b-01cd-43ed-a0e6-3847712913a6", "node_type": "4", "metadata": {"page_label": "6", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "c14210eb722eadddb07bb607a0a10354c9147de1afbed69ef7eebfb20d641da7"}, "2": {"node_id": "e611770c-8153-4e7a-b7e4-9bb00a639a23", "node_type": "1", "metadata": {"page_label": "6", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "4464b0a75435307c8c066ad529aae5d6ed37d25cc1182b2d0069cfc7bb0e74ae"}, "3": {"node_id": "901519b1-7622-49a8-9330-7c8742988373", "node_type": "1", "metadata": {"page_label": "6", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "2b404ffc1a06cb0292ef35c9f7ec9f4db462ee3686090495b0846ab85a05b235"}}, "hash": "e650c9aacc7b4d589bf819777a111711381969ad1592671fd34dd4f868444d87", "text": "However, directly using them for heterogeneous graphs is prone to yield sub-graphs that are extremely imbalanced with regard to the different node types, because the degree distribution and the total number of nodes can vary dramatically across types.\nTo address this issue, we propose an efficient Heterogeneous Mini-Batch Graph Sampling algorithm—HGSampling—that enables both HGT and traditional GNNs to handle Web-scale heterogeneous graphs. HGSampling is able to 1) keep a similar number of nodes and edges for each type, and 2) keep the sampled sub-graph dense, so as to minimize the information loss and reduce the sample variance.\nAlgorithm 1 outlines the HGSampling algorithm. Its basic idea is to keep a separate node budget B[τ] for each node type τ and to sample an equal number of nodes per type with an importance sampling strategy to reduce variance. Given an already sampled node t, we add all its direct neighbors into the corresponding budgets with Algorithm 2, adding t\u2019s normalized degree to these neighbors in line 8; this value is then used to calculate the sampling probability. Such normalization is equivalent to accumulating the random walk probability of each sampled node over its neighborhood, which avoids the sampling being dominated by high-degree nodes. 
"901519b1-7622-49a8-9330-7c8742988373": {"__data__": {"id_": "901519b1-7622-49a8-9330-7c8742988373", "embedding": null, "metadata": {"page_label": "6", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "610d896b-01cd-43ed-a0e6-3847712913a6", "node_type": "4", "metadata": {"page_label": "6", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "c14210eb722eadddb07bb607a0a10354c9147de1afbed69ef7eebfb20d641da7"}, "2": {"node_id": "ecc7d6b0-a501-4eba-856e-8986c908bc51", "node_type": "1", "metadata": {"page_label": "6", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "e650c9aacc7b4d589bf819777a111711381969ad1592671fd34dd4f868444d87"}, "3": {"node_id": "964a0eb7-9428-4dd4-9e99-7c1c305368b7", "node_type": "1", "metadata": {"page_label": "6", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "820661108183802aacbfa5a36aec905c9716d357e8d203bc050c58a2dddd3eb0"}}, "hash": "2b404ffc1a06cb0292ef35c9f7ec9f4db462ee3686090495b0846ab85a05b235", "text": "1: NS \\leftarrow OS // Initialize the sampled node set as the output node set.\n2: Initialize an empty budget B storing, for each node type, candidate nodes with their accumulated normalized degree.\n3: for t \\in NS do\n4:   Add-In-Budget(B, t, A, NS) // Add neighbors of t to B.\n5: end for\n6: for l \\leftarrow 1 to L do\n7:   for source node type \\tau \\in B do\n8:     for source node s \\in B[\\tau] do\n9:       prob^{(l-1)}[\\tau][s] \\leftarrow B[\\tau][s]^2 / \\|B[\\tau]\\|_2^2 // Calculate the sampling probability for each source node s of node type \\tau.\n10:     end for\n11:     Sample n nodes \\{t_i\\}_{i=1}^{n} from B[\\tau] using prob^{(l-1)}[\\tau].\n12:     for t \\in \\{t_i\\}_{i=1}^{n} do\n13:       OS[\\tau].add(t) // Add node t into the output node set.\n14:       Add-In-Budget(B, t, A, NS) // Add neighbors of t to B.\n15:       B[\\tau].pop(t) // Remove sampled node t from the budget.\n16:     end for\n17:   end for\n18: end for\n19: Reconstruct the sampled adjacency matrix \\hat{A} among the sampled nodes OS from A.\n20: return OS and \\hat{A}\nAfter the budget is updated, we calculate the sampling probability in line 9 of Algorithm 1 as the square of the cumulative normalized degree of each node s in its budget. As proved in [29], using this sampling probability reduces the sampling variance. We then sample n nodes of type \\tau using the calculated probabilities, add them into the output node set, add their neighborhoods into the budget, and remove them from the budget (lines 12\u201315). Repeating this procedure L times yields a sampled sub-graph of depth L around the initial nodes.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"},
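Line 9's importance-sampling probability and line 11's draw map directly onto NumPy. A hedged sketch, where `budget_tau` is one per-type dict from the budget sketch above and `sampling_prob` is an illustrative helper name:

```python
import numpy as np

def sampling_prob(budget_tau):
    """Line 9 of Algorithm 1: prob[tau][s] = B[tau][s]^2 / ||B[tau]||_2^2."""
    nodes = list(budget_tau)
    scores = np.array([budget_tau[s] for s in nodes], dtype=float)
    return nodes, scores ** 2 / (scores ** 2).sum()   # squaring reduces sampling variance, cf. [29]

# Line 11: draw n distinct nodes of type tau with the computed probabilities.
# nodes, prob = sampling_prob(budget[tau])
# chosen = np.random.choice(len(nodes), size=min(n, len(nodes)), replace=False, p=prob)
# sampled_ids = [nodes[i] for i in chosen]
```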
"964a0eb7-9428-4dd4-9e99-7c1c305368b7": {"__data__": {"id_": "964a0eb7-9428-4dd4-9e99-7c1c305368b7", "embedding": null, "metadata": {"page_label": "6", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "610d896b-01cd-43ed-a0e6-3847712913a6", "node_type": "4", "metadata": {"page_label": "6", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "c14210eb722eadddb07bb607a0a10354c9147de1afbed69ef7eebfb20d641da7"}, "2": {"node_id": "901519b1-7622-49a8-9330-7c8742988373", "node_type": "1", "metadata": {"page_label": "6", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "2b404ffc1a06cb0292ef35c9f7ec9f4db462ee3686090495b0846ab85a05b235"}}, "hash": "820661108183802aacbfa5a36aec905c9716d357e8d203bc050c58a2dddd3eb0", "text": "Finally, we reconstruct the adjacency matrix among the sampled nodes. With this algorithm, the sampled sub-graph contains a similar number of nodes per type (thanks to the separate per-type budgets) and is sufficiently dense to reduce the sampling variance (thanks to the normalized degree and importance sampling), making it suitable for training GNNs on Web-scale heterogeneous graphs.\n4.2 Inductive Timestamp Assignment\nSo far we have assumed that each node t is assigned a timestamp T(t). However, in real-world heterogeneous graphs, many nodes are not associated with a fixed time; we denote such nodes as plain nodes and need to assign them appropriate timestamps. For example, the WWW conference was held in both 1974 and 2019, and the WWW node in these two years covers dramatically different research topics. Consequently, we need to decide which timestamp(s) to attach to the WWW node.", "start_char_idx": null, "end_char_idx": null, "text_template": "{metadata_str}\n\n{content}", "metadata_template": "{key}: {value}", "metadata_seperator": "\n"}, "__type__": "1"},
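Putting the pieces together, the main loop of Algorithm 1 (lines 6-18) could be sketched as follows, reusing the hypothetical `add_in_budget` and `sampling_prob` helpers from the sketches above; the adjacency-matrix reconstruction of lines 19-20 is omitted.

```python
import numpy as np
from collections import defaultdict

def hg_sampling(adj, output_nodes, node_type, n, L):
    """Sketch of HGSampling (Algorithm 1): depth L, n nodes per type per step."""
    NS = set(output_nodes)                                    # line 1
    budget = defaultdict(lambda: defaultdict(float))          # line 2
    OS = defaultdict(set)
    for t in output_nodes:                                    # lines 3-5
        OS[node_type(t)].add(t)
        add_in_budget(budget, t, adj, NS, node_type)
    for _ in range(L):                                        # line 6
        for tau in list(budget):                              # line 7
            nodes, prob = sampling_prob(budget[tau])          # lines 8-10
            k = min(n, len(nodes))
            if k == 0:
                continue
            chosen = np.random.choice(len(nodes), size=k, replace=False, p=prob)
            for t in (nodes[i] for i in chosen):              # lines 12-16
                OS[tau].add(t)                                # line 13
                NS.add(t)
                add_in_budget(budget, t, adj, NS, node_type)  # line 14
                budget[tau].pop(t)                            # line 15
    return OS                                                 # lines 19-20: adjacency rebuild omitted
```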
"e87ef8c8-9326-45ce-b9c3-6cee2f3657cc": {"__data__": {"id_": "e87ef8c8-9326-45ce-b9c3-6cee2f3657cc", "embedding": null, "metadata": {"page_label": "7", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "excluded_embed_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "excluded_llm_metadata_keys": ["creation_date", "last_modified_date", "last_accessed_date"], "relationships": {"1": {"node_id": "be623874-dc7e-4bcd-8475-12c168009c09", "node_type": "4", "metadata": {"page_label": "7", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "c2df2dcb54ce0499daf86ff9549dbd4f0bbdb90573278a8d17f7b8b45eb9301f"}, "3": {"node_id": "8d443c3d-7a59-44d4-8805-914e661ee2f0", "node_type": "1", "metadata": {"page_label": "7", "file_name": "Heterogeneous Graph Transformer.pdf", "file_path": "docs\\Heterogeneous Graph Transformer.pdf", "creation_date": "2023-11-06", "last_modified_date": "2023-10-20", "last_accessed_date": "2023-11-30"}, "hash": "8c8529ad7f2f2821e38d32d2fce11a0dfadb81776fa1eedeb00d3ce4bbcc268f"}}, "hash": "f580fe8c798ceb3aed542a5e5c00d693126e0efb6980d508421ff0098859cb2c", "text": "Figure 4: HGSampling with Inductive Timestamp Assignment.\nAlgorithm 2 Add-In-Budget\nRequire: Budget B storing, for each node type, candidate nodes with their normalized degree; added node t; adjacency matrix A for each \\langle \\tau(s), \\phi(e), \\tau(t) \\rangle relation pair; sampled node set NS.\nEnsure: Updated budget B.\n1: for each possible source node type \\tau and edge type \\phi do\n2:   \\hat{D}_t \\leftarrow 1 / len(A_{\\langle \\tau, \\phi, \\tau(t) \\rangle}[t]) // Get the normalized degree of the added node t with respect to \\langle \\tau, \\phi, \\tau(t) \\rangle.\n3:   for source node s in A_{\\langle \\tau, \\phi, \\tau(t) \\rangle}[t] do\n4:     if s has not been sampled (s
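Algorithm 2 is cut off above. Based on its visible lines 1-4 (per-relation normalized degree, skipping already-sampled nodes), a per-relation refinement of the earlier simplified `add_in_budget` might look as follows; the adjacency layout `adj[(tau_s, phi, tau_t)][t]` is an assumed representation, and everything past the visible line 4 is extrapolated from the pattern of lines 1-3 rather than quoted from the paper.

```python
def add_in_budget(budget, t, adj, NS, node_type):
    """Sketch of Algorithm 2 (Add-In-Budget) with relation-typed adjacency;
    intended as a drop-in for the simplified version sketched earlier."""
    tau_t = node_type(t)
    for (tau_s, phi, tau_t_), rows in adj.items():    # line 1: iterate <tau(s), phi(e), tau(t)> pairs
        if tau_t_ != tau_t or t not in rows:
            continue
        sources = rows[t]                             # source nodes linked to t under this relation
        norm_deg = 1.0 / max(len(sources), 1)         # line 2: normalized degree of t for this relation
        for s in sources:                             # line 3
            if s not in NS:                           # line 4: s has not been sampled
                budget[tau_s][s] += norm_deg          # accumulate t's normalized degree on s
    return budget
```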