<template>
  <div class="container mt-5">
    <div class="post">
      <header class="post-header">
        <h1 class="post-title">Students</h1>
        <p class="post-description"></p>
      </header>
      <article>
        <h4 id="interns-at-msra">Interns at MSRA</h4>
        <p>Current interns:</p>
        <ul>
          <li>
            2023.05 – present,
            <a href="#" target="_blank" rel="noopener noreferrer">Hao Chen</a>,
            Ph.D @ Carnegie Mellon University.
          </li>
          <li>
            2023.03 – present, Kaijie Zhu, Master @ Institute of Automation,
            CAS.
          </li>
        </ul>
        <p>Alumni:</p>
        <ul>
          <li>2023.03 – 2023.04, Lu Tan, Master @ Tsinghua University.</li>
          <li>
            2022.10 – 2023.03,
            <a href="#" target="_blank" rel="noopener noreferrer">Xixu Hu</a>,
            Ph.D @ City University of Hong Kong.
          </li>
          <li>
            2022.07 – 2023.03,
            <a href="#" target="_blank" rel="noopener noreferrer"
              >Runkai Zheng</a
            >, Master @ Chinese University of Hong Kong (Shenzhen).
          </li>
          <li>
            2021.11 – 2022.10,
            <a href="#" target="_blank" rel="noopener noreferrer">Yidong Wang</a
            >, Master @ Tokyo Institute of Technology. Now: Ph.D @ PKU. [<a
              href="#"
              target="_blank"
              rel="noopener noreferrer"
              >MSRA official blog</a
            >]
            <ul>
              <li>Topics: semi-supervised learning, long-tail learning.</li>
              <li>
                Publications during internship: NeurIPS’22, ACML’22, COLING’22.
              </li>
            </ul>
          </li>
          <li>
            2021.06 – 2021.11,
            <a href="#" target="_blank" rel="noopener noreferrer">Wang Lu</a>,
            Ph.D @ ICT, Chinese Academy of Sciences. Now: continuing his Ph.D
            at ICT.
            <ul>
              <li>
                Topics: domain generalization, federated learning, transfer
                learning.
              </li>
              <li>
                Publications during internship: TKDE’22, TMLR’22, UbiComp’22,
                IEEE TBD’22, ICASSP’22, IJCAI’22 workshop.
              </li>
            </ul>
          </li>
          <li>
            2020.12 – 2021.05,
            <a href="#" target="_blank" rel="noopener noreferrer">Wenxin Hou</a
            >, Master @ Tokyo Institute of Technology. Now: SDE at Microsoft.
            <ul>
              <li>Topics: speech recognition, semi-supervised learning.</li>
              <li>
                Publications during internship: NeurIPS’21, TASLP’22,
                Interspeech’21.
              </li>
            </ul>
          </li>
          <li>
            2020.10 – 2020.11, Danni Li, Bachelor @ City University of Hong
            Kong. Now: Amazon.
          </li>
          <li>
            2020.05 – 2020.09, Yuntao Du, Ph.D @ Nanjing University. Now:
            continuing his Ph.D at NJU.
            <ul>
              <li>Topics: domain adaptation, time series analysis.</li>
              <li>Publications during internship: CIKM’21.</li>
            </ul>
          </li>
          <li>
            2019.10 – 2020.01, Weixin Lu, Bachelor @ Peking University. Now:
            Master @ New York University.
          </li>
        </ul>
        <h4 id="collaborating-students">Collaborating students</h4>
        <ul>
          <li>Ph.D students at ICT, CAS: Xin Qin, Wang Lu, Yuxin Zhang.</li>
          <li>Master student at Tsinghua University: Lu Tan.</li>
          <li>Ph.D student at Institute of Acoustics, CAS: Han Zhu.</li>
          <li>Ph.D student at Carnegie Mellon University: Hao Chen.</li>
          <li>
            Master/Ph.D students at Institute of Automation, CAS: YiFan Zhang,
            Kaijie Zhu.
          </li>
          <li>Ph.D student at University of Tokyo: Yivan Zhang.</li>
        </ul>
      </article>
    </div>
  </div>
</template>

<script>
export default {};
</script>

<style>
</style>