<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
  <head>

    <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
    <meta content="Cask Data, Inc." name="author" />
<meta content="Copyright © 2014-2017 Cask Data, Inc." name="copyright" />


    <meta name="git_release" content="6.1.1">
    <meta name="git_hash" content="05fbac36f9f7aadeb44f5728cea35136dbc243e5">
    <meta name="git_timestamp" content="2020-02-09 08:22:47 +0800">
    <title>Partitioned FileSet</title>

    <link rel="stylesheet" href="../../_static/cdap-bootstrap.css" type="text/css" />
    <link rel="stylesheet" href="../../_static/pygments.css" type="text/css" />
    <link rel="stylesheet" href="../../_static/bootstrap-3.3.6/css/bootstrap.min.css" type="text/css" />
    <link rel="stylesheet" href="../../_static/bootstrap-3.3.6/css/bootstrap-theme.min.css" type="text/css" />
    <link rel="stylesheet" href="../../_static/css/bootstrap-sphinx.css" type="text/css" />
    <link rel="stylesheet" href="../../_static/css/cdap-dynamicscrollspy-4.css" type="text/css" />
    <link rel="stylesheet" href="../../_static/css/jquery.mCustomScrollbar.css" type="text/css" />
    <link rel="stylesheet" href="../../_static/css/cdap-jquery.mCustomScrollbar.css" type="text/css" />
    <link rel="stylesheet" href="../../_static/css/abixTreeList-2.css" type="text/css" />
    <link rel="stylesheet" href="../../_static/cdap-bootstrap.css" type="text/css" />

    <script type="text/javascript">
      var DOCUMENTATION_OPTIONS = {
        URL_ROOT:    '',
        VERSION:     '6.1.1',
        COLLAPSE_INDEX: false,
        FILE_SUFFIX: '.html',
        HAS_SOURCE:  false
      };
    </script>
    <script type="text/javascript" src="../../_static/jquery.js"></script>
    <script type="text/javascript" src="../../_static/underscore.js"></script>
    <script type="text/javascript" src="../../_static/doctools.js"></script>
    <script type="text/javascript" src="../../_static/language_data.js"></script>

    <link rel="shortcut icon" href="../../_static/favicon.ico"/>
    <link rel="index" title="Index" href="../../genindex.html" />
    <link rel="search" title="Search" href="../../search.html" />
    <link rel="top" title="Cask Data Application Platform 6.1.1 Documentation" href="../../index.html" />
    <link rel="up" title="Datasets" href="index.html" />
    <link rel="next" title="TimePartitioned FileSet" href="time-partitioned-fileset.html" />
    <link rel="prev" title="FileSet Dataset" href="fileset.html" />
    <!-- block extrahead -->
    <meta charset='utf-8'>
    <meta http-equiv='X-UA-Compatible' content='IE=edge,chrome=1'>
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <meta name="apple-mobile-web-app-capable" content="yes">
    <!-- block extrahead end -->

</head>
<body role="document">

<!-- block navbar -->
<div id="navbar" class="navbar navbar-inverse navbar-default navbar-fixed-top">
    <div class="container-fluid">
      <div class="row">
        <div class="navbar-header">
          <!-- .btn-navbar is used as the toggle for collapsed navbar content -->
          <a class="navbar-brand" href="../../table-of-contents/../../index.html">
            <span><img alt="CDAP logo" src="../../_static/cdap_logo.svg"/></span>
          </a>

          <button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".nav-collapse">
            <span class="icon-bar"></span>
            <span class="icon-bar"></span>
            <span class="icon-bar"></span>
          </button>

          <div class="pull-right">
            <div class="dropdown version-dropdown">
              <a href="#" class="dropdown-toggle" data-toggle="dropdown"
                role="button" aria-haspopup="true" aria-expanded="false">
                v 6.1.1 <span class="caret"></span>
              </a>
              <ul class="dropdown-menu">
                <li><a href="//docs.cdap.io/cdap/5.1.2/en/index.html">v 5.1.2</a></li>
                <li><a href="//docs.cdap.io/cdap/4.3.4/en/index.html">v 4.3.4</a></li>
              </ul>
            </div>
          </div>
          <form class="navbar-form navbar-right navbar-search" action="../../search.html" method="get">
            <div class="form-group">
              <div class="navbar-search-image material-icons"></div>
              <input type="text" name="q" class="form-control" placeholder="  Search" />
            </div>
            <input type="hidden" name="check_keywords" value="yes" />
            <input type="hidden" name="area" value="default" />
          </form>

          <div class="collapse navbar-collapse nav-collapse navbar-right navbar-navigation">
            <ul class="nav navbar-nav"><li class="docsite-nav-tab-container"><a class="docsite-nav-tab-link " href="../../table-of-contents/../../index.html">简介</a></li><li class="docsite-nav-tab-container"><a class="docsite-nav-tab-link current" href="../../table-of-contents/../../guides.html">手册</a></li><li class="docsite-nav-tab-container"><a class="docsite-nav-tab-link " href="../../table-of-contents/../../reference-manual/index.html">参考</a></li><li class="docsite-nav-tab-container"><a class="docsite-nav-tab-link " href="../../table-of-contents/../../faqs/index.html">帮助</a></li>
            </ul>
          </div>

        </div>
      </div>
    </div>
  </div><!-- block navbar end -->
<!-- block main content -->
<div class="main-container container">
  <div class="row"><div class="col-md-2">
      <div id="sidebar" class="bs-sidenav scrollable-y-outside" role="complementary">
<!-- theme_manual: developer-manual -->
<!-- theme_manual_highlight: guides -->
<!-- sidebar_title_link: ../../table-of-contents/../../guides.html -->

  <div role="note" aria-label="manuals links"><h3><a href="../../table-of-contents/../../guides.html">Guides</a></h3>

    <ul class="this-page-menu">
      <li class="toctree-l1"><a href="../../table-of-contents/../../user-guide/index.html" rel="nofollow">用户手册</a>
      </li>
      <li class="toctree-l1"><b><a href="../../table-of-contents/../../developer-manual/index.html" rel="nofollow">开发手册</a></b>
      <nav class="pagenav">
      <ul class="current">
<li class="toctree-l1"><a class="reference internal" href="../../index.html"> 简介</a></li>
<li class="toctree-l1"><a class="reference internal" href="../../getting-started/index.html"> 入门指南</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../getting-started/sandbox/index.html">CDAP Sandbox</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../getting-started/sandbox/zip.html">二进制 Zip 文件</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../getting-started/sandbox/zip.html#cdap-sandbox">启动和停止 CDAP Sandbox</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../getting-started/sandbox/virtual-machine.html">虚拟机镜像</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../getting-started/sandbox/docker.html">Docker 镜像</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../getting-started/quick-start.html">快速入门</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../getting-started/dev-env.html">搭建开发环境</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../getting-started/start-stop-cdap.html">启动和停止 CDAP</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../getting-started/building-apps.html">构建并运行应用</a></li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../overview/index.html"> 概述</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../overview/anatomy.html"> 大数据应用剖析</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../overview/modes.html"> 模式和组件</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../overview/abstractions.html"> 核心概念</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../overview/interfaces.html"> 编程接口</a></li>
</ul>
</li>
<li class="toctree-l1 current"><a class="reference internal" href="../index.html"> 抽象概念</a><ul class="current">
<li class="toctree-l2"><a class="reference internal" href="../core.html"> Core Abstractions</a></li>
<li class="toctree-l2"><a class="reference internal" href="../applications.html"> Applications</a></li>
<li class="toctree-l2 current"><a class="reference internal" href="index.html"> Datasets</a><ul class="current">
<li class="toctree-l3"><a class="reference internal" href="overview.html"> Overview</a></li>
<li class="toctree-l3"><a class="reference internal" href="table.html"> Table API</a></li>
<li class="toctree-l3"><a class="reference internal" href="fileset.html"> FileSets</a></li>
<li class="toctree-l3 current"><a class="current reference internal" href="#"> Partitioned FileSets</a></li>
<li class="toctree-l3"><a class="reference internal" href="time-partitioned-fileset.html"> TimePartitioned FileSets</a></li>
<li class="toctree-l3"><a class="reference internal" href="system-custom.html"> System and Custom Datasets</a></li>
<li class="toctree-l3"><a class="reference internal" href="permissions.html"> Dataset Permissions</a></li>
<li class="toctree-l3"><a class="reference internal" href="cube.html"> Cube Dataset</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../mapreduce-programs.html"> MapReduce Programs</a></li>
<li class="toctree-l2"><a class="reference internal" href="../plugins.html"> Plugins</a></li>
<li class="toctree-l2"><a class="reference internal" href="../schedules.html"> Schedules</a></li>
<li class="toctree-l2"><a class="reference internal" href="../secure-keys.html"> Secure Keys</a></li>
<li class="toctree-l2"><a class="reference internal" href="../services.html"> Services</a></li>
<li class="toctree-l2"><a class="reference internal" href="../spark-programs.html"> Spark Programs</a></li>
<li class="toctree-l2"><a class="reference internal" href="../workers.html"> Workers</a></li>
<li class="toctree-l2"><a class="reference internal" href="../workflows.html"> Workflows</a></li>
<li class="toctree-l2"><a class="reference internal" href="../artifacts.html"> Artifacts</a></li>
<li class="toctree-l2"><a class="reference internal" href="../program-lifecycle.html"> Program Lifecycle</a></li>
<li class="toctree-l2"><a class="reference internal" href="../namespaces.html"> Namespaces</a></li>
<li class="toctree-l2"><a class="reference internal" href="../transaction-system.html"> Transaction System</a></li>
<li class="toctree-l2"><a class="reference internal" href="../transactional-messaging-system.html"> Transactional Messaging System</a></li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../metadata/index.html"> 元数据</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../metadata/system-metadata.html"> System Metadata</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../metadata/discovery-lineage.html"> Discovery and Lineage</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../metadata/field-lineage.html"> Field Level Lineage</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../metadata/audit-logging.html"> Audit Logging</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../metadata/metadata-ui.html"> CDAP Metadata UI</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../metadata/programmatic-metadata.html"> Accessing metadata programmatically</a></li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../pipelines/index.html"> 数据流管道</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../pipelines/concepts-design.html"> Concepts and Design</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../pipelines/getting-started.html"> Getting Started</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../pipelines/studio.html"> CDAP Studio</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../pipelines/creating-pipelines.html"> Creating Pipelines</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../pipelines/running-pipelines.html"> Running Pipelines</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../pipelines/plugin-management.html"> Plugin Management</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../pipelines/plugins/index.html"> Plugin Reference</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../pipelines/plugins/actions/index.html"> Action Plugins</a><ul class="simple">
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../pipelines/plugins/sources/index.html"> Source Plugins</a><ul class="simple">
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../pipelines/plugins/transforms/index.html"> Transform Plugins</a><ul class="simple">
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../pipelines/plugins/analytics/index.html"> Analytic Plugins</a><ul class="simple">
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../pipelines/plugins/sinks/index.html"> Sink Plugins</a><ul class="simple">
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../pipelines/plugins/shared-plugins/index.html"> Shared Plugins</a><ul>
<li class="toctree-l4"><a class="reference internal" href="../../pipelines/plugins/shared-plugins/core.html">CoreValidator</a></li>
</ul>
</li>
<li class="toctree-l3"><a class="reference internal" href="../../pipelines/plugins/post-run-plugins/index.html"> Post-run Plugins</a><ul class="simple">
</ul>
</li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../pipelines/developing-pipelines.html"> Developing Pipelines</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../pipelines/developing-plugins/index.html"> Developing Plugins</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../pipelines/developing-plugins/plugin-basics.html">Plugin Basics</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../pipelines/developing-plugins/creating-a-plugin.html">Creating a Plugin</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../pipelines/developing-plugins/presentation-plugins.html">Plugin Presentation</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../pipelines/developing-plugins/testing-plugins.html">Testing Plugins</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../pipelines/developing-plugins/packaging-plugins.html">Packaging Plugins</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../pipelines/how-cdap-pipelines-work.html"> How CDAP Pipelines Work</a></li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../cloud-runtimes/index.html"> 云平台运行</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../cloud-runtimes/concepts/index.html"> Concepts</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../cloud-runtimes/provisioners/index.html"> Provisioners</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../cloud-runtimes/provisioners/gcp-dataproc.html">Google Dataproc</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../cloud-runtimes/provisioners/aws-emr.html">Amazon Elastic MapReduce</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../cloud-runtimes/provisioners/remote-hadoop.html">Remote Hadoop</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../cloud-runtimes/profiles/index.html"> Profiles</a><ul>
<li class="toctree-l3"><a class="reference internal" href="../../cloud-runtimes/profiles/creating-profiles.html">Creating Profiles</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../cloud-runtimes/profiles/assigning-profiles.html">Assigning Profiles</a></li>
<li class="toctree-l3"><a class="reference internal" href="../../cloud-runtimes/profiles/admin-controls.html">Admin Controls</a></li>
</ul>
</li>
<li class="toctree-l2"><a class="reference internal" href="../../cloud-runtimes/example/index.html"> Example</a></li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../security/index.html"> 安全</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../security/client-authentication.html">Client Authentication</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../security/cdap-authentication-clients-java.html">CDAP Authentication Client for Java</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../security/cdap-authentication-clients-python.html">CDAP Authentication Client for Python</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../security/custom-authentication.html">Custom Authentication</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../security/authorization-extensions.html">Authorization Extensions</a></li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../testing/index.html"> 测试和调试</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../testing/testing.html"> Testing a CDAP Application</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../testing/debugging.html"> Debugging</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../testing/troubleshooting.html"> Troubleshooting</a></li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../ingesting-tools/index.html"> 数据融合</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../ingesting-tools/cdap-stream-clients-java.html">CDAP Stream Client for Java</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../ingesting-tools/cdap-stream-clients-python.html">CDAP Stream Client for Python</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../ingesting-tools/cdap-stream-clients-ruby.html">CDAP Stream Client for Ruby</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../ingesting-tools/cdap-flume.html">CDAP Flume</a></li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../data-exploration/index.html"> 数据探索</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../data-exploration/filesets.html"> Fileset Exploration</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../data-exploration/tables.html"> Table Exploration</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../data-exploration/object-mapped-tables.html"> ObjectMappedTable Exploration</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../data-exploration/custom-datasets.html"> Custom Dataset Exploration</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../data-exploration/hive-execution-engines.html"> Hive Execution Engines</a></li>
</ul>
</li>
<li class="toctree-l1"><a class="reference internal" href="../../advanced/index.html"> 高级主题</a><ul>
<li class="toctree-l2"><a class="reference internal" href="../../advanced/application-logback.html"> Application Logback</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../advanced/best-practices.html"> Best Practices</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../advanced/class-loading.html"> Class Loading</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../advanced/configuring-resources.html"> Configuring Program Resources</a></li>
<li class="toctree-l2"><a class="reference internal" href="../../advanced/program-retry-policies.html"> Program Retry Policies</a></li>
</ul>
</li>
</ul>
</nav>
      </li>
      <li class="toctree-l1"><a href="../../table-of-contents/../../admin-manual/index.html" rel="nofollow">管理手册</a>
      </li>
      <li class="toctree-l1"><a href="../../table-of-contents/../../integrations/index.html" rel="nofollow">集成手册</a>
      </li>
      <li class="toctree-l1"><a href="../../table-of-contents/../../examples-manual/index.html" rel="nofollow">最佳实践</a>
      </li>
    </ul>
  </div></div>
    </div><div class="col-md-8 content" id="main-content">
    
  <div class="section" id="partitioned-fileset">
<span id="datasets-partitioned-fileset"></span><h1>Partitioned FileSet<a class="headerlink" href="#partitioned-fileset" title="Permalink to this headline">🔗</a></h1>
<p>While a FileSet is a convenient abstraction over actual file system interfaces, it still requires
the application to be aware of file system paths. For example, an application that maintains data
over time might have a new file for every month. One could come up with a naming convention that encodes
the month into each file name, and share that convention across all applications that use this file set.
Yet that can become tedious to manage, especially if the naming convention should ever change—then all
applications would have to be changed simultaneously for proper functioning.</p>
<p>The <code class="docutils literal notranslate"><span class="pre">PartitionedFileSet</span></code> dataset relieves applications from understanding file name conventions. Instead,
it associates a partition key with a path. Because different paths cannot have the same partition key,
this allows applications to address the file(s) at that path uniquely through their partition keys, or
more broadly through conditions over the partition keys. For example, the months of February through June
of a particular year, or the month of November in any year. By inheriting the attributes—such as
format and schema—of FileSets, PartitionedFileSets are a powerful abstraction over data that is
organized into files.</p>
<div class="section" id="creating-a-partitionedfileset">
<h2>Creating a PartitionedFileSet<a class="headerlink" href="#creating-a-partitionedfileset" title="Permalink to this headline">🔗</a></h2>
<p>To create and use a PartitionedFileSet in an application, you create it as part of the application
configuration, similar to FileSets. However, the partitioning has to be given as an additional property:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="kd">public</span> <span class="kt">void</span> <span class="nf">configure</span><span class="p">()</span> <span class="p">{</span>
  <span class="p">...</span>
  <span class="n">createDataset</span><span class="p">(</span><span class="s">&quot;results&quot;</span><span class="p">,</span> <span class="n">PartitionedFileSet</span><span class="p">.</span><span class="na">class</span><span class="p">,</span> <span class="n">PartitionedFileSetProperties</span><span class="p">.</span><span class="na">builder</span><span class="p">()</span>
    <span class="c1">// Properties for partitioning</span>
    <span class="p">.</span><span class="na">setPartitioning</span><span class="p">(</span><span class="n">Partitioning</span><span class="p">.</span><span class="na">builder</span><span class="p">().</span><span class="na">addStringField</span><span class="p">(</span><span class="s">&quot;league&quot;</span><span class="p">).</span><span class="na">addIntField</span><span class="p">(</span><span class="s">&quot;season&quot;</span><span class="p">).</span><span class="na">build</span><span class="p">())</span>
    <span class="c1">// Properties for file set</span>
    <span class="p">.</span><span class="na">setInputFormat</span><span class="p">(</span><span class="n">TextInputFormat</span><span class="p">.</span><span class="na">class</span><span class="p">)</span>
    <span class="p">.</span><span class="na">setOutputFormat</span><span class="p">(</span><span class="n">TextOutputFormat</span><span class="p">.</span><span class="na">class</span><span class="p">)</span>
    <span class="p">.</span><span class="na">setOutputProperty</span><span class="p">(</span><span class="n">TextOutputFormat</span><span class="p">.</span><span class="na">SEPERATOR</span><span class="p">,</span> <span class="s">&quot;,&quot;</span><span class="p">)</span>
    <span class="p">.</span><span class="na">build</span><span class="p">());</span>
  <span class="p">...</span>
<span class="p">}</span>
</pre></div>
</div>
<p>This creates a new PartitionedFileSet named <em>results</em>. Similar to FileSets, it specifies <code class="docutils literal notranslate"><span class="pre">TextInputFormat</span></code> and
<code class="docutils literal notranslate"><span class="pre">TextOutputFormat</span></code>; for the output format, we specify that the separator between fields is a comma.
The difference to a FileSet is that this dataset is partitioned by league and season. This means that every file
added to this dataset must have a partitioning key with a unique combination of league and season.</p>
<p>Note that any of the properties that apply to FileSets can also be used for PartitionedFileSets (they apply to the
embedded FileSet). If you configure a PartitionedFileSet as external using <code class="docutils literal notranslate"><span class="pre">setDataExternal(true)</span></code>, then the
embedded FileSet becomes read-only. You can still add partitions for locations that were written by an
external process. But dropping a partition will only delete the partition’s metadata, whereas the actual file
remains intact. Similarly, if you drop or truncate an external PartitionedFileSet, its files will not be deleted.</p>
<p>Similarly to a <a class="reference internal" href="fileset.html#datasets-fileset-reuse"><span class="std std-ref">FileSet</span></a>, a PartitionedFileSet can reuse an existing location
in HDFS and an existing Hive table for Explore. Use one of these two options:</p>
<ul class="simple">
<li><code class="docutils literal notranslate"><span class="pre">setUseExisting(true)</span></code>: This directs the PartitionedFileSet to accept an existing location as its base
path and an existing table in Hive for exploring. Because the existing location may contain pre-existing files,
and the Hive table may have pre-existing partitions, the location and the Hive table will not be deleted when
the dataset is dropped, and truncating the FileSet will have no effect on the file system or the Hive table.
This is to ensure that no pre-existing data is deleted.</li>
<li><code class="docutils literal notranslate"><span class="pre">setPossessExisting(true)</span></code>: This also allows reuse of an existing location. However, the
PartitionedFileSet will assume ownership of existing files in that location and of the Hive table and
all its existing partitions, which means that these files and partitions will be deleted if the dataset
is either dropped or truncated.</li>
</ul>
<p>In order to make the PartitionedFileSet explorable, additional properties are needed, as described
in <a class="reference internal" href="#exploring-partitionedfilesets"><span class="std std-ref">Exploring PartitionedFileSets</span></a>.</p>
</div>
<div class="section" id="reading-and-writing-partitionedfilesets">
<h2>Reading and Writing PartitionedFileSets<a class="headerlink" href="#reading-and-writing-partitionedfilesets" title="Permalink to this headline">🔗</a></h2>
<p>You can interact with the files in a PartitionedFileSet directly through the <code class="docutils literal notranslate"><span class="pre">Location</span></code> abstraction
of the file system. This is similar to a FileSet, but instead of a relative path, you specify a
partition key to obtain a Partition; you can then get a Location from that Partition.</p>
<p>For example, to read the content of a partition:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="n">PartitionKey</span> <span class="n">key</span> <span class="o">=</span> <span class="n">PartitionKey</span><span class="p">.</span><span class="na">builder</span><span class="p">().</span><span class="na">addStringField</span><span class="p">(</span><span class="s">&quot;league&quot;</span><span class="p">,</span> <span class="p">...)</span>
                                         <span class="p">.</span><span class="na">addIntField</span><span class="p">(</span><span class="s">&quot;season&quot;</span><span class="p">,</span> <span class="p">...)</span>
                                         <span class="p">.</span><span class="na">build</span><span class="p">();</span>
<span class="n">Partition</span> <span class="n">partition</span> <span class="o">=</span> <span class="n">dataset</span><span class="p">.</span><span class="na">getPartition</span><span class="p">(</span><span class="n">key</span><span class="p">);</span>
<span class="k">if</span> <span class="p">(</span><span class="n">partition</span> <span class="o">!=</span> <span class="kc">null</span><span class="p">)</span> <span class="p">{</span>
  <span class="k">try</span> <span class="p">{</span>
    <span class="n">Location</span> <span class="n">location</span> <span class="o">=</span> <span class="n">partition</span><span class="p">.</span><span class="na">getLocation</span><span class="p">();</span>
    <span class="n">InputStream</span> <span class="n">inputStream</span> <span class="o">=</span> <span class="n">location</span><span class="p">.</span><span class="na">getInputStream</span><span class="p">();</span>
    <span class="p">...</span>
  <span class="p">}</span> <span class="k">catch</span> <span class="p">(</span><span class="n">IOException</span> <span class="n">e</span><span class="p">)</span> <span class="p">{</span>
    <span class="p">...</span>
  <span class="p">}</span>
<span class="p">}</span>
</pre></div>
</div>
<p>Note that if the partition was written with MapReduce, the location is actually a directory
that contains part files. In that case, list the files in the directory to find the part files:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="k">for</span> <span class="p">(</span><span class="n">Location</span> <span class="n">file</span> <span class="p">:</span> <span class="n">location</span><span class="p">.</span><span class="na">list</span><span class="p">())</span> <span class="p">{</span>
  <span class="k">if</span> <span class="p">(</span><span class="n">file</span><span class="p">.</span><span class="na">getName</span><span class="p">().</span><span class="na">startsWith</span><span class="p">(</span><span class="s">&quot;part&quot;</span><span class="p">))</span> <span class="p">{</span>
    <span class="n">InputStream</span> <span class="n">inputStream</span> <span class="o">=</span> <span class="n">file</span><span class="p">.</span><span class="na">getInputStream</span><span class="p">();</span>
    <span class="p">...</span>
  <span class="p">}</span>
<span class="p">}</span>
</pre></div>
</div>
<p>Instead of reading a single partition, you can also specify a PartitionFilter to query the
partitioned file set for all partitions whose keys match that filter. The PartitionFilter
can specify either an exact value (an equality condition) or a range for the value of each
field in the dataset’s partitioning. For example, the following code reads all partitions
for the NFL and the ’80s seasons:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="n">PartitionFilter</span> <span class="n">filter</span> <span class="o">=</span> <span class="n">PartitionFilter</span><span class="p">.</span><span class="na">builder</span><span class="p">().</span><span class="na">addValueCondition</span><span class="p">(</span><span class="s">&quot;league&quot;</span><span class="p">,</span> <span class="s">&quot;nfl&quot;</span><span class="p">)</span>
                                                  <span class="p">.</span><span class="na">addRangeCondition</span><span class="p">(</span><span class="s">&quot;season&quot;</span><span class="p">,</span> <span class="mi">1980</span><span class="p">,</span> <span class="mi">1990</span><span class="p">)</span>
                                                  <span class="p">.</span><span class="na">build</span><span class="p">();</span>
<span class="n">Set</span><span class="o">&lt;</span><span class="n">Partition</span><span class="o">&gt;</span> <span class="n">partitions</span> <span class="o">=</span> <span class="n">dataset</span><span class="p">.</span><span class="na">getPartitions</span><span class="p">(</span><span class="n">filter</span><span class="p">);</span>
<span class="k">for</span> <span class="p">(</span><span class="n">Partition</span> <span class="n">partition</span> <span class="p">:</span> <span class="n">partitions</span><span class="p">)</span> <span class="p">{</span>
  <span class="k">try</span> <span class="p">{</span>
    <span class="n">Location</span> <span class="n">location</span> <span class="o">=</span> <span class="n">partition</span><span class="p">.</span><span class="na">getLocation</span><span class="p">();</span>
    <span class="n">InputStream</span> <span class="n">inputStream</span> <span class="o">=</span> <span class="n">location</span><span class="p">.</span><span class="na">getInputStream</span><span class="p">();</span>
    <span class="p">...</span>
  <span class="p">}</span> <span class="k">catch</span> <span class="p">(</span><span class="n">IOException</span> <span class="n">e</span><span class="p">)</span> <span class="p">{</span>
    <span class="p">...</span>
  <span class="p">}</span>
<span class="p">}</span>
</pre></div>
</div>
<p>Note that the upper bound for the seasons (1990) is exclusive; that is, the 1990 season is not
included in the returned partitions. For a range condition, either the lower or the upper bound may
be null, meaning that the filter is unbounded in that direction.</p>
<p>Adding a partition is similar; however, instead of a Partition, you receive a <code class="docutils literal notranslate"><span class="pre">PartitionOutput</span></code>
for the partition key. That object has methods to obtain a Location and to add the partition once
you have written to that Location.
For example, this code writes to a file named <code class="docutils literal notranslate"><span class="pre">part</span></code> under the location returned from the
<code class="docutils literal notranslate"><span class="pre">PartitionOutput</span></code>:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="n">PartitionKey</span> <span class="n">key</span> <span class="o">=</span> <span class="p">...</span>
<span class="n">PartitionOutput</span> <span class="n">output</span> <span class="o">=</span> <span class="n">dataset</span><span class="p">.</span><span class="na">getPartitionOutput</span><span class="p">(</span><span class="n">key</span><span class="p">);</span>
<span class="k">try</span> <span class="p">{</span>
  <span class="n">Location</span> <span class="n">location</span> <span class="o">=</span> <span class="n">output</span><span class="p">.</span><span class="na">getLocation</span><span class="p">().</span><span class="na">append</span><span class="p">(</span><span class="s">&quot;part&quot;</span><span class="p">);</span>
  <span class="n">OutputStream</span> <span class="n">outputStream</span> <span class="o">=</span> <span class="n">location</span><span class="p">.</span><span class="na">getOutputStream</span><span class="p">();</span>
  <span class="p">...</span>
<span class="p">}</span> <span class="k">catch</span> <span class="p">(</span><span class="n">IOException</span> <span class="n">e</span><span class="p">)</span> <span class="p">{</span>
  <span class="p">...</span>
<span class="p">}</span>
<span class="n">output</span><span class="p">.</span><span class="na">addPartition</span><span class="p">();</span>
</pre></div>
</div>
</div>
<div class="section" id="using-partitionedfilesets-in-mapreduce">
<h2>Using PartitionedFileSets in MapReduce<a class="headerlink" href="#using-partitionedfilesets-in-mapreduce" title="Permalink to this headline">🔗</a></h2>
<p>A partitioned file set can be accessed in MapReduce in a similar fashion to a FileSet. The difference
is that instead of input and output paths, you specify a partition filter for the input and a
partition key for the output. For example, the MapReduce program of the SportResults example
reads as input all partitions for the league given in its runtime arguments, and writes as output
a partition with that league as the only key:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="nd">@Override</span>
<span class="kd">public</span> <span class="kt">void</span> <span class="nf">initialize</span><span class="p">()</span> <span class="kd">throws</span> <span class="n">Exception</span> <span class="p">{</span>
  <span class="n">MapReduceContext</span> <span class="n">context</span> <span class="o">=</span> <span class="n">getContext</span><span class="p">();</span>
  <span class="p">...</span>
  <span class="n">String</span> <span class="n">league</span> <span class="o">=</span> <span class="n">context</span><span class="p">.</span><span class="na">getRuntimeArguments</span><span class="p">().</span><span class="na">get</span><span class="p">(</span><span class="s">&quot;league&quot;</span><span class="p">);</span>

  <span class="c1">// Configure the input to read all seasons for the league</span>
  <span class="n">Map</span><span class="o">&lt;</span><span class="n">String</span><span class="p">,</span> <span class="n">String</span><span class="o">&gt;</span> <span class="n">inputArgs</span> <span class="o">=</span> <span class="n">Maps</span><span class="p">.</span><span class="na">newHashMap</span><span class="p">();</span>
  <span class="n">PartitionedFileSetArguments</span><span class="p">.</span><span class="na">setInputPartitionFilter</span><span class="p">(</span>
    <span class="n">inputArgs</span><span class="p">,</span> <span class="n">PartitionFilter</span><span class="p">.</span><span class="na">builder</span><span class="p">().</span><span class="na">addValueCondition</span><span class="p">(</span><span class="s">&quot;league&quot;</span><span class="p">,</span> <span class="n">league</span><span class="p">).</span><span class="na">build</span><span class="p">());</span>
  <span class="n">context</span><span class="p">.</span><span class="na">addInput</span><span class="p">(</span><span class="n">Input</span><span class="p">.</span><span class="na">ofDataset</span><span class="p">(</span><span class="s">&quot;results&quot;</span><span class="p">,</span> <span class="n">inputArgs</span><span class="p">));</span>

  <span class="c1">// Each run writes its output to a partition for the league</span>
  <span class="n">Map</span><span class="o">&lt;</span><span class="n">String</span><span class="p">,</span> <span class="n">String</span><span class="o">&gt;</span> <span class="n">outputArgs</span> <span class="o">=</span> <span class="n">Maps</span><span class="p">.</span><span class="na">newHashMap</span><span class="p">();</span>
  <span class="n">outputKey</span> <span class="o">=</span> <span class="n">PartitionKey</span><span class="p">.</span><span class="na">builder</span><span class="p">().</span><span class="na">addStringField</span><span class="p">(</span><span class="s">&quot;league&quot;</span><span class="p">,</span> <span class="n">league</span><span class="p">).</span><span class="na">build</span><span class="p">();</span>
  <span class="n">PartitionedFileSetArguments</span><span class="p">.</span><span class="na">setOutputPartitionKey</span><span class="p">(</span><span class="n">outputArgs</span><span class="p">,</span> <span class="n">outputKey</span><span class="p">);</span>
  <span class="n">context</span><span class="p">.</span><span class="na">addOutput</span><span class="p">(</span><span class="n">Output</span><span class="p">.</span><span class="na">ofDataset</span><span class="p">(</span><span class="s">&quot;totals&quot;</span><span class="p">,</span> <span class="n">outputArgs</span><span class="p">));</span>
<span class="p">}</span>
</pre></div>
</div>
<p>Here, the <code class="docutils literal notranslate"><span class="pre">initialize</span></code> method of the MapReduce generates the runtime arguments for the
partitioned file sets that specify the input partition filter and output partition key. This
is convenient for starting the MapReduce, because only a single argument has to be given for
the MapReduce run. If that code was not in the <code class="docutils literal notranslate"><span class="pre">initialize()</span></code>, you could still achieve the
same result by specifying the partition filter and key explicitly in the MapReduce runtime arguments.
For example, give these arguments when starting the MapReduce through a RESTful call:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="p">{</span>
  <span class="s">&quot;dataset.results.input.partition.filter.league.value&quot;</span><span class="p">:</span> <span class="s">&quot;nfl&quot;</span><span class="p">,</span>
  <span class="s">&quot;dataset.results.input.partition.filter.season.lower&quot;</span><span class="p">:</span> <span class="s">&quot;1980&quot;</span><span class="p">,</span>
  <span class="s">&quot;dataset.results.input.partition.filter.season.upper&quot;</span><span class="p">:</span> <span class="s">&quot;1990&quot;</span><span class="p">,</span>
  <span class="s">&quot;dataset.totals.output.partition.key.league&quot;</span> <span class="p">:</span> <span class="s">&quot;nfl&quot;</span>
<span class="p">}</span>
</pre></div>
</div>
</div>
<div class="section" id="dynamic-partitioning-of-mapreduce-output">
<h2>Dynamic Partitioning of MapReduce Output<a class="headerlink" href="#dynamic-partitioning-of-mapreduce-output" title="Permalink to this headline">🔗</a></h2>
<p>A MapReduce job can write to multiple partitions of a PartitionedFileSet using the
<code class="docutils literal notranslate"><span class="pre">DynamicPartitioner</span></code> class. To do so, define a class that implements <code class="docutils literal notranslate"><span class="pre">DynamicPartitioner</span></code>.
The core method to override is the <code class="docutils literal notranslate"><span class="pre">getPartitionKey</span></code> method; it maps a record’s key and value
to a <code class="docutils literal notranslate"><span class="pre">PartitionKey</span></code>, which defines which <code class="docutils literal notranslate"><span class="pre">Partition</span></code> the record should be written to:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="kd">public</span> <span class="kd">static</span> <span class="kd">final</span> <span class="kd">class</span> <span class="nc">TimeAndZipPartitioner</span> <span class="kd">extends</span> <span class="n">DynamicPartitioner</span><span class="o">&lt;</span><span class="n">NullWritable</span><span class="p">,</span> <span class="n">Text</span><span class="o">&gt;</span> <span class="p">{</span>

  <span class="kd">private</span> <span class="n">Long</span> <span class="n">time</span><span class="p">;</span>
  <span class="kd">private</span> <span class="n">JsonParser</span> <span class="n">jsonParser</span><span class="p">;</span>

  <span class="nd">@Override</span>
  <span class="kd">public</span> <span class="kt">void</span> <span class="nf">initialize</span><span class="p">(</span><span class="n">MapReduceTaskContext</span><span class="o">&lt;</span><span class="n">NullWritable</span><span class="p">,</span> <span class="n">Text</span><span class="o">&gt;</span> <span class="n">mapReduceTaskContext</span><span class="p">)</span> <span class="p">{</span>
    <span class="k">this</span><span class="p">.</span><span class="na">time</span> <span class="o">=</span> <span class="n">mapReduceTaskContext</span><span class="p">.</span><span class="na">getLogicalStartTime</span><span class="p">();</span>
    <span class="k">this</span><span class="p">.</span><span class="na">jsonParser</span> <span class="o">=</span> <span class="k">new</span> <span class="n">JsonParser</span><span class="p">();</span>
  <span class="p">}</span>

  <span class="nd">@Override</span>
  <span class="kd">public</span> <span class="n">PartitionKey</span> <span class="nf">getPartitionKey</span><span class="p">(</span><span class="n">NullWritable</span> <span class="n">key</span><span class="p">,</span> <span class="n">Text</span> <span class="n">value</span><span class="p">)</span> <span class="p">{</span>
    <span class="kt">int</span> <span class="n">zip</span> <span class="o">=</span> <span class="n">jsonParser</span><span class="p">.</span><span class="na">parse</span><span class="p">(</span><span class="n">value</span><span class="p">.</span><span class="na">toString</span><span class="p">()).</span><span class="na">getAsJsonObject</span><span class="p">().</span><span class="na">get</span><span class="p">(</span><span class="s">&quot;zip&quot;</span><span class="p">).</span><span class="na">getAsInt</span><span class="p">();</span>
    <span class="k">return</span> <span class="n">PartitionKey</span><span class="p">.</span><span class="na">builder</span><span class="p">().</span><span class="na">addLongField</span><span class="p">(</span><span class="s">&quot;time&quot;</span><span class="p">,</span> <span class="n">time</span><span class="p">).</span><span class="na">addIntField</span><span class="p">(</span><span class="s">&quot;zip&quot;</span><span class="p">,</span> <span class="n">zip</span><span class="p">).</span><span class="na">build</span><span class="p">();</span>
  <span class="p">}</span>
<span class="p">}</span>
</pre></div>
</div>
<p>Then set the class of the custom partitioner as runtime arguments of the output PartitionedFileSet:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="n">Map</span><span class="o">&lt;</span><span class="n">String</span><span class="p">,</span> <span class="n">String</span><span class="o">&gt;</span> <span class="n">cleanRecordsArgs</span> <span class="o">=</span> <span class="k">new</span> <span class="n">HashMap</span><span class="o">&lt;&gt;</span><span class="p">();</span>
<span class="n">PartitionedFileSetArguments</span><span class="p">.</span><span class="na">setDynamicPartitioner</span><span class="p">(</span><span class="n">cleanRecordsArgs</span><span class="p">,</span> <span class="n">TimeAndZipPartitioner</span><span class="p">.</span><span class="na">class</span><span class="p">);</span>
<span class="n">context</span><span class="p">.</span><span class="na">addOutput</span><span class="p">(</span><span class="n">Output</span><span class="p">.</span><span class="na">ofDataset</span><span class="p">(</span><span class="n">DataCleansing</span><span class="p">.</span><span class="na">CLEAN_RECORDS</span><span class="p">,</span> <span class="n">cleanRecordsArgs</span><span class="p">));</span>
</pre></div>
</div>
<p>With this, each record processed by the MapReduce job will be written to a path corresponding
to the <code class="docutils literal notranslate"><span class="pre">Partition</span></code> that it was mapped to by the <code class="docutils literal notranslate"><span class="pre">DynamicPartitioner</span></code>, and the set of new <code class="docutils literal notranslate"><span class="pre">Partition</span></code>s
will be registered with the output <code class="docutils literal notranslate"><span class="pre">PartitionedFileSet</span></code> at the end of the job.</p>
<p>Note that by default, any partitions written to must not previously exist. Otherwise, the MapReduce job will
fail at the end of the job and none of the partitions will be added to the <code class="docutils literal notranslate"><span class="pre">PartitionedFileSet</span></code>.
However, a DynamicPartitioner can also be configured to allow appending to or overwriting existing partitions.
For instance, below is an example of configuring the same DynamicPartitioner to allow appending to a partition
if it already exists. If it does not already exist, the partition will be created:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="n">Map</span><span class="o">&lt;</span><span class="n">String</span><span class="p">,</span> <span class="n">String</span><span class="o">&gt;</span> <span class="n">cleanRecordsArgs</span> <span class="o">=</span> <span class="k">new</span> <span class="n">HashMap</span><span class="o">&lt;&gt;</span><span class="p">();</span>
<span class="n">PartitionedFileSetArguments</span><span class="p">.</span><span class="na">setDynamicPartitioner</span><span class="p">(</span><span class="n">cleanRecordsArgs</span><span class="p">,</span> <span class="n">TimeAndZipPartitioner</span><span class="p">.</span><span class="na">class</span><span class="p">,</span>
                                                  <span class="n">DynamicPartitioner</span><span class="p">.</span><span class="na">PartitionWriteOption</span><span class="p">.</span><span class="na">CREATE_OR_APPEND</span><span class="p">);</span>
<span class="n">context</span><span class="p">.</span><span class="na">addOutput</span><span class="p">(</span><span class="n">Output</span><span class="p">.</span><span class="na">ofDataset</span><span class="p">(</span><span class="n">DataCleansing</span><span class="p">.</span><span class="na">CLEAN_RECORDS</span><span class="p">,</span> <span class="n">cleanRecordsArgs</span><span class="p">));</span>
</pre></div>
</div>
<p>Likewise, <cite>CREATE_OR_OVERWRITE</cite> has the effect of overwriting any contents of any
previously-existing partition.</p>
</div>
<div class="section" id="incrementally-processing-partitionedfilesets">
<h2>Incrementally Processing PartitionedFileSets<a class="headerlink" href="#incrementally-processing-partitionedfilesets" title="Permalink to this headline">🔗</a></h2>
<div class="section" id="processing-using-mapreduce">
<h3>Processing using MapReduce<a class="headerlink" href="#processing-using-mapreduce" title="Permalink to this headline">🔗</a></h3>
<p>One way to process a partitioned file set is with a repeatedly-running MapReduce program that,
in each run, reads all partitions that have been added since its previous run. This requires
that the MapReduce program persists between runs which partitions have already been consumed.
An easy way is to use the <code class="docutils literal notranslate"><span class="pre">PartitionBatchInput</span></code>, an experimental feature introduced in CDAP 3.3.0.
Your MapReduce program is responsible for providing an implementation of <code class="docutils literal notranslate"><span class="pre">DatasetStatePersistor</span></code> to
persist and then read back its state. In this example, the state is persisted to a row in a
KeyValue Table, using the convenience class <code class="docutils literal notranslate"><span class="pre">KVTableStatePersistor</span></code>; however, other types of
Datasets can also be used. In the <code class="docutils literal notranslate"><span class="pre">initialize</span></code> method of the MapReduce, specify the
partitioned file set to be used as input as well as the <code class="docutils literal notranslate"><span class="pre">DatasetStatePersistor</span></code> to be used:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="nd">@Override</span>
<span class="kd">public</span> <span class="kt">void</span> <span class="nf">initialize</span><span class="p">()</span> <span class="kd">throws</span> <span class="n">Exception</span> <span class="p">{</span>
  <span class="n">MapReduceContext</span> <span class="n">context</span> <span class="o">=</span> <span class="n">getContext</span><span class="p">();</span>
  <span class="p">...</span>
  <span class="n">partitionCommitter</span> <span class="o">=</span>
    <span class="n">PartitionBatchInput</span><span class="p">.</span><span class="na">setInput</span><span class="p">(</span><span class="n">context</span><span class="p">,</span> <span class="n">DataCleansing</span><span class="p">.</span><span class="na">RAW_RECORDS</span><span class="p">,</span>
                                 <span class="k">new</span> <span class="n">KVTableStatePersistor</span><span class="p">(</span><span class="n">DataCleansing</span><span class="p">.</span><span class="na">CONSUMING_STATE</span><span class="p">,</span> <span class="s">&quot;state.key&quot;</span><span class="p">));</span>
  <span class="p">...</span>
<span class="p">}</span>
</pre></div>
</div>
<p>This will read back the previously persisted state, determine the new partitions to read based upon this
state, and compute a new state to store in memory until a call to the <code class="docutils literal notranslate"><span class="pre">onFinish</span></code> method of the returned
<code class="docutils literal notranslate"><span class="pre">PartitionCommitter</span></code>. The dataset is instantiated with the set of new partitions to read as input and
set as input for the MapReduce job.</p>
<p>To save the state of partition processing, call the returned PartitionCommitter’s <code class="docutils literal notranslate"><span class="pre">onFinish</span></code> method.
This ensures that the next time the MapReduce job runs, it processes only the newly committed partitions:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="nd">@Override</span>
<span class="kd">public</span> <span class="kt">void</span> <span class="nf">destroy</span><span class="p">()</span> <span class="p">{</span>
  <span class="kt">boolean</span> <span class="n">succeeded</span> <span class="o">=</span> <span class="n">getContext</span><span class="p">().</span><span class="na">getState</span><span class="p">().</span><span class="na">getStatus</span><span class="p">()</span> <span class="o">==</span> <span class="n">ProgramStatus</span><span class="p">.</span><span class="na">COMPLETED</span><span class="p">;</span>
  <span class="n">partitionCommitter</span><span class="p">.</span><span class="na">onFinish</span><span class="p">(</span><span class="n">succeeded</span><span class="p">);</span>
<span class="p">}</span>
</pre></div>
</div>
</div>
<div class="section" id="processing-using-other-programs">
<h3>Processing using Other Programs<a class="headerlink" href="#processing-using-other-programs" title="Permalink to this headline">🔗</a></h3>
<p>Partitions of a partitioned file set can also be incrementally processed from other program types
using the generic <code class="docutils literal notranslate"><span class="pre">PartitionConsumer</span></code> APIs. The implementation of these APIs that can be used from multiple instances
of a program is <code class="docutils literal notranslate"><span class="pre">ConcurrentPartitionConsumer</span></code>. To use, you simply need to provide the instance of the
partitioned file set you want to consume from, along with a <code class="docutils literal notranslate"><span class="pre">StatePersistor</span></code>, responsible for managing
persistence of the consumer’s state:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="c1">// This can be in any program where we have access to Datasets,</span>
<span class="c1">// such as a Worker, Workflow Action, or even in a MapReduce</span>
<span class="n">PartitionConsumer</span> <span class="n">consumer</span> <span class="o">=</span>
  <span class="k">new</span> <span class="n">ConcurrentPartitionConsumer</span><span class="p">(</span><span class="n">partitionedFileSet</span><span class="p">,</span> <span class="k">new</span> <span class="n">CustomStatePersistor</span><span class="p">(</span><span class="n">persistenceTable</span><span class="p">));</span>

<span class="c1">// Call consumePartitions to get a list of partitions to process</span>
<span class="kd">final</span> <span class="n">List</span><span class="o">&lt;</span><span class="n">PartitionDetail</span><span class="o">&gt;</span> <span class="n">partitions</span> <span class="o">=</span> <span class="n">consumer</span><span class="p">.</span><span class="na">consumePartitions</span><span class="p">().</span><span class="na">getPartitions</span><span class="p">();</span>

<span class="c1">// Process partitions</span>
<span class="p">...</span>

<span class="c1">// Once done processing, onFinish must be called with a boolean value indicating success or failure, so that</span>
<span class="c1">// the partitions can be marked accordingly for completion or retries in the future</span>
<span class="n">consumer</span><span class="p">.</span><span class="na">onFinish</span><span class="p">(</span><span class="n">partitions</span><span class="p">,</span> <span class="kc">true</span><span class="p">);</span>
</pre></div>
</div>
<p>The <code class="docutils literal notranslate"><span class="pre">consumePartitions</span></code> method of the <code class="docutils literal notranslate"><span class="pre">PartitionConsumer</span></code> can optionally take in a limit (an int), which will
limit the number of returned partitions. It can also take in a <code class="docutils literal notranslate"><span class="pre">PartitionAcceptor</span></code>, which allows you to
define a custom method to limit the number of partitions. For instance, it may be useful to limit the number of
partitions to process at a time, and have it be based on the size of the partitions:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="kd">public</span> <span class="kd">class</span> <span class="nc">SizeLimitingAcceptor</span> <span class="kd">implements</span> <span class="n">PartitionAcceptor</span> <span class="p">{</span>

  <span class="kd">private</span> <span class="kd">final</span> <span class="kt">int</span> <span class="n">sizeLimitMB</span><span class="p">;</span>
  <span class="kd">private</span> <span class="kt">int</span> <span class="n">acceptedMBSoFar</span><span class="p">;</span>

  <span class="kd">public</span> <span class="nf">SizeLimitingAcceptor</span><span class="p">(</span><span class="kt">int</span> <span class="n">sizeLimitMB</span><span class="p">)</span> <span class="p">{</span>
    <span class="k">this</span><span class="p">.</span><span class="na">sizeLimitMB</span> <span class="o">=</span> <span class="n">sizeLimitMB</span><span class="p">;</span>
    <span class="k">this</span><span class="p">.</span><span class="na">acceptedMBSoFar</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span>
  <span class="p">}</span>

  <span class="nd">@Override</span>
  <span class="kd">public</span> <span class="n">Return</span> <span class="nf">accept</span><span class="p">(</span><span class="n">PartitionDetail</span> <span class="n">partitionDetail</span><span class="p">)</span> <span class="p">{</span>
    <span class="c1">// assuming that the metadata contains the size of that partition</span>
    <span class="n">acceptedMBSoFar</span> <span class="o">+=</span> <span class="n">Integer</span><span class="p">.</span><span class="na">valueOf</span><span class="p">(</span><span class="n">partitionDetail</span><span class="p">.</span><span class="na">getMetadata</span><span class="p">().</span><span class="na">get</span><span class="p">(</span><span class="s">&quot;sizeMB&quot;</span><span class="p">));</span>
    <span class="k">if</span> <span class="p">(</span><span class="n">acceptedMBSoFar</span> <span class="o">&gt;</span> <span class="n">sizeLimitMB</span><span class="p">)</span> <span class="p">{</span>
      <span class="k">return</span> <span class="n">Return</span><span class="p">.</span><span class="na">STOP</span><span class="p">;</span>
    <span class="p">}</span>
    <span class="k">return</span> <span class="n">Return</span><span class="p">.</span><span class="na">ACCEPT</span><span class="p">;</span>
  <span class="p">}</span>
<span class="p">}</span>
</pre></div>
</div>
<p>It can then be used as:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="c1">// return only partitions, to process up to 500MB of data</span>
<span class="n">partitions</span> <span class="o">=</span> <span class="n">consumer</span><span class="p">.</span><span class="na">consumePartitions</span><span class="p">(</span><span class="k">new</span> <span class="n">SizeLimitingAcceptor</span><span class="p">(</span><span class="mi">500</span><span class="p">));</span>
</pre></div>
</div>
</div>
</div>
<div class="section" id="exploring-partitionedfilesets">
<span id="id1"></span><h2>Exploring PartitionedFileSets<a class="headerlink" href="#exploring-partitionedfilesets" title="Permalink to this headline">🔗</a></h2>
<p>A partitioned file set can be explored with ad-hoc queries if you enable it at creation time:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="n">createDataset</span><span class="p">(</span><span class="s">&quot;results&quot;</span><span class="p">,</span> <span class="n">PartitionedFileSet</span><span class="p">.</span><span class="na">class</span><span class="p">,</span> <span class="n">PartitionedFileSetProperties</span><span class="p">.</span><span class="na">builder</span><span class="p">()</span>
  <span class="c1">// Properties for partitioning</span>
  <span class="p">.</span><span class="na">setPartitioning</span><span class="p">(</span><span class="n">Partitioning</span><span class="p">.</span><span class="na">builder</span><span class="p">().</span><span class="na">addStringField</span><span class="p">(</span><span class="s">&quot;league&quot;</span><span class="p">).</span><span class="na">addIntField</span><span class="p">(</span><span class="s">&quot;season&quot;</span><span class="p">).</span><span class="na">build</span><span class="p">())</span>
  <span class="c1">// Properties for file set</span>
  <span class="p">.</span><span class="na">setInputFormat</span><span class="p">(</span><span class="n">TextInputFormat</span><span class="p">.</span><span class="na">class</span><span class="p">)</span>
  <span class="p">.</span><span class="na">setOutputFormat</span><span class="p">(</span><span class="n">TextOutputFormat</span><span class="p">.</span><span class="na">class</span><span class="p">)</span>
  <span class="p">.</span><span class="na">setOutputProperty</span><span class="p">(</span><span class="n">TextOutputFormat</span><span class="p">.</span><span class="na">SEPERATOR</span><span class="p">,</span> <span class="s">&quot;,&quot;</span><span class="p">)</span>
  <span class="c1">// Properties for Explore (to create a partitioned Hive table)</span>
  <span class="p">.</span><span class="na">setEnableExploreOnCreate</span><span class="p">(</span><span class="kc">true</span><span class="p">)</span>
  <span class="p">.</span><span class="na">setExploreFormat</span><span class="p">(</span><span class="s">&quot;csv&quot;</span><span class="p">)</span>
  <span class="p">.</span><span class="na">setExploreSchema</span><span class="p">(</span><span class="s">&quot;date STRING, winner STRING, loser STRING, winnerpoints INT, loserpoints INT&quot;</span><span class="p">)</span>
  <span class="p">.</span><span class="na">build</span><span class="p">());</span>
</pre></div>
</div>
<p>The essential parts (to enable exploration) of the above sample are these lines:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="p">.</span> <span class="p">.</span> <span class="p">.</span>
<span class="c1">// Properties for Explore (to create a partitioned Hive table)</span>
<span class="p">.</span><span class="na">setEnableExploreOnCreate</span><span class="p">(</span><span class="kc">true</span><span class="p">)</span>
<span class="p">.</span><span class="na">setExploreFormat</span><span class="p">(</span><span class="s">&quot;csv&quot;</span><span class="p">)</span>
<span class="p">.</span><span class="na">setExploreSchema</span><span class="p">(</span><span class="s">&quot;date STRING, winner STRING, loser STRING, winnerpoints INT, loserpoints INT&quot;</span><span class="p">)</span>
<span class="p">.</span> <span class="p">.</span> <span class="p">.</span>
</pre></div>
</div>
<p>This results in the creation of an external table in Hive with the schema given in the
<code class="docutils literal notranslate"><span class="pre">setExploreSchema()</span></code>. The supported formats (set by <code class="docutils literal notranslate"><span class="pre">setExploreFormat()</span></code>) are <code class="docutils literal notranslate"><span class="pre">csv</span></code>
and <code class="docutils literal notranslate"><span class="pre">text</span></code>. Both define that the format is text. For <code class="docutils literal notranslate"><span class="pre">csv</span></code>, the field delimiter is a
comma, whereas for <code class="docutils literal notranslate"><span class="pre">text</span></code>, you can specify the field delimiter using <code class="docutils literal notranslate"><span class="pre">setExploreFormatProperty()</span></code>.</p>
<p>For example, to use a colon as the field separator:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="p">.</span><span class="na">setExploreFormat</span><span class="p">(</span><span class="s">&quot;text&quot;</span><span class="p">)</span>
<span class="p">.</span><span class="na">setExploreFormatProperty</span><span class="p">(</span><span class="s">&quot;delimiter&quot;</span><span class="p">,</span> <span class="s">&quot;:&quot;</span><span class="p">);</span>
</pre></div>
</div>
<p>If your file format is not text, you can still explore the dataset, but you need to give
detailed instructions when creating the dataset. For example, to use Avro as the file
format:</p>
<div class="highlight-java notranslate"><div class="highlight"><pre><span></span><span class="p">.</span><span class="na">setEnableExploreOnCreate</span><span class="p">(</span><span class="kc">true</span><span class="p">)</span>
<span class="p">.</span><span class="na">setSerDe</span><span class="p">(</span><span class="s">&quot;org.apache.hadoop.hive.serde2.avro.AvroSerDe&quot;</span><span class="p">)</span>
<span class="p">.</span><span class="na">setExploreInputFormat</span><span class="p">(</span><span class="s">&quot;org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat&quot;</span><span class="p">)</span>
<span class="p">.</span><span class="na">setExploreOutputFormat</span><span class="p">(</span><span class="s">&quot;org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat&quot;</span><span class="p">)</span>
<span class="p">.</span><span class="na">setTableProperty</span><span class="p">(</span><span class="s">&quot;avro.schema.literal&quot;</span><span class="p">,</span> <span class="n">SCHEMA_STRING</span><span class="p">)</span>
</pre></div>
</div>
<p>You need to specify the SerDe, the input format, the output format, and any additional properties
any of these may need as table properties. This is an experimental feature and only tested for
Avro; see <a class="reference internal" href="../../data-exploration/filesets.html#fileset-exploration"><span class="std std-ref">FileSet Exploration</span></a> for more details.</p>
</div>
<div class="section" id="partitionedfilesets-and-transactions">
<span id="datasets-partitioned-fileset-transactions"></span><h2>PartitionedFileSets and Transactions<a class="headerlink" href="#partitionedfilesets-and-transactions" title="Permalink to this headline">🔗</a></h2>
<p>A PartitionedFileSet is a hybrid of a non-transactional FileSet and a transactional Table
that stores the partition metadata. As a consequence, operations that need access to the
partition table (such as adding a partition or listing partitions) can only be performed
in the context of a transaction, while operations that only require access to the
FileSet (such as <code class="docutils literal notranslate"><span class="pre">getPartitionOutput()</span></code> or <code class="docutils literal notranslate"><span class="pre">getEmbeddedFileSet()</span></code>) can be performed
without a transaction.</p>
<p>Because a FileSet is not a transactional dataset, it normally does not participate in a
transaction rollback: files written in a transaction are not rolled back if the transaction
fails; and files deleted in a transaction are not restored. However, in the context of a
PartitionedFileSet, consistency between the partition files and the partition metadata
is desired. As a consequence, the FileSet embedded in a PartitionedFileSet behaves
transactionally as follows:</p>
<ul class="simple">
<li>If <code class="docutils literal notranslate"><span class="pre">PartitionOutput.addPartition()</span></code> is used to add a new partition, and the
transaction fails, then the location of that PartitionOutput is deleted.</li>
<li>If a partition is added as the output of a MapReduce program, and the MapReduce fails,
then the partition and its files are removed as part of the job cleanup.</li>
<li>However, if a partition is added using <code class="docutils literal notranslate"><span class="pre">PartitionedFileSet.addPartition()</span></code> with
an existing relative path in the FileSet, then the files at that location are not
removed on transaction failure.</li>
<li>If a partition is deleted using <code class="docutils literal notranslate"><span class="pre">dropPartition()</span></code>, then the partition and its files
are restored if the transaction fails.</li>
</ul>
</div>
</div>

</div>
    <div class="col-md-2">
      <div id="right-sidebar" class="bs-sidenav scrollable-y" role="complementary">
        <div id="localtoc-scrollspy">
        </div>
      </div>
    </div></div>
</div>
<!-- block main content end -->
<!-- block footer -->
<footer class="footer">
      <div class="container">
        <div class="row">
          <div class="col-md-2 footer-left"><a title="FileSet Dataset" href="fileset.html">Previous</a></div>
          <div class="col-md-8 footer-center"><a class="footer-tab-link" href="../../table-of-contents/../../reference-manual/licenses/index.html">Copyright</a> &copy; 2014-2020 Cask Data, Inc. &bull; <a class="footer-tab-link" href="https://docs.cask.co/cdap/6.1.1/cdap-docs-6.1.1-web.zip" rel="nofollow">Download</a> an archive or
<a class="footer-tab-link" href="https://docs.cask.co/cdap">switch the version</a> of the documentation
          </div>
          <div class="col-md-2 footer-right"><a title="TimePartitioned FileSet" href="time-partitioned-fileset.html">Next</a></div>
        </div>
      </div>
    </footer>
<!-- block footer end -->
<script type="text/javascript" src="../../_static/bootstrap-3.3.6/js/bootstrap.min.js"></script><script type="text/javascript" src="../../_static/js/bootstrap-sphinx.js"></script><script type="text/javascript" src="../../_static/js/abixTreeList-2.js"></script><script type="text/javascript" src="../../_static/js/cdap-dynamicscrollspy-4.js"></script><script type="text/javascript" src="../../_static/js/cdap-version-menu.js"></script><script type="text/javascript" src="../../_static/js/copy-to-clipboard.js"></script><script type="text/javascript" src="../../_static/js/jquery.mousewheel.min.js"></script><script type="text/javascript" src="../../_static/js/jquery.mCustomScrollbar.js"></script><script type="text/javascript" src="../../_static/js/js.cookie.js"></script><script type="text/javascript" src="../../_static/js/tabbed-parsed-literal-0.2.js"></script><script type="text/javascript" src="../../_static/js/cdap-onload-javascript.js"></script>
    <script src="https://cdap.gitee.io/docs/cdap/json-versions.js"></script>
  </body>
</html>