<!DOCTYPE html><html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en" data-whc_version="25.0">
    <head><link rel="shortcut icon" href="../../../oxygen-webhelp/template/images/favicon.png"/><link rel="icon" href="../../../oxygen-webhelp/template/images/favicon.png"/><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/><meta name="viewport" content="width=device-width, initial-scale=1.0"/><meta http-equiv="X-UA-Compatible" content="IE=edge"/><meta name="copyright" content="(C) Copyright 2024"/><meta name="generator" content="DITA-OT"/><meta name="description" content="By default, StreamSets Control Hub on-premises can work with registered Data Collector s from version 2.1.0.0 to the current version of Control Hub . If you use Control Hub on-premises and you upgrade ..."/><meta name="prodname" content="Data Collector"/><meta name="version" content="3"/><meta name="release" content="16"/><meta name="modification" content="0"/>        
    <!-- NOTE(review): head content above/below is emitted by the DITA-OT / Oxygen WebHelp build
         (see the "generator" meta and the Oxygen comment on the next line); the wh-* metas carry
         the topic's position in the publication (path to root, TOC id, source/output paths) and
         are presumably read by the webhelp scripts loaded below — do not hand-edit values. -->
      <title>Post Upgrade Tasks</title><!--  Generated with Oxygen version 25.1, build number 2023042410.  --><meta name="wh-path2root" content="../../../"/><meta name="wh-toc-id" content="concept_zll_vn5_zw-d16893e8032"/><meta name="wh-source-relpath" content="datacollector/UserGuide/Upgrade/PostUpgrade.dita"/><meta name="wh-out-relpath" content="datacollector/UserGuide/Upgrade/PostUpgrade.html"/>

    <!-- Shared WebHelp stylesheets; the ?buildId query string busts browser caches between builds. -->
    <link rel="stylesheet" type="text/css" href="../../../oxygen-webhelp/app/commons.css?buildId=2023042410"/>
    <link rel="stylesheet" type="text/css" href="../../../oxygen-webhelp/app/topic.css?buildId=2023042410"/>

    <!-- Data scripts (search properties, UI strings, search keywords) load eagerly;
         the deferred app scripts below run after DOM parse and presumably consume them. -->
    <script src="../../../oxygen-webhelp/app/options/properties.js?buildId=20240802104629"></script>
    <script src="../../../oxygen-webhelp/app/localization/strings.js?buildId=2023042410"></script>
    <script src="../../../oxygen-webhelp/app/search/index/keywords.js?buildId=20240802104629"></script>
    <script defer="defer" src="../../../oxygen-webhelp/app/commons.js?buildId=2023042410"></script>
    <script defer="defer" src="../../../oxygen-webhelp/app/topic.js?buildId=2023042410"></script>
<!-- Theme stylesheet (light.css) plus the site-level skin.css override, loaded last so they win the cascade. -->
<link rel="stylesheet" type="text/css" href="../../../oxygen-webhelp/template/light.css?buildId=2023042410"/><link rel="stylesheet" type="text/css" href="../../../skin.css"/></head>

    <body class="wh_topic_page frmBody">
        
        
        

        
<!-- Page header: publication title (linked to the publication root) and the index-terms link. -->
<nav class="navbar navbar-default wh_header" data-whc_version="25.0">
    <div class="container-fluid">
        <div class="wh_header_flex_container navbar-nav navbar-expand-md navbar-dark">
            <div class="wh_logo_and_publication_title_container">
                <div class="wh_logo_and_publication_title">
                    
                    <!--
                            This component is generated only when the following parameters are specified
                            in the transformation scenario:
                            'webhelp.logo.image' and 'webhelp.logo.image.target.url'.
                            See: http://oxygenxml.com/doc/versions/17.1/ug-editor/#topics/dita_webhelp_output.html.
                    -->
                    
                    <div class=" wh_publication_title "><a href="../../../index.html"><span class="booktitle">  <span class="ph mainbooktitle"><span class="ph">Data Collector</span> User Guide</span>  </span></a></div>
                    
                </div>
                
                <!-- The menu button for mobile devices is copied into the output only when the 'webhelp.show.top.menu' parameter is set to 'yes'. -->
                
            </div>

            <div class="wh_top_menu_and_indexterms_link collapse navbar-collapse">
                
                
                <div class=" wh_indexterms_link "><a href="../../../indexTerms.html" title="Index" aria-label="Go to index terms page"><span>Index</span></a></div>
                
            </div>
        </div>
    </div>
</nav>

        <!-- Site search: submits the query to search.html via GET (searchQuery parameter).
             The wrapper div previously carried role="form", which is redundant/incorrect ARIA:
             the inner <form> already exposes the search landmark via role="search", and a
             role="form" landmark without an accessible name is discouraged — removed. -->
        <div class=" wh_search_input navbar-form wh_topic_page_search search ">


<form id="searchForm" method="get" role="search" action="../../../search.html"><div><input type="search" placeholder="Search" class="wh_search_textfield" id="textToSearch" name="searchQuery" aria-label="Search query" required="required"/><button type="submit" class="wh_search_button" aria-label="Search"><span class="search_input_text">Search</span></button></div></form>

</div>
        
        <div class="container-fluid">
            <div class="row">

                <!-- Topic toolbar: breadcrumb trail, highlight/expand toggles, prev/next topic links, print button.
                     Fixes: 'onClick' lowercased to 'onclick' (this document declares the XHTML namespace,
                     where attribute names are case-sensitive lowercase), and explicit type="button" added
                     to the action buttons so they can never act as implicit submit buttons. -->
                <nav class="wh_tools d-print-none">
                    
<div data-tooltip-position="bottom" class=" wh_breadcrumb "><ol class="d-print-none"><li><span class="home"><a href="../../../index.html"><span>Home</span></a></span></li><li><div class="topicref" data-id="concept_ejk_f1f_5v"><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/Upgrade_title.html">Upgrade</a></div></div></li><li class="active"><div class="topicref" data-id="concept_zll_vn5_zw"><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_zll_vn5_zw">Post Upgrade Tasks</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li></ol></div>



                    <div class="wh_right_tools "><button type="button" class="wh_hide_highlight" aria-label="Toggle search highlights" title="Toggle search highlights"></button><button type="button" class="webhelp_expand_collapse_sections" data-next-state="collapsed" aria-label="Collapse sections" title="Collapse sections"></button><div class=" wh_navigation_links "><span id="topic_navigation_links" class="navheader">
  
<span class="navprev"><a class="- topic/link link" href="../../../datacollector/UserGuide/Upgrade/CMUpgrade.html#concept_c45_chv_xv" title="Upgrade an Installation with Cloudera Manager" aria-label="Previous topic: Upgrade an Installation with Cloudera Manager" rel="prev"></a></span>  
<span class="navnext"><a class="- topic/link link" href="../../../datacollector/UserGuide/Upgrade/Upgrade-ExternalSystems.html#task_ijh_wtw_xy" title="Working with Upgraded External Systems" aria-label="Next topic: Working with Upgraded External Systems" rel="next"></a></span>  </span></div>
<!--External resource link-->
<div class=" wh_print_link print d-none d-md-inline-block "><button type="button" onclick="window.print()" title="Print this page" aria-label="Print this page"></button></div>
                        
                        
                        
                        
                    </div>
                </nav>
            </div>

            

<div class="wh_content_area">
                <div class="row">
                    


                        <nav role="navigation" id="wh_publication_toc" class="col-lg-3 col-md-3 col-sm-12 d-md-block d-none d-print-none">
<div id="wh_publication_toc_content">


                            <div class=" wh_publication_toc " data-tooltip-position="right"><span class="expand-button-action-labels"><span id="button-expand-action" role="button" aria-label="Expand"></span><span id="button-collapse-action" role="button" aria-label="Collapse"></span><span id="button-pending-action" role="button" aria-label="Pending"></span></span><ul role="tree" aria-label="Table of Contents"><li role="treeitem" aria-expanded="false"><div data-tocid="concept_htw_ghg_jq-d16893e53" class="topicref" data-id="concept_htw_ghg_jq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_htw_ghg_jq-d16893e53-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Getting_Started/GettingStarted_Title.html#concept_htw_ghg_jq" id="concept_htw_ghg_jq-d16893e53-link">Getting Started</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_l2v_nlp_mpb-d16893e331" class="topicref" data-id="concept_l2v_nlp_mpb" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_l2v_nlp_mpb-d16893e331-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/ReleaseNotes/ReleaseNotes.html#concept_l2v_nlp_mpb" id="concept_l2v_nlp_mpb-d16893e331-link">Release Notes</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_l4q_flb_kr-d16893e2582" class="topicref" data-id="concept_l4q_flb_kr" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_l4q_flb_kr-d16893e2582-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Installation/Install_title.html" id="concept_l4q_flb_kr-d16893e2582-link">Installation</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_ylh_yyz_ky-d16893e3984" class="topicref" data-id="concept_ylh_yyz_ky" 
data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_ylh_yyz_ky-d16893e3984-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Configuration/Config_title.html" id="concept_ylh_yyz_ky-d16893e3984-link">Configuration</a></div></div></li><li role="treeitem" aria-expanded="true"><div data-tocid="concept_ejk_f1f_5v-d16893e7058" class="topicref" data-id="concept_ejk_f1f_5v" data-state="expanded"><span role="button" tabindex="0" aria-labelledby="button-collapse-action concept_ejk_f1f_5v-d16893e7058-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/Upgrade_title.html" id="concept_ejk_f1f_5v-d16893e7058-link">Upgrade</a></div></div><ul role="group" class="navbar-nav nav-list"><li role="treeitem"><div data-tocid="concept_a2n_3fk_5v-d16893e7080" class="topicref" data-id="concept_a2n_3fk_5v" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/Upgrade.html#concept_a2n_3fk_5v" id="concept_a2n_3fk_5v-d16893e7080-link">Upgrade</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_vyy_sfp_hz-d16893e7104" class="topicref" data-id="concept_vyy_sfp_hz" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_vyy_sfp_hz-d16893e7104-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PreUpgrade.html#concept_vyy_sfp_hz" id="concept_vyy_sfp_hz-d16893e7104-link">Pre Upgrade Tasks</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_ryn_4fk_5v-d16893e7281" class="topicref" data-id="concept_ryn_4fk_5v" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action 
concept_ryn_4fk_5v-d16893e7281-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/Tarball.html#concept_ryn_4fk_5v" id="concept_ryn_4fk_5v-d16893e7281-link">Upgrade an Installation from the Tarball</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_ws4_vq5_xv-d16893e7579" class="topicref" data-id="concept_ws4_vq5_xv" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_ws4_vq5_xv-d16893e7579-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/RPM.html#concept_ws4_vq5_xv" id="concept_ws4_vq5_xv-d16893e7579-link">Upgrade an Installation from the RPM Package</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_c45_chv_xv-d16893e7812" class="topicref" data-id="concept_c45_chv_xv" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_c45_chv_xv-d16893e7812-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/CMUpgrade.html#concept_c45_chv_xv" id="concept_c45_chv_xv-d16893e7812-link">Upgrade an Installation with Cloudera Manager</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem" aria-expanded="true" class="active"><div data-tocid="concept_zll_vn5_zw-d16893e8032" class="topicref" data-id="concept_zll_vn5_zw" data-state="expanded"><span role="button" tabindex="0" aria-labelledby="button-collapse-action concept_zll_vn5_zw-d16893e8032-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_zll_vn5_zw" id="concept_zll_vn5_zw-d16893e8032-link">Post Upgrade Tasks</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div><ul 
role="group" class="navbar-nav nav-list"><li role="treeitem"><div data-tocid="concept_dzn_cdy_1cb-d16893e8056" class="topicref" data-id="concept_dzn_cdy_1cb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_dzn_cdy_1cb" id="concept_dzn_cdy_1cb-d16893e8056-link">Update <span class="ph">Control Hub</span> On-Premises</a><div class="wh-tooltip"><p class="shortdesc">By default, <span class="ph">StreamSets Control Hub</span>         on-premises can work with registered <span class="ph">Data Collector</span>s from         version 2.1.0.0 to the current version of <span class="ph">Control Hub</span>. If you         use <span class="ph">Control Hub</span>         on-premises and you upgrade registered <span class="ph">Data Collector</span>s to a         version higher than your current version of <span class="ph">Control Hub</span>, you         might need to modify the <span class="ph">Data Collector</span> version         range within your <span class="ph">Control Hub</span>         installation.</p></div></div></div></li><li role="treeitem"><div data-tocid="concept_wnl_zk4_5bb-d16893e8109" class="topicref" data-id="concept_wnl_zk4_5bb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_wnl_zk4_5bb" id="concept_wnl_zk4_5bb-d16893e8109-link">Update Pipelines using Legacy Stage Libraries</a></div></div></li><li role="treeitem"><div data-tocid="concept_s25_4ll_fcc-d16893e8131" class="topicref" data-id="concept_s25_4ll_fcc" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_s25_4ll_fcc" id="concept_s25_4ll_fcc-d16893e8131-link">Removed Databricks ML Evaluator processor</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li 
role="treeitem"><div data-tocid="concept_y5q_bml_fcc-d16893e8155" class="topicref" data-id="concept_y5q_bml_fcc" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_y5q_bml_fcc" id="concept_y5q_bml_fcc-d16893e8155-link">Install the Oracle JDBC driver for upgraded Oracle Multitable Consumer origins and Oracle destinations</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_fgg_ntp_xbc-d16893e8179" class="topicref" data-id="concept_fgg_ntp_xbc" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_fgg_ntp_xbc" id="concept_fgg_ntp_xbc-d16893e8179-link">Review Pipelines with Google BigQuery or Snowflake Destinations Writing JSON Data</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_s3n_htp_xbc-d16893e8203" class="topicref" data-id="concept_s3n_htp_xbc" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_s3n_htp_xbc" id="concept_s3n_htp_xbc-d16893e8203-link">Review Snowflake File Uploader Staging Details</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_us5_ctp_xbc-d16893e8227" class="topicref" data-id="concept_us5_ctp_xbc" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_us5_ctp_xbc" id="concept_us5_ctp_xbc-d16893e8227-link">Review Pipeline Notification Email Configurations</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_emc_vsp_xbc-d16893e8251" class="topicref" 
data-id="concept_emc_vsp_xbc" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_emc_vsp_xbc" id="concept_emc_vsp_xbc-d16893e8251-link">Review the Batch Wait Time for Directory Origins</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_cyx_y1d_1bc-d16893e8275" class="topicref" data-id="concept_cyx_y1d_1bc" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_cyx_y1d_1bc" id="concept_cyx_y1d_1bc-d16893e8275-link">Review the Oracle CDC Client Record Cache Size</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_lwn_dbd_1bc-d16893e8299" class="topicref" data-id="concept_lwn_dbd_1bc" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_lwn_dbd_1bc" id="concept_lwn_dbd_1bc-d16893e8299-link">Review Search Mode Behavior for Start Jobs Pipelines</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_es4_vvk_f1c-d16893e8324" class="topicref" data-id="concept_es4_vvk_f1c" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_es4_vvk_f1c" id="concept_es4_vvk_f1c-d16893e8324-link">Review the Maximum Batch Vault Size for Oracle CDC Origin Pipelines</a></div></div></li><li role="treeitem"><div data-tocid="concept_pm3_2wk_f1c-d16893e8346" class="topicref" data-id="concept_pm3_2wk_f1c" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_pm3_2wk_f1c" 
id="concept_pm3_2wk_f1c-d16893e8346-link">Review Amazon, Azure, Data Parser, JMS Consumer, and Pulsar Consumer Origin Pipelines</a></div></div></li><li role="treeitem"><div data-tocid="concept_fvp_nmw_xbc-d16893e8368" class="topicref" data-id="concept_fvp_nmw_xbc" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_fvp_nmw_xbc" id="concept_fvp_nmw_xbc-d16893e8368-link">Review JDBC Lookup Processor SQL Query Configuration</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_xgw_njm_nzb-d16893e8392" class="topicref" data-id="concept_xgw_njm_nzb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_xgw_njm_nzb" id="concept_xgw_njm_nzb-d16893e8392-link">Review Oracle Bulkload Origin Pipelines</a></div></div></li><li role="treeitem"><div data-tocid="concept_lkb_z5t_31c-d16893e8414" class="topicref" data-id="concept_lkb_z5t_31c" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_lkb_z5t_31c" id="concept_lkb_z5t_31c-d16893e8414-link">Update stages that were using Enterprise stage libraries</a></div></div></li><li role="treeitem"><div data-tocid="concept_m14_bqx_xyb-d16893e8436" class="topicref" data-id="concept_m14_bqx_xyb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_m14_bqx_xyb" id="concept_m14_bqx_xyb-d16893e8436-link">Grant Users View Access for the Oracle CDC Origin</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_adq_4qx_xyb-d16893e8460" class="topicref" data-id="concept_adq_4qx_xyb" 
data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_adq_4qx_xyb" id="concept_adq_4qx_xyb-d16893e8460-link">Review Amazon S3, Azure Blob Storage, and Azure Data Lake Storage Gen2 Origin Pipelines</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_okh_h2z_hbc-d16893e8484" class="topicref" data-id="concept_okh_h2z_hbc" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_okh_h2z_hbc" id="concept_okh_h2z_hbc-d16893e8484-link">Review the Batch Wait Time for <span class="ph">ALDS Gen1, </span>ALDS Gen2 (Legacy), Directory, and Hadoop FS Standalone Origins</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_mpq_h3m_2yb-d16893e8511" class="topicref" data-id="concept_mpq_h3m_2yb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_mpq_h3m_2yb" id="concept_mpq_h3m_2yb-d16893e8511-link">Review Amazon S3 and Databricks Delta Lake Stages</a></div></div></li><li role="treeitem"><div data-tocid="concept_nl1_gft_wxb-d16893e8533" class="topicref" data-id="concept_nl1_gft_wxb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_nl1_gft_wxb" id="concept_nl1_gft_wxb-d16893e8533-link">Install the Databricks Stage Library</a></div></div></li><li role="treeitem"><div data-tocid="concept_qqd_5ft_wxb-d16893e8555" class="topicref" data-id="concept_qqd_5ft_wxb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a 
href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_qqd_5ft_wxb" id="concept_qqd_5ft_wxb-d16893e8555-link">Review Databricks Stages</a></div></div></li><li role="treeitem"><div data-tocid="concept_g1z_cgt_wxb-d16893e8578" class="topicref" data-id="concept_g1z_cgt_wxb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_g1z_cgt_wxb" id="concept_g1z_cgt_wxb-d16893e8578-link">Update the Databricks Delta Lake Connection</a></div></div></li><li role="treeitem"><div data-tocid="concept_wbv_vty_m1c-d16893e8600" class="topicref" data-id="concept_wbv_vty_m1c" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_wbv_vty_m1c" id="concept_wbv_vty_m1c-d16893e8600-link">Review Scripts in Jython Stages</a></div></div></li><li role="treeitem"><div data-tocid="concept_zw1_ngt_wxb-d16893e8622" class="topicref" data-id="concept_zw1_ngt_wxb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_zw1_ngt_wxb" id="concept_zw1_ngt_wxb-d16893e8622-link">Install the JDBC Oracle Stage Library</a></div></div></li><li role="treeitem"><div data-tocid="concept_nvx_kzc_yxb-d16893e8644" class="topicref" data-id="concept_nvx_kzc_yxb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_nvx_kzc_yxb" id="concept_nvx_kzc_yxb-d16893e8644-link">Grant Users View Access for the Oracle CDC Origin</a></div></div></li><li role="treeitem"><div data-tocid="concept_z4q_zgt_wxb-d16893e8666" class="topicref" data-id="concept_z4q_zgt_wxb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a 
href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_z4q_zgt_wxb" id="concept_z4q_zgt_wxb-d16893e8666-link">Update Origins and Processors that Read Compressed Files</a></div></div></li><li role="treeitem"><div data-tocid="concept_ng4_b5g_gxb-d16893e8688" class="topicref" data-id="concept_ng4_b5g_gxb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_ng4_b5g_gxb" id="concept_ng4_b5g_gxb-d16893e8688-link">Install the Azure stage library </a></div></div></li><li role="treeitem"><div data-tocid="concept_nkt_k5g_gxb-d16893e8710" class="topicref" data-id="concept_nkt_k5g_gxb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_nkt_k5g_gxb" id="concept_nkt_k5g_gxb-d16893e8710-link">Review Salesforce pipelines</a></div></div></li><li role="treeitem"><div data-tocid="concept_l5r_chc_dxb-d16893e8732" class="topicref" data-id="concept_l5r_chc_dxb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_l5r_chc_dxb" id="concept_l5r_chc_dxb-d16893e8732-link">Review OPC UA Client Pipelines</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_mgz_4gp_qwb-d16893e8756" class="topicref" data-id="concept_mgz_4gp_qwb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_mgz_4gp_qwb" id="concept_mgz_4gp_qwb-d16893e8756-link">Install the Snowflake Stage Library to Use Snowflake</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_k3p_dkr_rvb-d16893e8780" class="topicref" data-id="concept_k3p_dkr_rvb" 
data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_k3p_dkr_rvb" id="concept_k3p_dkr_rvb-d16893e8780-link">Install the Google Cloud Stage Library to Use BigQuery</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_cdq_5kr_rvb-d16893e8804" class="topicref" data-id="concept_cdq_5kr_rvb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_cdq_5kr_rvb" id="concept_cdq_5kr_rvb-d16893e8804-link">Review JDBC Multitable Consumer Pipelines</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_g2p_rlr_rvb-d16893e8829" class="topicref" data-id="concept_g2p_rlr_rvb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_g2p_rlr_rvb" id="concept_g2p_rlr_rvb-d16893e8829-link">Review Missing Field Behavior for Field Replacer Processors</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_gm4_1qr_rvb-d16893e8853" class="topicref" data-id="concept_gm4_1qr_rvb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_gm4_1qr_rvb" id="concept_gm4_1qr_rvb-d16893e8853-link">Review runtime:loadResource Pipelines</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_bcb_blq_f1c-d16893e8877" class="topicref" data-id="concept_bcb_blq_f1c" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a 
href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_bcb_blq_f1c" id="concept_bcb_blq_f1c-d16893e8877-link">Manage Underscores in Snowflake Connection Information</a></div></div></li><li role="treeitem"><div data-tocid="concept_hmq_ds1_1vb-d16893e8899" class="topicref" data-id="concept_hmq_ds1_1vb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_hmq_ds1_1vb" id="concept_hmq_ds1_1vb-d16893e8899-link">Review MySQL Binary Log Pipelines</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_yjm_ms1_1vb-d16893e8923" class="topicref" data-id="concept_yjm_ms1_1vb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_yjm_ms1_1vb" id="concept_yjm_ms1_1vb-d16893e8923-link">Review Blob and Clob Processing in Oracle CDC Client Pipelines</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_trf_yq1_45b-d16893e8947" class="topicref" data-id="concept_trf_yq1_45b" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_trf_yq1_45b" id="concept_trf_yq1_45b-d16893e8947-link">Review Error Handling for Snowflake CDC Pipelines </a></div></div></li><li role="treeitem"><div data-tocid="concept_b11_rtq_k5b-d16893e8969" class="topicref" data-id="concept_b11_rtq_k5b" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_b11_rtq_k5b" id="concept_b11_rtq_k5b-d16893e8969-link">Review SQL Server Pipelines with Unencrypted Connections</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li 
role="treeitem"><div data-tocid="concept_clc_1vv_j5b-d16893e8993" class="topicref" data-id="concept_clc_1vv_j5b" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_clc_1vv_j5b" id="concept_clc_1vv_j5b-d16893e8993-link">Review Dockerfiles for Custom Docker Images </a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_ufb_3yf_vtb-d16893e9017" class="topicref" data-id="concept_ufb_3yf_vtb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_ufb_3yf_vtb" id="concept_ufb_3yf_vtb-d16893e9017-link">Review Oracle CDC Client Local Buffer Pipelines</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_brw_yhh_ftb-d16893e9041" class="topicref" data-id="concept_brw_yhh_ftb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_brw_yhh_ftb" id="concept_brw_yhh_ftb-d16893e9041-link">Update Oracle CDC Client Origin User Accounts</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_dmp_zlt_jsb-d16893e9065" class="topicref" data-id="concept_dmp_zlt_jsb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_dmp_zlt_jsb" id="concept_dmp_zlt_jsb-d16893e9065-link">Review Couchbase Pipelines</a></div></div></li><li role="treeitem"><div data-tocid="concept_dzk_zjp_krb-d16893e9088" class="topicref" data-id="concept_dzk_zjp_krb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a 
href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_dzk_zjp_krb" id="concept_dzk_zjp_krb-d16893e9088-link">Update Keystore Location</a></div></div></li><li role="treeitem"><div data-tocid="concept_zmv_jgj_krb-d16893e9110" class="topicref" data-id="concept_zmv_jgj_krb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_zmv_jgj_krb" id="concept_zmv_jgj_krb-d16893e9110-link">Review Tableau CRM Pipelines</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_jym_22h_rpb-d16893e9134" class="topicref" data-id="concept_jym_22h_rpb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_jym_22h_rpb" id="concept_jym_22h_rpb-d16893e9134-link">Resolve Kafka and MapR Streams Conflicts</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_lmd_dxh_rpb-d16893e9158" class="topicref" data-id="concept_lmd_dxh_rpb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_lmd_dxh_rpb" id="concept_lmd_dxh_rpb-d16893e9158-link">Review HTTP Client Processor Pipelines</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_bz1_15m_h4b-d16893e9182" class="topicref" data-id="concept_bz1_15m_h4b" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_bz1_15m_h4b" id="concept_bz1_15m_h4b-d16893e9182-link">Verify Elasticsearch Security</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div 
data-tocid="concept_ijl_rzf_m4b-d16893e9206" class="topicref" data-id="concept_ijl_rzf_m4b" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_ijl_rzf_m4b" id="concept_ijl_rzf_m4b-d16893e9206-link">Adjust PostgreSQL CDC Pipelines or PostgreSQL Configuration</a></div></div></li><li role="treeitem"><div data-tocid="concept_ilj_5cl_j4b-d16893e9228" class="topicref" data-id="concept_ilj_5cl_j4b" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_ilj_5cl_j4b" id="concept_ilj_5cl_j4b-d16893e9228-link">Review Processing of MySQL Data (JDBC Processors)</a></div></div></li><li role="treeitem"><div data-tocid="concept_nzs_nsz_vnb-d16893e9250" class="topicref" data-id="concept_nzs_nsz_vnb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_nzs_nsz_vnb" id="concept_nzs_nsz_vnb-d16893e9250-link">Review Google Pub/Sub Producer Pipelines</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_vdt_n5h_wnb-d16893e9274" class="topicref" data-id="concept_vdt_n5h_wnb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_vdt_n5h_wnb" id="concept_vdt_n5h_wnb-d16893e9274-link">Review JDBC Multitable Consumer Pipelines</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_k53_vhk_cgb-d16893e9298" class="topicref" data-id="concept_k53_vhk_cgb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_k53_vhk_cgb" 
id="concept_k53_vhk_cgb-d16893e9298-link">Update Oracle CDC Client Pipelines </a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_vs5_2tz_lnb-d16893e9322" class="topicref" data-id="concept_vs5_2tz_lnb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_vs5_2tz_lnb" id="concept_vs5_2tz_lnb-d16893e9322-link">Update Cluster EMR Batch Pipelines</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_q1s_vcg_kmb-d16893e9347" class="topicref" data-id="concept_q1s_vcg_kmb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_q1s_vcg_kmb" id="concept_q1s_vcg_kmb-d16893e9347-link">Review Processing of MySQL Data (JDBC Origins)</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_xb1_zvx_gmb-d16893e9371" class="topicref" data-id="concept_xb1_zvx_gmb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_xb1_zvx_gmb" id="concept_xb1_zvx_gmb-d16893e9371-link">Update Elasticsearch Security Properties (Optional)</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_hkf_ylq_xhb-d16893e9395" class="topicref" data-id="concept_hkf_ylq_xhb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_hkf_ylq_xhb" id="concept_hkf_ylq_xhb-d16893e9395-link">Update Syslog Pipelines</a></div></div></li><li role="treeitem"><div data-tocid="concept_azm_ncy_vhb-d16893e9417" class="topicref" 
data-id="concept_azm_ncy_vhb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_azm_ncy_vhb" id="concept_azm_ncy_vhb-d16893e9417-link">JDBC Tee and JDBC Producer Cache Change</a><div class="wh-tooltip"><p class="shortdesc">Starting with version 3.9.0, the JDBC Tee processor and the JDBC Producer destination         no longer cache prepared statements when performing single-row operations. As a result, the         Max Cache Size Per Batch property has been removed from both stages.</p></div></div></div></li><li role="treeitem"><div data-tocid="concept_d2k_ssk_1hb-d16893e9442" class="topicref" data-id="concept_d2k_ssk_1hb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_d2k_ssk_1hb" id="concept_d2k_ssk_1hb-d16893e9442-link">Pipeline Export</a><div class="wh-tooltip"><p class="shortdesc">Starting with version 3.8.0, <span class="ph">Data Collector</span> has         changed the behavior of the pipeline <span class="ph uicontrol">Export</span> option. <span class="ph">Data Collector</span> now         strips all plain text credentials from exported pipelines. Previously, <span class="ph">Data Collector</span>         included plain text credentials in exported pipelines.</p></div></div></div></li><li role="treeitem"><div data-tocid="concept_k1s_j4s_rgb-d16893e9479" class="topicref" data-id="concept_k1s_j4s_rgb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_k1s_j4s_rgb" id="concept_k1s_j4s_rgb-d16893e9479-link">Update TCP Server Pipelines</a><div class="wh-tooltip"><p class="shortdesc">Starting with version 3.7.2, the TCP Server origin has changed the valid values for         the Read Timeout property. 
The property now allows a minimum of 1 second and a maximum of         3,600 seconds.</p></div></div></div></li><li role="treeitem"><div data-tocid="concept_clv_tzk_cgb-d16893e9504" class="topicref" data-id="concept_clv_tzk_cgb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_clv_tzk_cgb" id="concept_clv_tzk_cgb-d16893e9504-link">Update Cluster Pipelines</a><div class="wh-tooltip"><p class="shortdesc">Starting with version 3.7.0, <span class="ph">Data Collector</span> now         requires that the Java temporary directory on the gateway node in the cluster is         writable.</p></div></div></div></li><li role="treeitem"><div data-tocid="concept_lxn_s5h_gnb-d16893e9532" class="topicref" data-id="concept_lxn_s5h_gnb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_lxn_s5h_gnb" id="concept_lxn_s5h_gnb-d16893e9532-link">Update Kafka Consumer or Kafka Multitopic Consumer Pipelines</a></div></div></li><li role="treeitem"><div data-tocid="task_tzp_2dd_vhb-d16893e9554" class="topicref" data-id="task_tzp_2dd_vhb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#task_tzp_2dd_vhb" id="task_tzp_2dd_vhb-d16893e9554-link">Update JDBC Pipelines</a><div class="wh-tooltip"><p class="shortdesc">Starting with version 3.5.0, <span class="ph">Data Collector</span>         requires the maximum lifetime for a connection to be at least 30 minutes in stages that use         a JDBC connection. <span class="ph">Data Collector</span> does         not validate stages with lower non-zero values configured. 
</p></div></div></div></li><li role="treeitem"><div data-tocid="concept_b34_vly_cfb-d16893e9585" class="topicref" data-id="concept_b34_vly_cfb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_b34_vly_cfb" id="concept_b34_vly_cfb-d16893e9585-link">Update Spark Executor with Databricks Pipelines</a><div class="wh-tooltip"><p class="shortdesc">Starting with version 3.5.0, <span class="ph">Data Collector</span>         introduces a new Databricks Job Launcher executor and has removed the ability to use the         Spark executor with Databricks.</p></div></div></div></li><li role="treeitem"><div data-tocid="concept_jzf_b3b_xdb-d16893e9613" class="topicref" data-id="concept_jzf_b3b_xdb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_jzf_b3b_xdb" id="concept_jzf_b3b_xdb-d16893e9613-link">Update Pipelines to Use Spark 2.1 or Later</a></div></div></li><li role="treeitem"><div data-tocid="concept_hxf_3yd_qcb-d16893e9636" class="topicref" data-id="concept_hxf_3yd_qcb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_hxf_3yd_qcb" id="concept_hxf_3yd_qcb-d16893e9636-link">Update Value Replacer Pipelines</a><div class="wh-tooltip"><p class="shortdesc">Starting with version 3.1.0.0, <span class="ph">Data Collector</span>         introduces a new Field Replacer processor and has deprecated the Value Replacer processor. 
</p></div></div></div></li><li role="treeitem"><div data-tocid="concept_dzq_djt_vcb-d16893e9664" class="topicref" data-id="concept_dzq_djt_vcb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_dzq_djt_vcb" id="concept_dzq_djt_vcb-d16893e9664-link">Update Tableau CRM Pipelines</a><div class="wh-tooltip"><p class="shortdesc">Starting with version 3.1.0.0, the Tableau CRM destination, previously known as the         Einstein Analytics destination, introduces a new append operation that lets you combine data         into a single dataset. Configuring the destination to use dataflows to combine data into a         single dataset has been deprecated.</p></div></div></div></li><li role="treeitem"><div data-tocid="concept_wnp_scs_wbb-d16893e9689" class="topicref" data-id="concept_wnp_scs_wbb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_wnp_scs_wbb" id="concept_wnp_scs_wbb-d16893e9689-link">Disable Cloudera Navigator Integration</a></div></div></li><li role="treeitem"><div data-tocid="concept_hky_ljl_wbb-d16893e9711" class="topicref" data-id="concept_hky_ljl_wbb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_hky_ljl_wbb" id="concept_hky_ljl_wbb-d16893e9711-link">JDBC Multitable Consumer Query Interval Change</a></div></div></li><li role="treeitem"><div data-tocid="concept_ys3_bjl_wbb-d16893e9733" class="topicref" data-id="concept_ys3_bjl_wbb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_ys3_bjl_wbb" id="concept_ys3_bjl_wbb-d16893e9733-link">Update JDBC Query Consumer Pipelines used for SQL Server CDC 
Data</a></div></div></li><li role="treeitem"><div data-tocid="concept_ncs_5jl_wbb-d16893e9755" class="topicref" data-id="concept_ncs_5jl_wbb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_ncs_5jl_wbb" id="concept_ncs_5jl_wbb-d16893e9755-link">Update MongoDB Destination Upsert Pipelines</a></div></div></li><li role="treeitem"><div data-tocid="concept_o1s_gkl_wbb-d16893e9777" class="topicref" data-id="concept_o1s_gkl_wbb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_o1s_gkl_wbb" id="concept_o1s_gkl_wbb-d16893e9777-link">Time Zones in Stages</a></div></div></li><li role="treeitem"><div data-tocid="concept_epj_gqd_rx-d16893e9799" class="topicref" data-id="concept_epj_gqd_rx" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_epj_gqd_rx" id="concept_epj_gqd_rx-d16893e9799-link">Update Kudu Pipelines</a></div></div></li><li role="treeitem"><div data-tocid="concept_hhc_15s_dbb-d16893e9821" class="topicref" data-id="concept_hhc_15s_dbb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_hhc_15s_dbb" id="concept_hhc_15s_dbb-d16893e9821-link">Update JDBC Multitable Consumer Pipelines</a></div></div></li><li role="treeitem"><div data-tocid="concept_yyv_v45_zw-d16893e9843" class="topicref" data-id="concept_yyv_v45_zw" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_yyv_v45_zw" id="concept_yyv_v45_zw-d16893e9843-link">Update Vault Pipelines</a></div></div></li><li role="treeitem"><div 
data-tocid="concept_cmh_ryd_pz-d16893e9865" class="topicref" data-id="concept_cmh_ryd_pz" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_cmh_ryd_pz" id="concept_cmh_ryd_pz-d16893e9865-link">Configure JDBC Producer Schema Names</a></div></div></li><li role="treeitem"><div data-tocid="concept_gk3_s5l_nz-d16893e9888" class="topicref" data-id="concept_gk3_s5l_nz" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_gk3_s5l_nz" id="concept_gk3_s5l_nz-d16893e9888-link">Evaluate Precondition Error Handling</a></div></div></li><li role="treeitem"><div data-tocid="concept_czx_bbn_gz-d16893e9910" class="topicref" data-id="concept_czx_bbn_gz" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_czx_bbn_gz" id="concept_czx_bbn_gz-d16893e9910-link">Authentication for Docker Image</a></div></div></li><li role="treeitem"><div data-tocid="concept_zbn_fpw_xy-d16893e9932" class="topicref" data-id="concept_zbn_fpw_xy" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#concept_zbn_fpw_xy" id="concept_zbn_fpw_xy-d16893e9932-link">Configure Pipeline Permissions</a></div></div></li><li role="treeitem"><div data-tocid="task_afy_k12_ry-d16893e9954" class="topicref" data-id="task_afy_k12_ry" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/PostUpgrade.html#task_afy_k12_ry" id="task_afy_k12_ry-d16893e9954-link">Update Elasticsearch Pipelines</a><div class="wh-tooltip"><p class="shortdesc"><span class="ph">Data Collector</span>         version 2.3.0.0 includes an enhanced 
Elasticsearch destination that uses the Elasticsearch         HTTP API. To upgrade pipelines that use the Elasticsearch destination from <span class="ph">Data Collector</span>         versions earlier than 2.3.0.0, you must review the value of the Default Operation         property.</p></div></div></div></li></ul></li><li role="treeitem" aria-expanded="false"><div data-tocid="task_ijh_wtw_xy-d16893e9984" class="topicref" data-id="task_ijh_wtw_xy" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action task_ijh_wtw_xy-d16893e9984-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/Upgrade-ExternalSystems.html#task_ijh_wtw_xy" id="task_ijh_wtw_xy-d16893e9984-link">Working with Upgraded External Systems</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_dgz_p45_gy-d16893e10080" class="topicref" data-id="concept_dgz_p45_gy" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/UpgradeTroubleshooting.html#concept_dgz_p45_gy" id="concept_dgz_p45_gy-d16893e10080-link">Troubleshooting an Upgrade</a></div></div></li></ul></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_qsw_cjy_bt-d16893e10103" class="topicref" data-id="concept_qsw_cjy_bt" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_qsw_cjy_bt-d16893e10103-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Pipeline_Design/PipelineDesign_title.html" id="concept_qsw_cjy_bt-d16893e10103-link">Pipeline Concepts and Design</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_qn1_wn4_kq-d16893e11199" class="topicref" data-id="concept_qn1_wn4_kq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action 
concept_qn1_wn4_kq-d16893e11199-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Pipeline_Configuration/PipelineConfiguration_title.html" id="concept_qn1_wn4_kq-d16893e11199-link">Pipeline Configuration</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_hdr_gyw_41b-d16893e13057" class="topicref" data-id="concept_hdr_gyw_41b" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_hdr_gyw_41b-d16893e13057-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Data_Formats/DataFormats-Title.html" id="concept_hdr_gyw_41b-d16893e13057-link">Data Formats</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_yjl_nc5_jq-d16893e14164" class="topicref" data-id="concept_yjl_nc5_jq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_yjl_nc5_jq-d16893e14164-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Origins/Origins_title.html" id="concept_yjl_nc5_jq-d16893e14164-link">Origins</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_yjl_nc5_jq-d16893e35197" class="topicref" data-id="concept_yjl_nc5_jq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_yjl_nc5_jq-d16893e35197-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Processors/Processors_title.html" id="concept_yjl_nc5_jq-d16893e35197-link">Processors</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_agj_cfj_br-d16893e44037" class="topicref" data-id="concept_agj_cfj_br" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_agj_cfj_br-d16893e44037-link" 
class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Destinations/Destinations-title.html" id="concept_agj_cfj_br-d16893e44037-link">Destinations</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_umc_1lk_fx-d16893e56072" class="topicref" data-id="concept_umc_1lk_fx" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_umc_1lk_fx-d16893e56072-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Executors/Executors-title.html" id="concept_umc_1lk_fx-d16893e56072-link">Executors</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_xxd_f5r_kx-d16893e59696" class="topicref" data-id="concept_xxd_f5r_kx" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_xxd_f5r_kx-d16893e59696-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Event_Handling/EventFramework-Title.html#concept_xxd_f5r_kx" id="concept_xxd_f5r_kx-d16893e59696-link">Dataflow Triggers</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_zq5_pb4_flb-d16893e60134" class="topicref" data-id="concept_zq5_pb4_flb" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_zq5_pb4_flb-d16893e60134-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Solutions/Solutions-title.html" id="concept_zq5_pb4_flb-d16893e60134-link">Solutions</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_ugp_kwf_xw-d16893e61337" class="topicref" data-id="concept_ugp_kwf_xw" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_ugp_kwf_xw-d16893e61337-link" class="wh-expand-btn"></span><div class="title"><a 
href="../../../datacollector/UserGuide/DPM/DPM_title.html" id="concept_ugp_kwf_xw-d16893e61337-link">StreamSets Control Hub</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_fyf_gkq_4bb-d16893e62693" class="topicref" data-id="concept_fyf_gkq_4bb" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_fyf_gkq_4bb-d16893e62693-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Edge_Mode/EdgePipelines_title.html" id="concept_fyf_gkq_4bb-d16893e62693-link"><span class="ph">StreamSets Data Collector Edge</span></a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_wwq_gxc_py-d16893e63980" class="topicref" data-id="concept_wwq_gxc_py" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_wwq_gxc_py-d16893e63980-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Multithreaded_Pipelines/MultithreadedPipelines.html#concept_wwq_gxc_py" id="concept_wwq_gxc_py-d16893e63980-link">Multithreaded Pipelines</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_gzw_tdm_p2b-d16893e64187" class="topicref" data-id="concept_gzw_tdm_p2b" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_gzw_tdm_p2b-d16893e64187-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Microservice/Microservice_Title.html#concept_gzw_tdm_p2b" id="concept_gzw_tdm_p2b-d16893e64187-link">Microservice Pipelines</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="Orchestrators_Title-d16893e64348" class="topicref" data-id="Orchestrators_Title" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action Orchestrators_Title-d16893e64348-link" class="wh-expand-btn"></span><div 
class="title"><a href="../../../datacollector/UserGuide/Orchestration_Pipelines/OrchestrationPipelines_Title.html#Orchestrators_Title" id="Orchestrators_Title-d16893e64348-link">Orchestration Pipelines</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_wr1_ktz_bt-d16893e64489" class="topicref" data-id="concept_wr1_ktz_bt" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_wr1_ktz_bt-d16893e64489-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/RPC_Pipelines/SDC_RPCpipelines_title.html#concept_wr1_ktz_bt" id="concept_wr1_ktz_bt-d16893e64489-link">SDC RPC Pipelines</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_fpz_5r4_vs-d16893e64679" class="topicref" data-id="concept_fpz_5r4_vs" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_fpz_5r4_vs-d16893e64679-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Cluster_Mode/ClusterPipelines_title.html" id="concept_fpz_5r4_vs-d16893e64679-link">Cluster Pipelines</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_jjk_23z_sq-d16893e65172" class="topicref" data-id="concept_jjk_23z_sq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_jjk_23z_sq-d16893e65172-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Data_Preview/DataPreview_Title.html#concept_jjk_23z_sq" id="concept_jjk_23z_sq-d16893e65172-link">Data Preview</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_pgk_brx_rr-d16893e65458" class="topicref" data-id="concept_pgk_brx_rr" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_pgk_brx_rr-d16893e65458-link" 
class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Alerts/RulesAlerts_title.html#concept_pgk_brx_rr" id="concept_pgk_brx_rr-d16893e65458-link">Rules and Alerts</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_asx_fdz_sq-d16893e65960" class="topicref" data-id="concept_asx_fdz_sq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_asx_fdz_sq-d16893e65960-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Pipeline_Monitoring/PipelineMonitoring_title.html#concept_asx_fdz_sq" id="concept_asx_fdz_sq-d16893e65960-link">Pipeline Monitoring</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_o3l_dtr_5q-d16893e66304" class="topicref" data-id="concept_o3l_dtr_5q" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_o3l_dtr_5q-d16893e66304-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Pipeline_Maintenance/PipelineMaintenance_title.html#concept_o3l_dtr_5q" id="concept_o3l_dtr_5q-d16893e66304-link">Pipeline Maintenance</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_yms_ftm_sq-d16893e66768" class="topicref" data-id="concept_yms_ftm_sq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_yms_ftm_sq-d16893e66768-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Administration/Administration_title.html#concept_yms_ftm_sq" id="concept_yms_ftm_sq-d16893e66768-link">Administration</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_nls_w1r_ks-d16893e67508" class="topicref" data-id="concept_nls_w1r_ks" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action 
concept_nls_w1r_ks-d16893e67508-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Tutorial/Tutorial-title.html" id="concept_nls_w1r_ks-d16893e67508-link">Tutorial</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_sh3_frm_tq-d16893e68001" class="topicref" data-id="concept_sh3_frm_tq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_sh3_frm_tq-d16893e68001-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Troubleshooting/Troubleshooting_title.html#concept_sh3_frm_tq" id="concept_sh3_frm_tq-d16893e68001-link">Troubleshooting</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_xbx_rs1_tq-d16893e68798" class="topicref" data-id="concept_xbx_rs1_tq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_xbx_rs1_tq-d16893e68798-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Glossary/Glossary_title.html#concept_xbx_rs1_tq" id="concept_xbx_rs1_tq-d16893e68798-link">Glossary</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_jn1_nzb_kv-d16893e68843" class="topicref" data-id="concept_jn1_nzb_kv" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_jn1_nzb_kv-d16893e68843-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Apx-DataFormats/DataFormat_Title.html#concept_jn1_nzb_kv" id="concept_jn1_nzb_kv-d16893e68843-link">Data Formats by Stage</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_pvm_yt3_wq-d16893e68958" class="topicref" data-id="concept_pvm_yt3_wq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action 
concept_pvm_yt3_wq-d16893e68958-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Expression_Language/ExpressionLanguage_title.html" id="concept_pvm_yt3_wq-d16893e68958-link">Expression Language</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_vcj_1ws_js-d16893e69669" class="topicref" data-id="concept_vcj_1ws_js" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_vcj_1ws_js-d16893e69669-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Apx-RegEx/RegEx-Title.html#concept_vcj_1ws_js" id="concept_vcj_1ws_js-d16893e69669-link">Regular Expressions</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_chv_vmj_wr-d16893e69787" class="topicref" data-id="concept_chv_vmj_wr" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_chv_vmj_wr-d16893e69787-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Apx-GrokPatterns/GrokPatterns_title.html#concept_chv_vmj_wr" id="concept_chv_vmj_wr-d16893e69787-link">Grok Patterns</a></div></div></li></ul></div>
                        

</div>
</nav>
                    


                    
                    <div id="wh_topic_body" class="col-lg-7 col-md-9 col-sm-12">
<button id="wh_close_publication_toc_button" class="close-toc-button d-none" aria-label="Toggle publishing table of content" aria-controls="wh_publication_toc" aria-expanded="true"><span class="close-toc-icon-container"><span class="close-toc-icon"></span></span></button><button id="wh_close_topic_toc_button" class="close-toc-button d-none" aria-label="Toggle topic table of content" aria-controls="wh_topic_toc" aria-expanded="true"><span class="close-toc-icon-container"><span class="close-toc-icon"></span></span></button>

                        
<div class=" wh_topic_content body "><main role="main"><article class="" role="article" aria-labelledby="ariaid-title1"><article class="nested0" aria-labelledby="ariaid-title1" id="concept_zll_vn5_zw">
    <h1 class="- topic/title title topictitle1" id="ariaid-title1">Post Upgrade Tasks</h1>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        
        
    </div>
<article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title2" id="concept_dzn_cdy_1cb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title2">Update <span class="- topic/ph ph">Control Hub</span> On-Premises</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc">By default, <span class="- topic/ph ph">StreamSets Control Hub</span>
        on-premises can work with registered <span class="- topic/ph ph">Data Collector</span>s from
        version 2.1.0.0 to the current version of <span class="- topic/ph ph">Control Hub</span>. If you
        use <span class="- topic/ph ph">Control Hub</span>
        on-premises and you upgrade registered <span class="- topic/ph ph">Data Collector</span>s to a
        version higher than your current version of <span class="- topic/ph ph">Control Hub</span>, you
        might need to modify the <span class="- topic/ph ph">Data Collector</span> version
        range within your <span class="- topic/ph ph">Control Hub</span>
        installation.</p>
        <p class="- topic/p p">For example, if you use <span class="- topic/ph ph">Control Hub</span>
            on-premises version 3.8.0 and you upgrade registered <span class="- topic/ph ph">Data Collector</span>s to
            version <span class="- topic/ph ph">5.11.0</span>, you must update the maximum <span class="- topic/ph ph">Data Collector</span>
            version that can work with <span class="- topic/ph ph">Control Hub</span>. As
            a best practice, configure the maximum <span class="- topic/ph ph">Data Collector</span>
            version to <span class="- topic/ph ph">5.99.999</span> to ensure that <span class="- topic/ph ph">Data Collector</span>
            upgrades to later minor versions, such as <span class="- topic/ph ph">5.12.0</span> or <span class="- topic/ph ph">5.13.0</span>, will continue to work with <span class="- topic/ph ph">Control Hub</span>.</p>
        <div class="- topic/note note note note_note"><span class="note__title">Note:</span> If you register <span class="- topic/ph ph">Data Collector</span>
            version 3.19.x or later with <span class="- topic/ph ph">Control Hub</span>
            on-premises version 3.18.x or earlier, then some stages in the <span class="- topic/ph ph">Control Hub</span>
            <span class="- topic/ph ph">Pipeline Designer</span> display a Connection property that is not supported. Do not change the property from
            the default value of None. If you select Choose Value or use a parameter to define the
            property, <span class="- topic/ph ph">Pipeline Designer</span> hides the remaining connection properties and the pipeline fails to run.</div>
        <p class="- topic/p p">To modify the <span class="- topic/ph ph">Data Collector</span>
            version range:</p>
        <div class="- topic/p p">
            <ol class="- topic/ol ol" id="concept_dzn_cdy_1cb__ol_xbl_f2y_1cb" data-ofbid="concept_dzn_cdy_1cb__ol_xbl_f2y_1cb">
                <li class="- topic/li li">Log in to <span class="- topic/ph ph">Control Hub</span> as the default system administrator - the admin@admin user account.</li>
                <li class="- topic/li li">In the Navigation panel, click <span class="+ topic/ph ui-d/menucascade ph menucascade"><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Administration</span><abbr title="and then"> &gt; </abbr><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Data Collectors</span></span>.</li>
                <li class="- topic/li li">Click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Component Version Range</span> icon: <img class="- topic/image image" id="concept_dzn_cdy_1cb__image_hd3_dfy_1cb" src="../Graphics/icon_OrganizationConfig.png" height="16" width="14"/>.</li>
                <li class="- topic/li li">Enter the maximum <span class="- topic/ph ph">Data Collector</span> version that can work with <span class="- topic/ph ph">Control Hub</span>, such as <span class="- topic/ph ph">5.99.999</span>.</li>
            </ol>
        </div>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title3" id="concept_wnl_zk4_5bb">
 <h2 class="- topic/title title topictitle2" id="ariaid-title3">Update Pipelines using Legacy Stage Libraries</h2>
 <div class="- topic/body concept/conbody body conbody">
        <div class="- topic/p p">When you upgrade, review the complete list of <a class="- topic/xref xref" href="../Installation/AddtionalStageLibs.html#concept_fw3_zt3_tbb">legacy stage
                libraries</a>. If your upgraded pipelines use these legacy stage libraries, the
            pipelines will not run until you perform one of the following tasks:<dl class="- topic/dl dl">
                
                    <dt class="- topic/dt dt dlterm">Use a current stage library</dt>
                    <dd class="- topic/dd dd">We strongly recommend that you upgrade your system and use a current stage
                        library in the pipeline:<ol class="- topic/ol ol" id="concept_wnl_zk4_5bb__ol_fct_hl4_5bb" data-ofbid="concept_wnl_zk4_5bb__ol_fct_hl4_5bb">
                            <li class="- topic/li li">Upgrade the system to a more current version.</li>
                            <li class="- topic/li li"><a class="- topic/xref xref" href="../Installation/AddtionalStageLibs.html#concept_fb2_qmn_bz">Install the stage library</a> for the upgraded system. </li>
                            <li class="- topic/li li">In the pipeline, edit the stage and select the appropriate stage
                                library. </li>
                        </ol></dd>
                
                
                    <dt class="- topic/dt dt dlterm">Install the legacy stage library</dt>
                    <dd class="- topic/dd dd">Though not recommended, you can install the older stage libraries. For more
                        information, see <a class="- topic/xref xref" href="../Installation/AddtionalStageLibs.html#concept_fw3_zt3_tbb">Legacy
                            Stage Libraries</a>.</dd>
                
            </dl></div>
 </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title4" id="concept_s25_4ll_fcc">
    <h2 class="- topic/title title topictitle2" id="ariaid-title4">Removed Databricks ML Evaluator processor</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">The Databricks ML Evaluator processor, which was deprecated in an earlier version, has
            been removed from Data Collector with version 5.12.0. As an alternative, you can use
            StreamSets Transformer. For more information, see the <a class="- topic/xref xref" href="https://streamsets.com/documentation/transformer/latest/help/index.html?contextID=concept_a1b_zf4_pgb" target="_blank" rel="external noopener">Transformer documentation</a>.</p>
        <p class="- topic/p p">After upgrading to version 5.12.0 or later, verify that no pipelines are using the
            Databricks ML Evaluator processor.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title5" id="concept_y5q_bml_fcc">
    <h2 class="- topic/title title topictitle2" id="ariaid-title5">Install the Oracle JDBC driver for upgraded Oracle Multitable Consumer origins and Oracle
        destinations</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.12.0, the Oracle JDBC driver is no longer included in the
                <code class="+ topic/ph pr-d/codeph ph codeph">streamsets-datacollector-jdbc-branded-oracle-lib</code> stage library. You
            must manually install the driver into the stage library before using an Oracle
            Multitable Consumer origin or Oracle destination.</p>
        <p class="- topic/p p">After upgrading to version 5.12.0 or later, if you have upgraded pipelines using an <a class="- topic/xref xref" href="../Origins/OracleMultitableConsumer.html#concept_sj3_qrl_fcc">Oracle Multitable Consumer
                origin</a> or <a class="- topic/xref xref" href="../Destinations/Oracle.html#concept_msy_vsl_fcc">Oracle
                destination</a>, install the Oracle JDBC driver as an <a class="- topic/xref xref" href="../Configuration/ExternalLibs.html#concept_pdv_qlw_ft">external library</a> for the
                <code class="+ topic/ph pr-d/codeph ph codeph">streamsets-datacollector-jdbc-branded-oracle-lib</code> stage library.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title6" id="concept_fgg_ntp_xbc">
    <h2 class="- topic/title title topictitle2" id="ariaid-title6">Review Pipelines with Google BigQuery or Snowflake Destinations Writing JSON Data</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.11.0, you cannot configure characters to represent null values or
            newline characters for Google BigQuery or Snowflake destinations when writing JSON data.
            Upgraded destinations do not change null values or newline characters.</p>
        <p class="- topic/p p">After upgrading to version 5.11.0 or later, review pipelines using Google BigQuery or
            Snowflake destinations writing JSON data to ensure the destination does not receive any
            null values or newline characters from the pipeline that should not be passed to the
            external system.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title7" id="concept_s3n_htp_xbc">
    <h2 class="- topic/title title topictitle2" id="ariaid-title7">Review Snowflake File Uploader Staging Details</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.11.0, the Database and Schema properties have been removed from
            the Snowflake File Uploader destination. In Data Collector 5.10.0, if the Stage Database
            and Stage Schema properties were not configured, the destination used the database and
            schema values configured for the destination or Control Hub connection instead.</p>
        <p class="- topic/p p">When upgrading from version 5.10.0 to 5.11.0 or later, Snowflake File Uploader
            destinations that do not use a Control Hub connection and do not have values configured
            for the Stage Database or Stage Schema properties are assigned a staging database value
            equal to the configured database value and a staging schema value equal to the
            configured schema value. Snowflake File Uploader destinations that use a Control Hub
            connection and do not have values configured for the Stage Database or Stage Schema
            properties are not assigned any values for these properties and must have them
            configured after upgrading.</p>
        <p class="- topic/p p">After upgrading to version 5.11.0 or later from version 5.10.0, review Snowflake File
            Uploader destinations that use Control Hub connections and make sure the Stage Database
            and Stage Schema properties are configured.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title8" id="concept_us5_ctp_xbc">
    <h2 class="- topic/title title topictitle2" id="ariaid-title8">Review Pipeline Notification Email Configurations</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.11.0, the format of pipeline notification emails has changed to
            include a new error code format.</p>
        <p class="- topic/p p">After upgrading to version 5.11.0 or later, review notification email configurations for
            upgraded pipelines to ensure they behave as expected.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title9" id="concept_emc_vsp_xbc">
    <h2 class="- topic/title title topictitle2" id="ariaid-title9">Review the Batch Wait Time for Directory Origins</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.11.0, the origin correctly interprets the batch wait time value
            as seconds. In earlier releases, the origin incorrectly interpreted the value as
            milliseconds.</p>
        <p class="- topic/p p">After upgrading to version 5.11.0 or later, review Directory origins to ensure they are
            configured with an appropriate batch wait time.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title10" id="concept_cyx_y1d_1bc">
    <h2 class="- topic/title title topictitle2" id="ariaid-title10">Review the Oracle CDC Client Record Cache Size</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.10.0, you can configure the maximum size of the record cache for
            an Oracle CDC Client origin using the Records Cache Size property. Upgraded pipelines
            are given the default value of -2, which represents two times the batch size.</p>
        <p class="- topic/p p">After upgrading to Data Collector 5.10.0 or later, verify that Oracle CDC Client origins
            are configured with the appropriate record cache size.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title11" id="concept_lwn_dbd_1bc">
    <h2 class="- topic/title title topictitle2" id="ariaid-title11">Review Search Mode Behavior for Start Jobs Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.10.0, Start Jobs stages have updated search mode options.
            Pipelines upgraded from version 5.2.x or earlier that were configured with the contain
            search mode option are updated to use the new contains unique search mode option.</p>
        <p class="- topic/p p">After upgrading to Data Collector 5.10.0 or later from Data Collector 5.2.x or earlier,
            verify that Start Jobs pipelines are using the appropriate search mode.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title12" id="concept_es4_vvk_f1c">
    <h2 class="- topic/title title topictitle2" id="ariaid-title12">Review the Maximum Batch Vault Size for Oracle CDC Origin Pipelines</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 5.9.0, the Oracle CDC origin has a Max Batch Vault Size property
            that allows you to configure the maximum number of batches the origin pre-generates
            while the pipeline is processing other batches. In upgraded pipelines, the origin uses
            the default maximum batch vault size of 64.</p>
        <p class="- topic/p p">After you upgrade to version 5.9.0 or later, review Oracle CDC origin pipelines. If the
            maximum batch vault size is not appropriate, update the pipelines accordingly.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title13" id="concept_pm3_2wk_f1c">
    <h2 class="- topic/title title topictitle2" id="ariaid-title13">Review Amazon, Azure, Data Parser, JMS Consumer, and Pulsar Consumer Origin
        Pipelines</h2>
    <div class="- topic/body concept/conbody body conbody">
        <div class="- topic/p p">Starting with version 5.9.0, the following origins no longer read tables that contain
            multiple columns with the same name:<ul class="- topic/ul ul" id="concept_pm3_2wk_f1c__ul_asp_bx2_f1c" data-ofbid="concept_pm3_2wk_f1c__ul_asp_bx2_f1c">
                <li class="- topic/li li">
                    <p class="- topic/p p">Amazon S3</p>
                </li>
                <li class="- topic/li li">
                    <p class="- topic/p p">Amazon SQS Consumer</p>
                </li>
                <li class="- topic/li li">
                    <p class="- topic/p p">Azure Blob Storage</p>
                </li>
                <li class="- topic/li li">
                    <p class="- topic/p p">Azure Data Lake Storage Gen2</p>
                </li>
                <li class="- topic/li li">
                    <p class="- topic/p p">Data Parser</p>
                </li>
                <li class="- topic/li li">
                    <p class="- topic/p p">JMS Consumer</p>
                </li>
                <li class="- topic/li li">
                    <p class="- topic/p p">Pulsar Consumer</p>
                </li>
                <li class="- topic/li li">
                    <p class="- topic/p p">Pulsar Consumer (Legacy)</p>
                </li>
            </ul></div><p class="- topic/p p">When configured to read tables that contain duplicate column names,
                            the origin treats the tables as invalid and generates an
                            error.</p><p class="- topic/p p">After you upgrade to version 5.9.0 or later, review pipelines that use these origins. If any
            pipelines require the ability to read tables containing multiple columns with the same
            name, configure the origins to ignore column headers.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title14" id="concept_fvp_nmw_xbc">
    <h2 class="- topic/title title topictitle2" id="ariaid-title14">Review JDBC Lookup Processor SQL Query Configuration</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.9.0, stability and performance improvements to the JDBC Lookup
            processor cause the processor to strictly enforce the requirement of a WHERE clause in
            SQL queries.</p>
        <p class="- topic/p p">After upgrading to version 5.9.0 or later, verify that the SQL Query property for each
            JDBC Lookup processor is configured with a WHERE clause.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title15" id="concept_xgw_njm_nzb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title15">Review Oracle Bulkload Origin Pipelines</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 5.8.0, pipelines using the Oracle Bulkload origin no longer fail
            when the origin encounters an empty table. This change might cause Oracle Bulkload
            pipelines created with earlier versions of Data Collector to behave in unexpected
            ways.</p>
        <p class="- topic/p p">After you upgrade to version 5.8.0 or later, review any pipelines that use the Oracle
            Bulkload origin to ensure they behave as expected.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title16" id="concept_lkb_z5t_31c">
    <h2 class="- topic/title title topictitle2" id="ariaid-title16">Update stages that were using Enterprise stage libraries</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 5.8.0, <span class="- topic/ph ph">Data Collector</span> no
            longer supports Enterprise stage libraries.</p>
        <div class="- topic/p p">After you upgrade to 5.8.0 or later, update stages using any of the following Enterprise
            stage libraries by installing the stage library as a <a class="- topic/xref xref" href="../Configuration/CustomStageLibraries.html#task_qvz_xdb_1x">custom stage library</a>:<ul class="- topic/ul ul" id="concept_lkb_z5t_31c__ul_a1p_zst_31c" data-ofbid="concept_lkb_z5t_31c__ul_a1p_zst_31c">
                <li class="- topic/li li">GPSS</li>
                <li class="- topic/li li">MemSQL</li>
                <li class="- topic/li li">Protector </li>
                <li class="- topic/li li">Microsoft SQL Server 2019 Big Data Cluster </li>
                <li class="- topic/li li">Teradata </li>
            </ul></div>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title17" id="concept_m14_bqx_xyb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title17">Grant Users View Access for the Oracle CDC Origin</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.7.0, the Oracle CDC origin must use a user account with access to
            the all_tab_cols view.</p>
        <div class="- topic/p p">After you upgrade to version 5.7.0 or later, run the following command in Oracle to grant
            the user account access to the
            view:<pre class="+ topic/pre pr-d/codeblock pre codeblock" id="concept_m14_bqx_xyb__codeblock_ixr_gqx_xyb" data-ofbid="concept_m14_bqx_xyb__codeblock_ixr_gqx_xyb"><code>grant select on all_tab_cols to &lt;user name&gt;;</code></pre></div>
        <p class="- topic/p p">For CDB databases, run the command from the root container, <code class="+ topic/ph pr-d/codeph ph codeph">cdb$root</code>.
            Then run it again from the pluggable database. For non-CDB databases, run the command
            from the primary database.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title18" id="concept_adq_4qx_xyb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title18">Review Amazon S3, Azure Blob Storage, and Azure Data Lake Storage Gen2 Origin
        Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p dir="ltr" class="- topic/p p">Starting with version 5.7.0, Amazon S3, Azure Blob Storage, and Azure Data Lake
            Storage Gen2 origins have a File Processing Delay property that allows you to configure
            the minimum number of milliseconds that must pass from the time a file is created before
            it is processed. In upgraded pipelines these origins receive the default file processing
            delay of 10,000 milliseconds. </p>
        <p dir="ltr" class="- topic/p p">After you upgrade to version 5.7.0 or later, review pipelines that include the
            Amazon S3, Azure Blob Storage, and Azure Data Lake Storage Gen2 origins. If the 10,000
            millisecond delay is not appropriate, update the pipelines accordingly.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title19" id="concept_okh_h2z_hbc">
    <h2 class="- topic/title title topictitle2" id="ariaid-title19">Review the Batch Wait Time for <span class="- topic/ph ph">ADLS Gen1, </span>ADLS Gen2 (Legacy),
        Directory, and Hadoop FS Standalone Origins</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Versions of <span class="- topic/ph ph">Data Collector</span>
            prior to 5.7.0 incorrectly treated the batch wait time value configured for <span class="- topic/ph ph">ADLS Gen1, </span>ADLS Gen2 (Legacy), Directory, and Hadoop FS Standalone
            origins as milliseconds instead of seconds. Starting with version 5.7.0, <span class="- topic/ph ph">Data Collector</span>
            treats the batch wait time value as seconds, which can increase the wait time for empty
            batches in upgraded pipelines.</p><p class="- topic/p p">After upgrading to version 5.7.0 or later, review the batch wait time for <span class="- topic/ph ph">ADLS
                Gen1, </span>ADLS Gen2 (Legacy), Directory, and Hadoop FS Standalone origins in
            upgraded pipelines, and update the value if necessary.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title20" id="concept_mpq_h3m_2yb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title20">Review Amazon S3 and Databricks Delta Lake Stages</h2>
    <div class="- topic/body concept/conbody body conbody">
        <div class="- topic/p p">Starting with version 5.6.0, you can no longer include the forward slash (/) in the
            following properties due to an Amazon Web Services (AWS) SDK upgrade:<ul class="- topic/ul ul" id="concept_mpq_h3m_2yb__ul_ops_hgm_2yb" data-ofbid="concept_mpq_h3m_2yb__ul_ops_hgm_2yb">
                <li class="- topic/li li">Bucket property for the Amazon S3 origin</li>
                <li class="- topic/li li">Bucket and path property for the Amazon S3 destination and executor</li>
                <li class="- topic/li li">Bucket property for the Databricks Delta Lake destination when staging files to
                    Amazon S3</li>
            </ul></div>
        <p class="- topic/p p">For more information about this change, see the <a class="- topic/xref xref" href="https://github.com/aws/aws-sdk-java/blob/master/CHANGELOG.md#bugfixes-1" target="_blank" rel="external noopener">aws-sdk-java list of Amazon S3 bug fixes</a>. </p>
        <div class="- topic/p p">As a result, you can define only the bucket name in these bucket properties. Use the
            following properties for each stage to define the path to an object inside the bucket:
                <ul class="- topic/ul ul" id="concept_mpq_h3m_2yb__ul_np9_kgm_2yb" data-ofbid="concept_mpq_h3m_2yb__ul_np9_kgm_2yb">
                <li class="- topic/li li">Amazon S3 origin - Common Prefix and Prefix Pattern properties</li>
                <li class="- topic/li li">Amazon S3 destination - Common Prefix and Partition Prefix properties</li>
                <li class="- topic/li li">Amazon S3 executor - Object property on the Tasks tab</li>
                <li class="- topic/li li">Databricks Delta Lake destination - Stage File Prefix property on the Staging
                    tab</li>
            </ul></div>
        <p class="- topic/p p">After you upgrade to version 5.6.0 or later, review the bucket property in these stages
            to ensure that the property defines the bucket name only. Modify the properties as
            needed to define only the bucket name in the bucket property and to define the path in
            the remaining properties.</p>
        <div class="- topic/p p">For example, if an Amazon S3 origin configured in an earlier <span class="- topic/ph ph">Data Collector</span>
            version defines the properties as follows: <ul class="- topic/ul ul" id="concept_mpq_h3m_2yb__ul_akl_4gm_2yb" data-ofbid="concept_mpq_h3m_2yb__ul_akl_4gm_2yb">
                <li class="- topic/li li">Bucket: <code class="+ topic/ph pr-d/codeph ph codeph">orders/US/West</code></li>
                <li class="- topic/li li">Common Prefix:</li>
                <li class="- topic/li li">Prefix Pattern: <code class="+ topic/ph pr-d/codeph ph codeph">**/*.log</code></li>
            </ul></div>
        <div class="- topic/p p">Update the properties as follows:
            <ul class="- topic/ul ul" id="concept_mpq_h3m_2yb__ul_zrw_vbv_2yb" data-ofbid="concept_mpq_h3m_2yb__ul_zrw_vbv_2yb">
                <li class="- topic/li li">Bucket: <code class="+ topic/ph pr-d/codeph ph codeph">orders</code></li>
                <li class="- topic/li li">Common Prefix: <code class="+ topic/ph pr-d/codeph ph codeph">US/West/</code></li>
                <li class="- topic/li li">Prefix Pattern: <code class="+ topic/ph pr-d/codeph ph codeph">**/*.log</code></li>
            </ul></div>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title21" id="concept_nl1_gft_wxb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title21">Install the Databricks Stage Library</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 5.6.0, the Databricks Delta Lake destination, Databricks Query
            executor, and Databricks Delta Lake connection require the Databricks stage library. In
            previous releases, they required the Databricks Enterprise stage library.</p>
        <p class="- topic/p p">After you upgrade to version 5.6.0 or later, install the Databricks stage library,
                <code class="+ topic/ph pr-d/codeph ph codeph">streamsets-datacollector-sdc-databricks-lib</code>, to enable pipelines
            and jobs that use these Databricks stages or the Databricks connection to run as
            expected.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title22" id="concept_qqd_5ft_wxb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title22">Review Databricks Stages</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 5.6.0, the scheme of the URL or connection string for the
            Databricks Delta Lake destination and Databricks Query executor is
                <code class="+ topic/ph pr-d/codeph ph codeph">jdbc:databricks</code> rather than <code class="+ topic/ph pr-d/codeph ph codeph">jdbc:spark</code>.</p>
        <div class="- topic/p p">After you upgrade to version 5.6.0 or later, review the JDBC URL property in the
            Databricks Delta Lake destination and the JDBC Connection String property in the
            Databricks Query executor to ensure that the scheme resolves to
                <code class="+ topic/ph pr-d/codeph ph codeph">jdbc:databricks</code>. <div class="- topic/note note note note_note" id="concept_qqd_5ft_wxb__note_e13_k2s_wxb" data-ofbid="concept_qqd_5ft_wxb__note_e13_k2s_wxb"><span class="note__title">Note:</span> The upgrade process
                does not update runtime parameters. You must manually change runtime parameters that
                define the URL or connection string.</div></div>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title23" id="concept_g1z_cgt_wxb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title23">Update the Databricks Delta Lake Connection</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 5.6.0, the scheme of the URL is <code class="+ topic/ph pr-d/codeph ph codeph">jdbc:databricks</code>
            rather than <code class="+ topic/ph pr-d/codeph ph codeph">jdbc:spark</code>.</p>
        <p class="- topic/p p">After you update a connection to use a version 5.6.0 or later authoring <span class="- topic/ph ph">Data Collector</span>,
            edit the JDBC URL property to use the <code class="+ topic/ph pr-d/codeph ph codeph">jdbc:databricks</code> scheme.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title24" id="concept_wbv_vty_m1c">
    <h2 class="- topic/title title topictitle2" id="ariaid-title24">Review Scripts in Jython Stages</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 5.6.0, Jython stages use Jython 2.7.3 to process data. </p>
        <p class="- topic/p p">After you upgrade to version 5.6.0 or later, review the scripts used in the Jython
            Scripting origin and the Jython Evaluator processor to ensure that they process data as
            expected. </p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title25" id="concept_zw1_ngt_wxb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title25">Install the JDBC Oracle Stage Library</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 5.6.0, the Oracle Bulkload origin requires the JDBC Oracle stage
            library. In previous releases, the origin required the Oracle Enterprise stage
            library.</p>
        <p class="- topic/p p">After you upgrade to version 5.6.0 or later, install the JDBC Oracle stage library,
                <code class="+ topic/ph pr-d/codeph ph codeph">streamsets-datacollector-jdbc-oracle-lib</code>, to enable pipelines
            and jobs that use the Oracle Bulkload origin to run as expected. </p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title26" id="concept_nvx_kzc_yxb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title26">Grant Users View Access for the Oracle CDC Origin</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 5.6.0, the Oracle CDC origin requires that the configured database
            user has access to the <code class="+ topic/ph pr-d/codeph ph codeph">v$containers</code> view.</p>
        <div class="- topic/p p">After you upgrade to version 5.6.0 or later, run the following command in Oracle to grant
            the user account access to the
            view:<pre class="+ topic/pre pr-d/codeblock pre codeblock" id="concept_nvx_kzc_yxb__codeblock_ldz_vlr_yxb" data-ofbid="concept_nvx_kzc_yxb__codeblock_ldz_vlr_yxb"><code>grant select on v$containers to &lt;user name&gt;;</code></pre></div>
        <p class="- topic/p p">For CDB databases, run the command from the root container, <code class="+ topic/ph pr-d/codeph ph codeph">cdb$root</code>.
            Then run it again from the pluggable database.  For non-CDB databases, run the command
            from the primary database. </p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title27" id="concept_z4q_zgt_wxb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title27">Update Origins and Processors that Read Compressed Files</h2>
    <div class="- topic/body concept/conbody body conbody">
        <div class="- topic/p p">Starting with version 5.6.0, origins that read compressed files require you to set the
            Compression Library property to properly read files compressed with the Airlift version
            of Snappy. Destinations compress files with the Airlift version of Snappy. This affects
            the HTTP Client processor and the following origins: <ul class="- topic/ul ul" id="concept_z4q_zgt_wxb__ul_blw_s2s_wxb" data-ofbid="concept_z4q_zgt_wxb__ul_blw_s2s_wxb">
                <li dir="ltr" class="- topic/li li">Amazon S3</li>
                <li dir="ltr" class="- topic/li li">Azure Blob Storage</li>
                <li dir="ltr" class="- topic/li li">Azure Data Lake Storage Gen1</li>
                <li dir="ltr" class="- topic/li li">Azure Data Lake Storage Gen2 (Legacy)</li>
                <li dir="ltr" class="- topic/li li">Azure IoT/Event Hub Consumer</li>
                <li dir="ltr" class="- topic/li li">CoAP Server</li>
                <li dir="ltr" class="- topic/li li">Directory</li>
                <li dir="ltr" class="- topic/li li">File Tail</li>
                <li dir="ltr" class="- topic/li li">Hadoop FS Standalone</li>
                <li dir="ltr" class="- topic/li li">Google Cloud Storage</li>
                <li dir="ltr" class="- topic/li li">Google Pub/Sub Subscriber</li>
                <li dir="ltr" class="- topic/li li">gRPC Client</li>
                <li dir="ltr" class="- topic/li li">HTTP Client</li>
                <li dir="ltr" class="- topic/li li">HTTP Server </li>
                <li dir="ltr" class="- topic/li li">Kafka Multitopic Consumer</li>
                <li dir="ltr" class="- topic/li li">MQTT Subscriber</li>
                <li dir="ltr" class="- topic/li li">REST Service</li>
                <li dir="ltr" class="- topic/li li">SFTP/FTP/FTPS Client</li>
                <li dir="ltr" class="- topic/li li">TCP Server</li>
                <li dir="ltr" class="- topic/li li">WebSocket Client</li>
                <li dir="ltr" class="- topic/li li">WebSocket Server</li>
            </ul></div>
        <p class="- topic/p p">After you upgrade to version 5.6.0 or later, review your pipelines. In any origins and
            processors that read files compressed using the Airlift version of Snappy, including
            files produced by destinations, set the Compression Library property to Snappy (Airlift
            Snappy). </p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title28" id="concept_ng4_b5g_gxb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title28">Install the Azure Stage Library</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 5.5.0, the Azure Synapse SQL destination and Azure Synapse
            connection require the installation of the Azure stage library. In previous releases,
            the destination and connection required the Azure Synapse Enterprise stage library.</p>
        <p class="- topic/p p">After you upgrade to version 5.5.0 or later, install the Azure stage library,
                <code class="+ topic/ph pr-d/codeph ph codeph">streamsets-datacollector-azure-lib</code>, so that pipelines and jobs that
            use the Azure Synapse SQL destination or connection run as expected.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title29" id="concept_nkt_k5g_gxb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title29">Review Salesforce Pipelines</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 5.5.0, Salesforce stages correctly import date values as dates
            rather than as strings.</p>
        <p class="- topic/p p">After you upgrade to version 5.5.0 or later, review pipelines with Salesforce stages and
            ensure that they do not expect dates to be imported as strings.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title30" id="concept_l5r_chc_dxb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title30">Review OPC UA Client Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.5.0, the OPC UA Client origin no longer includes the Max Array
            Length or Max String Length properties. These properties were removed because they are
            redundant. The existing Max Message Size property properly limits the message size
            regardless of the data type of the message.</p>
        <p class="- topic/p p">After you upgrade to version 5.5.0 or later, review OPC UA Client pipelines to ensure
            that the configuration for the Max Message Size property is appropriate for the
            pipeline. The default maximum message size is 2097152.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title31" id="concept_mgz_4gp_qwb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title31">Install the Snowflake Stage Library to Use Snowflake</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.4.0, using Snowflake stages and Snowflake connections requires
            installing the Snowflake stage library. In previous releases, Snowflake stages and
            connections were available with the Snowflake Enterprise stage library.</p>
        <p class="- topic/p p">After you upgrade to 5.4.0 or later, install the Snowflake stage library,
                <code class="+ topic/ph pr-d/codeph ph codeph">streamsets-datacollector-sdc-snowflake-lib</code>, to enable pipelines and
            jobs that use Snowflake stages or connections to run as expected.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title32" id="concept_k3p_dkr_rvb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title32">Install the Google Cloud Stage Library to Use BigQuery</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.3.0, using Google BigQuery stages and Google BigQuery connections
            requires installing the Google Cloud stage library. In previous releases, BigQuery
            stages and connections were available with the Google BigQuery Enterprise stage
            library.</p>
        <p class="- topic/p p">After you upgrade to version 5.3.0 or later, install the Google Cloud stage library,
                <code class="+ topic/ph pr-d/codeph ph codeph">streamsets-datacollector-google-cloud-lib</code>, to enable pipelines and
            jobs using BigQuery stages or connections to run as expected. </p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title33" id="concept_cdq_5kr_rvb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title33">Review JDBC Multitable Consumer Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.3.0, the Minimum Idle Connections property in the JDBC Multitable
            Consumer origin cannot be set higher than the Number of Threads property. In previous
            releases, there was no limit to the number of minimum idle connections that you could
            configure.</p>
        <p class="- topic/p p">Upgraded pipelines have the Minimum Idle Connections property set to the same value as
            the Number of Threads property. </p>
        <p class="- topic/p p">After you upgrade to version 5.3.0 or later, review JDBC Multitable Consumer origin
            pipelines to ensure that the new value for the Minimum Idle Connections property is
            appropriate for each pipeline. </p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title34" id="concept_g2p_rlr_rvb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title34">Review Missing Field Behavior for Field Replacer Processors</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <div class="- topic/p p">Starting with version 5.3.0, the advanced Field Does Not Exist property in the Field
            Replacer processor has the following two new options that replace the Include without
            Processing option: <ul class="- topic/ul ul" id="concept_g2p_rlr_rvb__ul_pzm_ngq_rvb" data-ofbid="concept_g2p_rlr_rvb__ul_pzm_ngq_rvb">
                <li dir="ltr" class="- topic/li li">Add New Field - Adds the fields defined on the Replace tab to records
                    if they do not exist.</li>
                <li dir="ltr" class="- topic/li li">Ignore New Field - Ignores any fields defined on the Replace tab if
                    they do not exist.</li>
            </ul></div>
        <p class="- topic/p p">After you upgrade to version 5.3.0 or later, the Field Does Not Exist property is set to
            Add New Field. Review Field Replacer pipelines to ensure that this behavior is
            appropriate.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title35" id="concept_gm4_1qr_rvb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title35">Review runtime:loadResource Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.3.0, pipelines that include the
                <code class="+ topic/ph pr-d/codeph ph codeph">runtime:loadResource</code> function fail with errors when the function
            calls a missing or empty resource file. In previous releases, those pipelines sometimes
            continued to run without errors. </p>
        <p class="- topic/p p">After you upgrade to version 5.3.0 or later, review pipelines that use the
                <code class="+ topic/ph pr-d/codeph ph codeph">runtime:loadResource</code> function and ensure that the function calls
            resource files that include the required information. </p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title36" id="concept_bcb_blq_f1c">
    <h2 class="- topic/title title topictitle2" id="ariaid-title36">Manage Underscores in Snowflake Connection Information</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p"><span class="- topic/ph ph">Starting with the Snowflake JDBC driver 3.13.25
                release in November 2022, the Snowflake JDBC driver converts underscores to hyphens,
                by default.</span>
            <span class="- topic/ph ph">This can adversely affect communicating with Snowflake
                when Snowflake connection information specified in a Snowflake stage or connection,
                such as a URL, includes underscores.</span>
        </p>
        <p class="- topic/p p">After you upgrade to Snowflake JDBC driver 3.13.25 or later, review your Snowflake
            connection information for underscores. </p>
        <p class="- topic/p p">When needed, you can bypass the default driver behavior by setting the
                <code class="+ topic/ph pr-d/codeph ph codeph">allowUnderscoresInHost</code> driver property to <code class="+ topic/ph pr-d/codeph ph codeph">true</code>.
            For more information and alternate solutions, see this <a class="- topic/xref xref" href="https://community.snowflake.com/s/article/Behaviour-Change-Release-information-associated-with-Snowflake-JDBC-driver-version-3-13-25" target="_blank" rel="external noopener">Snowflake community article</a>.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title37" id="concept_hmq_ds1_1vb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title37">Review MySQL Binary Log Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <div class="- topic/p p">
            <div class="- topic/div div"><p dir="ltr" class="- topic/p p" id="concept_hmq_ds1_1vb__docs-internal-guid-88e79fa5-7fff-831c-a57c-00a571279a84" data-ofbid="concept_hmq_ds1_1vb__docs-internal-guid-88e79fa5-7fff-831c-a57c-00a571279a84">Starting
                    with version 5.2.0, the MySQL Binary Log origin converts MySQL Enum and Set
                    fields to String fields. </p><p dir="ltr" class="- topic/p p">In previous releases, when reading
                    from a database where the <code class="+ topic/ph pr-d/codeph ph codeph">binlog_row_metadata</code> MySQL database
                    property is set to <code class="+ topic/ph pr-d/codeph ph codeph">MINIMAL</code>, Enum fields are converted to Long,
                    and Set fields are converted to Integer.</p><p dir="ltr" class="- topic/p p">In version 5.2.0 as
                    well as previous releases, when the <code class="+ topic/ph pr-d/codeph ph codeph">binlog_row_metadata</code> MySQL
                    database property is set to <code class="+ topic/ph pr-d/codeph ph codeph">FULL</code>, Enum and Set fields are
                    converted to String.</p>After you upgrade to version 5.2.0, review MySQL Binary
                Log pipelines that process Enum and Set data from a database with
                    <code class="+ topic/ph pr-d/codeph ph codeph">binlog_row_metadata</code> set to <code class="+ topic/ph pr-d/codeph ph codeph">MINIMAL</code>. Update the
                pipeline as needed to ensure that Enum and Set data is processed as expected. </div>
        </div>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title38" id="concept_yjm_ms1_1vb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title38">Review Blob and Clob Processing in Oracle CDC Client Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p dir="ltr" class="- topic/p p" id="concept_yjm_ms1_1vb__docs-internal-guid-36e284ea-7fff-6ec8-a42c-eef763a05c5f" data-ofbid="concept_yjm_ms1_1vb__docs-internal-guid-36e284ea-7fff-6ec8-a42c-eef763a05c5f">Starting with
            version 5.2.0, the Oracle CDC Client origin has new advanced properties that enable
            processing Blob and Clob columns. You can use these properties when the origin buffers
            changes locally. They are disabled by default. </p>
        <p dir="ltr" class="- topic/p p">In previous releases, the origin does not process Blob or Clob columns.
            However, when the Unsupported Fields to Records property is enabled, the origin includes
            Blob and Clob field names and raw string values. </p>
        <p dir="ltr" class="- topic/p p">Due to a <a class="- topic/xref xref" href="../ReleaseNotes/ReleaseNotes.html#concept_ld3_rl1_1vb">known issue</a>
            with this release, when the origin is not configured to process Blob and Clob columns
            and when the Unsupported Fields to Records property is enabled, the origin continues to
            include Blob and Clob field names and raw string values. When the property is disabled,
            the origin includes Blob and Clob field names with null values. The expected behavior is
            to always include field names with null values unless the origin is configured to
            process Blob and Clob columns.</p>
        <div class="- topic/p p" dir="ltr">Review Oracle CDC Client pipelines to assess how they should handle Blob and
            Clob columns:<ul class="- topic/ul ul" id="concept_yjm_ms1_1vb__ul_ogb_ts1_1vb" data-ofbid="concept_yjm_ms1_1vb__ul_ogb_ts1_1vb">
                <li class="- topic/li li">To process Blob and Clob columns, enable Blob and Clob processing on the
                    Advanced tab. You can optionally define a maximum LOB size. <p class="- topic/p p">Verify that
                        sufficient memory is available to Data Collector before enabling Blob and
                        Clob processing.</p></li>
                <li class="- topic/li li">If the origin has the Unsupported Fields to Records property enabled, the origin
                    continues to include Blob and Clob field names and raw string values, as in
                    previous releases.<p class="- topic/p p">If the origin has the Unsupported Fields to Records
                        property disabled, and if null values are acceptable for Blob and Clob
                        fields, then no action is required at this time.</p><p class="- topic/p p">In a future release,
                        this behavior will change so the Unsupported Fields to Records property has
                        no effect on how Blob and Clob columns are processed. </p></li>
            </ul></div>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title39" id="concept_trf_yq1_45b">
    <h2 class="- topic/title title topictitle2" id="ariaid-title39">Review Error Handling for Snowflake CDC Pipelines </h2>
    <div class="- topic/body concept/conbody body conbody">
        <p dir="ltr" class="- topic/p p" id="concept_trf_yq1_45b__docs-internal-guid-a150694c-7fff-c3ce-e12a-1659392fff97" data-ofbid="concept_trf_yq1_45b__docs-internal-guid-a150694c-7fff-c3ce-e12a-1659392fff97">In previous
            releases of the Snowflake Enterprise stage library, when the Snowflake destination runs
            a MERGE query that fails to write all CDC data in a batch to Snowflake, the Snowflake
            destination generates a stage error indicating that there was a difference between the
            number of records expected to be written and the number of records actually written to
            Snowflake.</p>
        <p dir="ltr" class="- topic/p p">The destination does not provide additional detail because Snowflake does not
            provide information about the individual records that failed to be written when a query
            fails.</p>
        <p dir="ltr" class="- topic/p p">Starting with version 1.12.0 of the Snowflake Enterprise stage library, when a
            query that writes CDC data fails, in addition to generating the stage error, the
            Snowflake destination passes all records in the batch to error handling. As a result,
            the error records are handled based on the error handling configured for the stage and
            pipeline.</p>
        <p class="- topic/p p">Review stage and pipeline error handling for Snowflake CDC pipelines to ensure that error
            records are handled appropriately.</p>
        <div class="- topic/p p" dir="ltr">
            <div class="- topic/note note note note_note"><span class="note__title">Note:</span> The error records passed to error handling have been processed by the Snowflake
                destination. For example, if the batch includes three records that update the same
                row, they are merged into a single update record. </div>
        </div>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title40" id="concept_b11_rtq_k5b">
    <h2 class="- topic/title title topictitle2" id="ariaid-title40">Review SQL Server Pipelines with Unencrypted Connections</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.1.0, <span class="- topic/ph ph"><span class="- topic/ph ph">Data Collector</span> uses
                        Microsoft JDBC Driver for SQL Server version 10.2.1 to connect to Microsoft
                        SQL Server. <a class="- topic/xref xref" href="https://docs.microsoft.com/en-us/sql/connect/jdbc/release-notes-for-the-jdbc-driver?view=sql-server-ver16#changes-in-102" target="_blank" rel="external noopener">According to Microsoft</a>, this
                        version has introduced a breaking backward-incompatible change.</span></p>
        <div class="- topic/p p">As a result, after you upgrade to 5.1.0 or later, <span class="- topic/ph ph">upgraded pipelines that connect to Microsoft
                        SQL Server without SSL/TLS encryption will likely fail with a message such
                        as the
                  following:</span><pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>The driver could not establish a secure connection to SQL Server by using Secure Sockets Layer (SSL) encryption.</code></pre></div>
        <p class="- topic/p p"><span class="- topic/ph ph">This issue can be resolved by configuring SSL/TLS
                        encryption between Microsoft SQL Server and <span class="- topic/ph ph">Data Collector</span>. For details
                        on configuring clients for SSL/TLS encryption, see the Microsoft SQL Server
                        documentation.</span></p>
        <p class="- topic/p p"><span class="- topic/ph ph">Otherwise, you can address this issue at a
                        pipeline level by adding <code class="+ topic/ph pr-d/codeph ph codeph">encrypt=false</code> to the connection
                        string, or by adding <code class="+ topic/ph pr-d/codeph ph codeph">encrypt</code> as an additional JDBC property
                        and setting it to <code class="+ topic/ph pr-d/codeph ph codeph">false</code>.</span></p>
        <p class="- topic/p p"><span class="- topic/ph ph">To avoid having to update all affected pipelines
                        immediately, you can configure <span class="- topic/ph ph">Data Collector</span> to attempt
                        to disable SSL/TLS for all pipelines that use a JDBC driver. To do so, set
                        the <code class="+ topic/ph pr-d/codeph ph codeph">stage.conf_com.streamsets.pipeline.lib.jdbc.disableSSL</code>
                        <span class="- topic/ph ph">Data Collector</span> configuration
                        property to <code class="+ topic/ph pr-d/codeph ph codeph">true</code>. Note that this property affects
                              <em class="+ topic/ph hi-d/i ph i">all</em> JDBC drivers, and should typically be used only as a
                        stopgap measure. For more information about the configuration property, see
                              <a class="- topic/xref xref" href="../Configuration/DCConfig.html#task_lxk_kjw_1r">Configuring Data Collector</a>.</span></p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title41" id="concept_clc_1vv_j5b">
    <h2 class="- topic/title title topictitle2" id="ariaid-title41">Review Dockerfiles for Custom Docker Images </h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.1.0, the <span class="- topic/ph ph">Data Collector</span>
            Docker image uses Ubuntu 20.04 LTS (Focal Fossa) as a parent image. In previous
            releases, the <span class="- topic/ph ph">Data Collector</span>
            Docker image used Alpine Linux as a parent image. </p>
        <p class="- topic/p p">If you build custom <span class="- topic/ph ph">Data Collector</span>
            images using <code class="+ topic/ph pr-d/codeph ph codeph">streamsets/datacollector</code> version 5.0.0 or earlier as the
            parent image, review your Dockerfiles and make all required updates to become compatible
            with Ubuntu Focal Fossa before you build a custom image based on
                <code class="+ topic/ph pr-d/codeph ph codeph">streamsets/datacollector:5.1.0</code> or later versions.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title42" id="concept_ufb_3yf_vtb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title42">Review Oracle CDC Client Local Buffer Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.1.0, <span class="- topic/ph ph">pipelines that include the
                        Oracle CDC Client origin no longer report memory consumption data when the
                        origin uses local buffers. In previous releases, this reporting occurred by
                        default, which slowed pipeline performance. </span></p>
        <p class="- topic/p p">After you upgrade to <span class="- topic/ph ph">Data Collector</span>
            5.1.0 or later, <span class="- topic/ph ph">memory consumption reporting
                        for Oracle CDC Client local buffer usage is no longer performed by default.
                        If you require this information, you can enable it by setting the
                              <code class="+ topic/ph pr-d/codeph ph codeph">stage.conf_com.streamsets.pipeline.stage.origin.jdbc.cdc.oracle.monitorbuffersize</code>
                        <span class="- topic/ph ph">Data Collector</span> configuration
                        property to <code class="+ topic/ph pr-d/codeph ph codeph">true</code>. </span></p>
        <p class="- topic/p p">This property enables memory consumption data reporting
                  for all Oracle CDC Client pipelines that use local buffering. Because it slows
                  pipeline performance, as a best practice, enable the property only for short term
                  troubleshooting.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title43" id="concept_brw_yhh_ftb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title43">Update Oracle CDC Client Origin User Accounts</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 5.0.0, the <span class="- topic/ph ph">Oracle CDC Client origin requires additional
                        Oracle permissions to ensure appropriate handling of self-recovery,
                        failover, and crash recovery.</span></p>
        <div class="- topic/p p">After you upgrade to version 5.0.0 or later, <span class="- topic/ph ph">use the following GRANT statements to update the
                        Oracle user account associated with the
                  origin:</span><pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>GRANT select on GV_$ARCHIVED_LOG to &lt;user name&gt;;
GRANT select on GV_$INSTANCE to &lt;user name&gt;;
GRANT select on GV_$LOG to &lt;user name&gt;; 
GRANT select on V_$INSTANCE to &lt;user name&gt;;</code></pre></div>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title44" id="concept_dmp_zlt_jsb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title44">Review Couchbase Pipelines</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 4.4.0, the Couchbase stage
            library no longer includes an encryption JAR file that the Couchbase stages do not
            directly use. Removing the JAR file should not affect pipelines using Couchbase
            stages.</p>
        <p class="- topic/p p">However, if Couchbase pipelines display errors about classes or methods not being found,
            you can install the following encryption JAR file as an external library for the
            Couchbase stage library:</p>
        <p class="- topic/p p"><a class="- topic/xref xref" href="https://search.maven.org/artifact/com.couchbase.client/encryption/1.0.0/jar" target="_blank" rel="external noopener">https://search.maven.org/artifact/com.couchbase.client/encryption/1.0.0/jar</a></p>
        <p class="- topic/p p">To install an external library, see <a class="- topic/xref xref" href="../Configuration/ExternalLibs.html#concept_pdv_qlw_ft">Install External Libraries</a>.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title45" id="concept_dzk_zjp_krb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title45">Update Keystore Location</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 4.2.0, when you enable
            HTTPS for <span class="- topic/ph ph">Data Collector</span>,
            you can store the keystore file in the <span class="- topic/ph ph">Data Collector</span>
            resources directory, <code class="+ topic/ph pr-d/codeph ph codeph">$SDC_RESOURCES</code>. You can
            then enter a path relative to that directory when you define the keystore location in
            the <span class="- topic/ph ph">Data Collector</span>
            <span class="- topic/ph ph"><span class="- topic/ph ph">configuration file</span></span>.</p>
        <p class="- topic/p p">In previous releases, you can store the keystore file in the <span class="- topic/ph ph">Data Collector</span>
            configuration directory, <code class="+ topic/ph pr-d/codeph ph codeph">$SDC_CONF</code>, and then define the location to the file
            using a path relative to that directory. You can continue to store the file in the
            configuration directory, but <span class="- topic/ph ph">StreamSets</span>
            recommends moving it to the resources directory when you upgrade.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title46" id="concept_zmv_jgj_krb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title46">Review Tableau CRM Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 4.2.0, the Tableau CRM
            destination, previously known as the Einstein Analytics destination, writes to
            Salesforce differently from versions 3.7.0 - 4.1.x. When upgrading from version 3.7.0 -
            4.1.x, review Tableau CRM pipelines to ensure that the destination behaves
            appropriately. When upgrading from a version prior to 3.7.0, no action is needed.</p>
        <p dir="ltr" class="- topic/p p" id="concept_zmv_jgj_krb__docs-internal-guid-326b185e-7fff-b959-b334-b1def727b2f2" data-ofbid="concept_zmv_jgj_krb__docs-internal-guid-326b185e-7fff-b959-b334-b1def727b2f2">With version 4.2.0
            and later, the destination writes to Salesforce by uploading batches of data to
            Salesforce, then signaling Salesforce to process the dataset after a configurable
            interval when no new data arrives. You configure the interval with the Dataset Wait Time
            stage property.</p>
        <p dir="ltr" class="- topic/p p" id="concept_zmv_jgj_krb__docs-internal-guid-f0b58b92-7fff-d9e6-7475-52bd8b817b58" data-ofbid="concept_zmv_jgj_krb__docs-internal-guid-f0b58b92-7fff-d9e6-7475-52bd8b817b58">In versions 3.7.0
            - 4.1.x, the destination signals Salesforce to process data after uploading each batch,
            effectively treating each batch as a dataset and making the Dataset Wait Time property
            irrelevant. </p>
        <p class="- topic/p p">After upgrading from version 3.7.0 - 4.1.x to version 4.2.0 or later, verify that the
            destination behavior is as expected. If necessary, update the Dataset Wait Time property
            to indicate the interval that Salesforce should wait before processing each dataset.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title47" id="concept_jym_22h_rpb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title47">Resolve Kafka and MapR Streams Conflicts</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 4.0.0, Kafka stages and MapR Streams stages generate an error when
            you specify an additional Kafka or MapR configuration property that conflicts with a
            stage property setting. </p>
        <p class="- topic/p p">In the stage properties, you can use the Override Stage Configurations property to enable
            user-defined Kafka or MapR configuration properties to take precedence. Or, you can
            remove or update the configuration property to allow the stage property to take
            precedence.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title48" id="concept_lmd_dxh_rpb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title48">Review HTTP Client Processor Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 4.0.0, the HTTP Client processor performs additional checks against the
            specified Batch Wait Time property. In certain cases, this change can generate errors.
            After upgrading from version 3.x to version 4.0.0 or later, verify that pipelines that
            include the HTTP Client processor perform as expected. </p>
        <p class="- topic/p p">The Batch Wait Time property defines the maximum amount of time that the processor uses
            to process all HTTP requests for a single record. When the processing for a record
            exceeds the specified batch wait time, the output records are passed to the stage for
            error handling. </p>
        <p class="- topic/p p">In previous releases, the HTTP Client processor only checked the batch wait time before
            each HTTP request. As a result, the processor did not always notice when the processing
            time exceeded the batch wait time. </p>
        <p class="- topic/p p">Starting with version 4.0.0, the HTTP Client processor checks the batch wait time before
            and after every request. As a result, the processor may generate more errors than in
            previous releases. </p>
        <p class="- topic/p p">Also, in previous releases, the default value for Batch Wait Time was 2,000 milliseconds.
            Starting with version 4.0.0, the default value is 100,000 milliseconds. </p>
        <div class="- topic/p p">
            <div class="- topic/note note important note_important"><span class="note__title">Important:</span> When you upgrade from version 3.x to version 4.0.0 or later, the
                Batch Wait Time property in the HTTP Client processor is set to the new default of
                100,000 milliseconds, unless you changed the property from the default. </div>
        </div>
        <p class="- topic/p p">For example, if you did not touch the Batch Wait Time property in a 3.x pipeline, then it
            is increased from 2,000 to 100,000 milliseconds during the upgrade. However, if you set
            the property to 3000 milliseconds in a 3.x pipeline, then the processor retains the 3000
            millisecond batch wait time after the upgrade. </p>
        <p class="- topic/p p">After upgrading from version 3.x to version 4.0.0 or later, verify that pipelines that
            include the HTTP Client processor perform as expected. If you want the processor to wait
            for all HTTP requests to complete, increase the Batch Wait Time as needed. </p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title49" id="concept_bz1_15m_h4b">
    <h2 class="- topic/title title topictitle2" id="ariaid-title49">Verify Elasticsearch Security</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 3.21.0, Elasticsearch stages include additional security
            validation. As a result, pipelines with Elasticsearch security issues that previously
            ran without error might fail to start after you upgrade to version 3.21.0 or later.</p>
        <p class="- topic/p p">When this occurs, check for additional details in the error messages, then correct the
            security issue or stage configuration, as needed. </p>
        <div class="- topic/p p">For example, in earlier <span class="- topic/ph ph">Data Collector</span>
            versions, an Elasticsearch stage configured to use the AWS Signature V4 security mode
            with SSL/TLS would not generate an error if the certificate was missing from the
            specified truststore. With version 3.21.0 or later, the pipeline fails to start with the
            following error:
            <pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>ELASTICSEARCH_43 - Could not connect to the server(s) &lt;SSL/TLS error details&gt;</code></pre></div>
        <div class="- topic/p p">As another example, in earlier versions, if you specify a port in an HTTP URL that
            doesnât support the HTTPS protocol when configuring an Elasticsearch stage to use
            SSL/TLS, the stage used HTTP without raising an error. With version 3.21.0 or later, the
            pipeline fails to start with an error such
            as:<pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>ELASTICSEARCH_43 - Could not connect to the server(s). 
Unrecognized SSL message, plaintext connection?</code></pre></div>
        <p class="- topic/p p">Note that the details of the message vary based on the originating server.</p>
        <p class="- topic/p p"> </p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title50" id="concept_ijl_rzf_m4b">
    <h2 class="- topic/title title topictitle2" id="ariaid-title50">Adjust PostgreSQL CDC Pipelines or PostgreSQL Configuration</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p dir="ltr" class="- topic/p p" id="concept_ijl_rzf_m4b__docs-internal-guid-3bbc481b-7fff-d917-ad16-872d85ec0aec" data-ofbid="concept_ijl_rzf_m4b__docs-internal-guid-3bbc481b-7fff-d917-ad16-872d85ec0aec">Starting with
            version 3.21.0, the PostgreSQL CDC Client origin includes a new Status Interval property
            that helps ensure that the wal2json logical decoder, which helps process changes, does
            not time out.</p>
        <p dir="ltr" class="- topic/p p">The new Status Interval origin property should be set to less than the
            wal_sender_timeout property in the PostgreSQL <code class="+ topic/ph pr-d/codeph ph codeph">postgresql.conf</code> file.
            Ideally, the Status Interval property should be half of the value configured for the
            wal_sender_timeout property. </p>
        <p dir="ltr" class="- topic/p p">By default, the Status Interval property is 30 seconds. The wal2json
                <code class="+ topic/ph pr-d/codeph ph codeph">README.md</code> file previously recommended setting the
            wal_sender_timeout property to 2000 milliseconds, or 2 seconds. If you use these values
            for both properties, the pipeline can trigger the following error:</p>
        <div class="- topic/p p" dir="ltr">
            <pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>com.streamsets.pipeline.api.StageException: JDBC_606 - Wal Sender is not active</code></pre>
        </div>
        <p dir="ltr" class="- topic/p p">To avoid this issue, update one of the properties so that Status Interval is
            half of wal_sender_timeout. </p>
        <p dir="ltr" class="- topic/p p">When possible, use the default Status Interval value and the default
            wal_sender_timeout value of 60000 milliseconds, or 60 seconds.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title51" id="concept_ilj_5cl_j4b">
    <h2 class="- topic/title title topictitle2" id="ariaid-title51">Review Processing of MySQL Data (JDBC Processors)</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 3.21.0, JDBC processors convert MySQL unsigned integer data types
            to different <span class="- topic/ph ph">Data Collector</span>
            types than in earlier <span class="- topic/ph ph">Data Collector</span>
            versions. This change occurred for JDBC origins <a class="- topic/xref xref" href="PostUpgrade.html#concept_q1s_vcg_kmb">in an earlier
            version</a>.</p>
        <p class="- topic/p p">When you upgrade to version 3.21.0 or later, review pipelines that use JDBC processors to
            work with MySQL database data to ensure that downstream expressions provide the expected
            results.</p>
        <div class="- topic/p p">The following table describes the data type conversion changes:<div class="table-container"><table class="- topic/table table frame-all" id="concept_ilj_5cl_j4b__table_ukn_mdg_kmb" data-ofbid="concept_ilj_5cl_j4b__table_ukn_mdg_kmb" data-cols="3"><caption></caption><colgroup><col style="width:25%"/><col style="width:37.5%"/><col style="width:37.5%"/></colgroup><thead class="- topic/thead thead">
                        <tr class="- topic/row">
                            <th class="- topic/entry entry align-left colsep-1 rowsep-1" id="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__1">MySQL Data Type</th>
                            <th class="- topic/entry entry align-left colsep-1 rowsep-1" id="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__2">Data Type Conversion Before 3.21.0</th>
                            <th class="- topic/entry entry align-left colsep-0 rowsep-1" id="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__3">Data Type Conversion with 3.21.0 and Later</th>
                        </tr>
                    </thead><tbody class="- topic/tbody tbody">
                        <tr class="- topic/row">
                            <td class="- topic/entry entry align-left colsep-1 rowsep-1" headers="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__1">Bigint Unsigned</td>
                            <td class="- topic/entry entry align-left colsep-1 rowsep-1" headers="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__2">Long</td>
                            <td class="- topic/entry entry align-left colsep-0 rowsep-1" headers="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__3">Decimal</td>
                        </tr>
                        <tr class="- topic/row">
                            <td class="- topic/entry entry align-left colsep-1 rowsep-1" headers="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__1">Int Unsigned</td>
                            <td class="- topic/entry entry align-left colsep-1 rowsep-1" headers="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__2">Integer</td>
                            <td class="- topic/entry entry align-left colsep-0 rowsep-1" headers="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__3">Long</td>
                        </tr>
                        <tr class="- topic/row">
                            <td class="- topic/entry entry align-left colsep-1 rowsep-1" headers="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__1">Mediumint Unsigned</td>
                            <td class="- topic/entry entry align-left colsep-1 rowsep-1" headers="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__2">Integer</td>
                            <td class="- topic/entry entry align-left colsep-0 rowsep-1" headers="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__3">Long</td>
                        </tr>
                        <tr class="- topic/row">
                            <td class="- topic/entry entry align-left colsep-1 rowsep-0" headers="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__1">Smallint Unsigned</td>
                            <td class="- topic/entry entry align-left colsep-1 rowsep-0" headers="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__2">Short</td>
                            <td class="- topic/entry entry align-left colsep-0 rowsep-0" headers="concept_ilj_5cl_j4b__table_ukn_mdg_kmb__entry__3">Short</td>
                        </tr>
                    </tbody></table></div></div>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title52" id="concept_nzs_nsz_vnb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title52">Review Google Pub/Sub Producer Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 3.20.0, the Google Pub/Sub Producer destination requires specifying
            a positive integer value for the Max Outstanding Message Count and Max Outstanding
            Request Bytes properties. </p>
        <p class="- topic/p p">In earlier <span class="- topic/ph ph">Data Collector</span>
            versions, you could set these properties to 0 to opt out of using them. With version
            3.20.0 and later, these properties must be set to a positive integer. </p>
        <div class="- topic/p p">Upgraded pipelines with these properties set to positive integers retain the configured
            values. Upgraded pipelines with these properties set to 0 are updated to use the new
            default values, as follows: <ul class="- topic/ul ul" id="concept_nzs_nsz_vnb__ul_w43_ftz_vnb" data-ofbid="concept_nzs_nsz_vnb__ul_w43_ftz_vnb">
                <li class="- topic/li li">Max Outstanding Message Count is set to 1000 messages</li>
                <li class="- topic/li li">Max Outstanding Request Bytes is set to 8000 bytes</li>
            </ul></div>
        <p class="- topic/p p">If upgraded pipelines previously used 0 to opt out of using these properties, review the
            pipelines to ensure that the new default values are appropriate. Update the properties
            as needed.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title53" id="concept_vdt_n5h_wnb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title53">Review JDBC Multitable Consumer Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <div class="- topic/p p">Starting with version 3.20.0, the JDBC Multitable Consumer origin behavior while
            performing multithreaded processing with the Switch Tables batch strategy has changed.
            This affects multithreaded table and partition processing in a similar manner:<dl class="- topic/dl dl">
                
                    <dt class="- topic/dt dt dlterm">Multithreaded table processing </dt>
                    <dd class="- topic/dd dd">In earlier <span class="- topic/ph ph">Data Collector</span> versions, when you use the Switch Tables batch strategy with
                        multithreaded table processing, multiple threads can take turns processing
                        data within a single table, caching separate result sets for the table.</dd>
                    <dd class="- topic/dd dd ddexpand">With this release, each table can have only a single result set cached at a
                        time. </dd>
                    <dd class="- topic/dd dd ddexpand">So while a thread switches tables between batches, it now skips tables that
                        already have a result set from another thread. Only one thread can process
                        the data in a table at a time.</dd>
                
                
                    <dt class="- topic/dt dt dlterm">Multithreaded partition processing</dt>
                    <dd class="- topic/dd dd">Similarly, in earlier <span class="- topic/ph ph">Data Collector</span> versions, when you use the Switch Tables batch strategy with
                        multithreaded partition processing, multiple threads can take turns
                        processing data within a single partition, caching separate result sets for
                        the partition. </dd>
                    <dd class="- topic/dd dd ddexpand">With this release, each partition can have only a single result set cached
                        at a time. </dd>
                    <dd class="- topic/dd dd ddexpand">So while a thread switches partitions between batches, it now skips
                        partitions that already have a result set from another thread. Only one
                        thread can process the data in a partition at a time.</dd>
                
            </dl></div>
        <p class="- topic/p p">Review upgraded pipelines that use the Switch Table batch strategy. Depending on factors
            such as the number and size of the tables and partitions being processed, the change
            might negatively impact performance. </p>
        <p class="- topic/p p">For example, say two threads process four tables in multithreaded table processing, and
            one table is much larger than the other tables. In earlier versions, using the Switch
            Tables batch strategy allowed multiple threads to help process the large table. With
            version 3.20.0 or later, only one thread can process data in one table at a time. </p>
        <div class="- topic/p p">If pipeline performance has been negatively impacted, consider the following options:<ul class="- topic/ul ul" id="concept_vdt_n5h_wnb__ul_hnt_513_wnb" data-ofbid="concept_vdt_n5h_wnb__ul_hnt_513_wnb">
                <li class="- topic/li li">If multithreaded table processing has slowed, you may have a mix of small and
                    large tables. <p class="- topic/p p">To enable large tables to be processed by more than one thread,
                        consider using multithreaded partition processing for that table.</p><p class="- topic/p p">To
                        enable threads to cycle through the tables more quickly, you might reduce
                        the number of batches generated from a result set using the Batches from
                        Result Set property.</p></li>
                <li class="- topic/li li">If multithreaded partition processing has slowed, you may have a mix of small
                    and large partitions. <p class="- topic/p p">To enable threads to cycle through the partitions more
                        quickly, you might reduce the number of batches generated from a result set
                        using the Batches from Result Set property.</p></li>
            </ul></div>
        <p class="- topic/p p">For information about batch strategies, see <a class="- topic/xref xref" href="../Origins/MultiTableJDBCConsumer.html#concept_n5t_zgx_4y">Batch Strategy</a>.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title54" id="concept_k53_vhk_cgb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title54">Update Oracle CDC Client Pipelines </h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Consider the following upgrade tasks for pipelines that contain the Oracle CDC Client
            origin, based on the version that you are upgrading from:</p>
        <dl class="- topic/dl dl">
            
                <dt class="- topic/dt dt dlterm">Upgrade from versions earlier than 3.19.0</dt>
                <dd class="- topic/dd dd">Starting with version 3.19.0, Oracle CDC Client origins with the Parse SQL
                    property enabled no longer generate records for SELECT_FOR_UPDATE operations. </dd>
                <dd class="- topic/dd dd ddexpand">If your Oracle CDC Client pipelines do not process SELECT_FOR_UPDATE operations
                    or do not need to process SELECT_FOR_UPDATE operations, no changes are
                    required.</dd>
                <dd class="- topic/dd dd ddexpand">If you want to capture SELECT_FOR_UPDATE statements, you can clear the Parse SQL
                    Query property to write LogMiner SQL statements to generated records. Then,
                    specify SELECT_FOR_UPDATE in the Operations property. </dd>
            
            
                <dt class="- topic/dt dt dlterm">Upgrade from versions earlier than 3.7.0</dt>
                <dd class="- topic/dd dd">Starting with version 3.7.0, pipelines that use the Oracle CDC Client origin can
                    produce some duplicate data. </dd>
                <dd class="- topic/dd dd ddexpand">Due to a change in offset format, when the pipeline restarts, the Oracle CDC
                    Client origin reprocesses all transactions with the commit SCN from the last
                    offset to prevent skipping unread records. This issue occurs only for the last
                    SCN that was processed before the upgrade, and only once, upon upgrading to Data
                    Collector version 3.7.0 or later.</dd>
                <dd class="- topic/dd dd ddexpand">When possible, remove the duplicate records from the destination system.</dd>
            
        </dl>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title55" id="concept_vs5_2tz_lnb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title55">Update Cluster EMR Batch Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 3.19.0, cluster EMR batch pipelines that provision a cluster store
            the specified EMR version differently than in earlier versions. As a result, the EMR
            versions defined in earlier pipelines are not retained. </p>
        <p class="- topic/p p">When you upgrade from a version earlier than 3.19.0, you must edit any cluster EMR batch
            pipeline that provisions a cluster, and define the EMR Version property.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title56" id="concept_q1s_vcg_kmb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title56">Review Processing of MySQL Data (JDBC Origins)</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 3.17.0, JDBC origins convert MySQL unsigned integer data types to
            different <span class="- topic/ph ph">Data Collector</span>
            types than in earlier <span class="- topic/ph ph">Data Collector</span>
            versions.</p>
        <p class="- topic/p p">When you upgrade to version 3.17.0 or later, review pipelines that use JDBC origins to
            process MySQL database data to ensure that downstream expressions provide the expected
            results.</p>
        <div class="- topic/p p">The following table describes the data type conversion changes:<div class="table-container"><table class="- topic/table table frame-all" id="concept_q1s_vcg_kmb__table_ukn_mdg_kmb" data-ofbid="concept_q1s_vcg_kmb__table_ukn_mdg_kmb" data-cols="3"><caption></caption><colgroup><col style="width:25%"/><col style="width:37.5%"/><col style="width:37.5%"/></colgroup><thead class="- topic/thead thead">
                        <tr class="- topic/row">
                            <th class="- topic/entry entry align-left colsep-1 rowsep-1" id="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__1">MySQL Data Type</th>
                            <th class="- topic/entry entry align-left colsep-1 rowsep-1" id="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__2">Data Type Conversion Before 3.17.0</th>
                            <th class="- topic/entry entry align-left colsep-0 rowsep-1" id="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__3">Data Type Conversion with 3.17.0 and Later</th>
                        </tr>
                    </thead><tbody class="- topic/tbody tbody">
                        <tr class="- topic/row">
                            <td class="- topic/entry entry align-left colsep-1 rowsep-1" headers="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__1">Bigint Unsigned</td>
                            <td class="- topic/entry entry align-left colsep-1 rowsep-1" headers="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__2">Long</td>
                            <td class="- topic/entry entry align-left colsep-0 rowsep-1" headers="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__3">Decimal</td>
                        </tr>
                        <tr class="- topic/row">
                            <td class="- topic/entry entry align-left colsep-1 rowsep-1" headers="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__1">Int Unsigned</td>
                            <td class="- topic/entry entry align-left colsep-1 rowsep-1" headers="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__2">Integer</td>
                            <td class="- topic/entry entry align-left colsep-0 rowsep-1" headers="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__3">Long</td>
                        </tr>
                        <tr class="- topic/row">
                            <td class="- topic/entry entry align-left colsep-1 rowsep-1" headers="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__1">Mediumint Unsigned</td>
                            <td class="- topic/entry entry align-left colsep-1 rowsep-1" headers="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__2">Integer</td>
                            <td class="- topic/entry entry align-left colsep-0 rowsep-1" headers="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__3">Long</td>
                        </tr>
                        <tr class="- topic/row">
                            <td class="- topic/entry entry align-left colsep-1 rowsep-0" headers="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__1">Smallint Unsigned</td>
                            <td class="- topic/entry entry align-left colsep-1 rowsep-0" headers="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__2">Short</td>
                            <td class="- topic/entry entry align-left colsep-0 rowsep-0" headers="concept_q1s_vcg_kmb__table_ukn_mdg_kmb__entry__3">Short</td>
                        </tr>
                    </tbody></table></div></div>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title57" id="concept_xb1_zvx_gmb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title57">Update Elasticsearch Security Properties (Optional)</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">Starting with version 3.17.0, Elasticsearch stages provide a User Name property and a
            Password property. Elasticsearch stages in previous versions passed the credentials
            together in a single Security Username/Password property. </p>
        <p class="- topic/p p">When you upgrade to version 3.17.0 or later, any configuration in the Security
            Username/Password property is moved to the new User Name property, where the Security
            Username/Password format, <code class="+ topic/ph pr-d/codeph ph codeph">&lt;username&gt;:&lt;password&gt;</code>, remains valid. </p>
        <p class="- topic/p p">Though not required, you can update Elasticsearch stages to use the new User Name and
            Password properties. </p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title58" id="concept_hkf_ylq_xhb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title58">Update Syslog Pipelines</h2>
    <div class="- topic/body concept/conbody body conbody">
        <div class="- topic/p p">Starting with version 3.9.0, the Syslog destination no longer includes the following
            properties on the Message tab:<ul class="- topic/ul ul" id="concept_hkf_ylq_xhb__ul_pqs_1mq_xhb" data-ofbid="concept_hkf_ylq_xhb__ul_pqs_1mq_xhb">
                <li class="- topic/li li">Use Non-Text Message Format</li>
                <li class="- topic/li li">Message Text</li>
            </ul></div>
        <p class="- topic/p p">You now configure the destination to use the Text data format on the Data Format tab,
            just as you do with other destinations.</p>
        <p class="- topic/p p">If pipelines created in a previous version include the Syslog destination configured to
            use text data, you must configure the Text data format properties on the Data Format tab
            after the upgrade.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title59" id="concept_azm_ncy_vhb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title59">JDBC Tee and JDBC Producer Cache Change</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc">Starting with version 3.9.0, the JDBC Tee processor and the JDBC Producer destination
        no longer cache prepared statements when performing single-row operations. As a result, the
        Max Cache Size Per Batch property has been removed from both stages.</p>
        <p class="- topic/p p">In previous versions when you enabled the stage to perform single-row operations, you
            could configure the Max Cache Size Per Batch property to specify the maximum number of
            prepared statements to store in the cache.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title60" id="concept_d2k_ssk_1hb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title60">Pipeline Export</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc">Starting with version 3.8.0, <span class="- topic/ph ph">Data Collector</span> has
        changed the behavior of the pipeline <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Export</span> option. <span class="- topic/ph ph">Data Collector</span> now
        strips all plain text credentials from exported pipelines. Previously, <span class="- topic/ph ph">Data Collector</span>
        included plain text credentials in exported pipelines.</p>
        <p class="- topic/p p">To use the previous behavior and include credentials in the export, choose the new
                <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Export with Plain Text Credentials</span> option when exporting a
            pipeline.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title61" id="concept_k1s_j4s_rgb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title61">Update TCP Server Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc">Starting with version 3.7.2, the TCP Server origin has changed the valid values for
        the Read Timeout property. The property now allows a minimum of 1 second and a maximum of
        3,600 seconds.</p>
        <p class="- topic/p p">In previous versions, the Read Timeout property had no maximum value and could be set to
            0 to keep the connection open regardless of whether the origin read any data.</p>
        <p class="- topic/p p">If pipelines created in a previous version have the Read Timeout property set to a value
            less than 1 or greater than 3,600, the upgrade process sets the property to the maximum
            value of 3,600 seconds. Update the Read Timeout property as needed after
            the upgrade.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title62" id="concept_clv_tzk_cgb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title62">Update Cluster Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc">Starting with version 3.7.0, <span class="- topic/ph ph">Data Collector</span> now
        requires that the Java temporary directory on the gateway node in the cluster is
        writable.</p>
        <p class="- topic/p p">The Java temporary directory is specified by the Java system property
                <code class="+ topic/ph pr-d/codeph ph codeph">java.io.tmpdir</code>. On UNIX, the default value of this property is
            typically <span class="+ topic/ph sw-d/filepath ph filepath">/tmp</span> and is writable. </p>
        <p class="- topic/p p">Previous <span class="- topic/ph ph">Data Collector</span>
            versions did not have this requirement. Before running upgraded cluster pipelines,
            verify that the Java temporary directory on the gateway node is writable.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title63" id="concept_lxn_s5h_gnb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title63">Update Kafka Consumer or Kafka Multitopic Consumer Pipelines</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 3.7.0, <span class="- topic/ph ph">Data Collector</span> no
            longer uses the <code class="+ topic/ph pr-d/codeph ph codeph">auto.offset.reset</code> value set in the Kafka Configuration
            property to determine the initial offset for the Kafka Consumer or Kafka Multitopic
            Consumer origin. Instead, <span class="- topic/ph ph">Data Collector</span>
            uses the new Auto Offset Reset property to determine the initial offset. With the
            default setting of the new property, the origin reads all existing messages in a topic.
            In previous versions, the origin read only new messages by default. Because running a
            pipeline sets an offset value, configuration of the initial offset only affects
            pipelines that have never run. </p>
        <div class="- topic/p p">After upgrading from a version earlier than 3.7.0, update any pipelines that have not run
            and use the Kafka Consumer or Kafka Multitopic Consumer origins.<ol class="- topic/ol ol" id="concept_lxn_s5h_gnb__ol_pvr_bvh_gnb" data-ofbid="concept_lxn_s5h_gnb__ol_pvr_bvh_gnb">
                <li class="- topic/li li">On the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Kafka</span> tab for the origin, set the value of the
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Auto Offset Reset</span> property: <ul class="- topic/ul ul" id="concept_lxn_s5h_gnb__ul_zm5_2rc_cgb" data-ofbid="concept_lxn_s5h_gnb__ul_zm5_2rc_cgb">
                        <li class="- topic/li li"><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Earliest</span> - Select to have the origin read
                            messages starting with the first message in the topic (same behavior as
                            configuring <span class="+ topic/ph ui-d/uicontrol ph uicontrol">auto.offset.reset</span> to
                                <span class="+ topic/ph ui-d/uicontrol ph uicontrol">earliest</span> in previous versions of <span class="- topic/ph ph">Data Collector</span>).</li>
                        <li class="- topic/li li"><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Latest</span> - Select to have the origin read messages
                            starting with the last message in the topic (same behavior as not
                            configuring <span class="+ topic/ph ui-d/uicontrol ph uicontrol">auto.offset.reset</span> in previous
                            versions of <span class="- topic/ph ph">Data Collector</span>). </li>
                        <li class="- topic/li li"><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Timestamp</span> - Select to have the origin read
                            messages starting with messages at a particular timestamp, which you
                            specify in the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Auto Offset Reset Timestamp</span>
                            property.</li>
                    </ul></li>
                <li class="- topic/li li">If configured in the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Kafka Configuration</span> property, delete
                    the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">auto.offset.reset</span> property. </li>
            </ol></div>
    </div>
</article><article class="- topic/topic task/task topic task nested1" aria-labelledby="ariaid-title64" id="task_tzp_2dd_vhb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title64">Update JDBC Pipelines</h2>
    
    <div class="- topic/body task/taskbody body taskbody"><p class="- topic/shortdesc shortdesc">Starting with version 3.5.0, <span class="- topic/ph ph">Data Collector</span>
        requires the maximum lifetime for a connection to be at least 30 minutes in stages that use
        a JDBC connection. <span class="- topic/ph ph">Data Collector</span> does
        not validate stages with lower non-zero values configured. </p>
        <section class="- topic/section task/context section context">
            <p class="- topic/p p">If you upgrade pipelines that include a stage that uses a JDBC connection, update the
                stage to set the maximum lifetime for a connection to be at least 30 minutes. </p>
        </section>
        <section class="- topic/ol task/steps ol steps"><div class="- topic/ol task/steps ol steps"><div class="- topic/li task/step li step p">
                <span class="- topic/ph task/cmd ph cmd">On the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Advanced</span> tab, set the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Max Connection
                        Lifetime</span> property to be at least 30 minutes or 1800
                    seconds.</span>
            </div></div></section>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title65" id="concept_b34_vly_cfb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title65">Update Spark Executor with Databricks Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc">Starting with version 3.5.0, <span class="- topic/ph ph">Data Collector</span>
        introduces a new Databricks Job Launcher executor and has removed the ability to use the
        Spark executor with Databricks.</p>
        <p class="- topic/p p">If you upgrade pipelines that include the Spark executor with Databricks, you must update
            the pipeline to use the <a class="- topic/xref xref" href="../Executors/Databricks.html#concept_fdc_qrx_jz">Databricks Job Launcher executor</a> after you upgrade. </p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title66" id="concept_jzf_b3b_xdb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title66">Update Pipelines to Use Spark 2.1 or Later</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 3.3.0, <span class="- topic/ph ph">Data Collector</span>
            removes support for Spark 1.x and introduces cluster streaming mode with support for
            Kafka security features such as SSL/TLS and Kerberos authentication using Spark 2.1 or
            later and Kafka 0.10.0.0 or later. For more information about these changes, see <a class="- topic/xref xref" href="PreUpgrade.html#concept_zgm_vj2_mdb" title="Data Collector version 3.3.0 introduces cluster streaming mode with support for Kafka security features such as SSL/TLS and Kerberos authentication using Spark 2.1 or later and Kafka 0.10.0.0 or later.">Upgrade to Spark 2.1 or Later</a>.</p>
        <p class="- topic/p p">After upgrading the Cloudera CDH distribution, Hortonworks Hadoop distribution, or Kafka
            system to the required version and then upgrading <span class="- topic/ph ph">Data Collector</span>,
            you must update pipelines to use Spark 2.1 or later. Pipelines that use the earlier
            systems will not run until you perform these tasks:</p>
        <ol class="- topic/ol ol" id="concept_jzf_b3b_xdb__ol_t1f_w3b_xdb" data-ofbid="concept_jzf_b3b_xdb__ol_t1f_w3b_xdb">
            <li class="- topic/li li"><a class="- topic/xref xref" href="../Installation/AddtionalStageLibs.html#concept_fb2_qmn_bz">Install the
                    stage library</a> for the upgraded system.</li>
            <li class="- topic/li li">In the pipeline, edit the stage and select the appropriate stage library.</li>
            <li class="- topic/li li">If the pipeline includes a Spark Evaluator processor and the Spark application was
                previously built with Spark 2.0 or earlier, rebuild it with Spark 2.1. <p class="- topic/p p">Or if you
                    used Scala to write the custom Spark class, and the application was compiled
                    with Scala 2.10, recompile it with Scala 2.11.</p></li>
            <li class="- topic/li li">If the pipeline includes a Spark executor and the Spark application was previously
                built with Spark 2.0 or earlier, rebuild it with Spark 2.1 and Scala 2.11. </li>
        </ol>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title67" id="concept_hxf_3yd_qcb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title67">Update Value Replacer Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc">Starting with version 3.1.0.0, <span class="- topic/ph ph">Data Collector</span>
        introduces a new Field Replacer processor and has deprecated the Value Replacer processor. </p>
        <p class="- topic/p p">The Field Replacer processor lets you define more complex conditions to replace values.
            For example, unlike the Value Replacer, the Field Replacer can replace values that fall
            within a specified range.</p>
        <p class="- topic/p p">You can continue to use the deprecated Value Replacer processor in pipelines. However,
            the processor will be removed in a future release - so we recommend that you update
            pipelines to use the Field Replacer as soon as possible.</p>
        <p class="- topic/p p">To update your pipelines, replace the Value Replacer processor with the <a class="- topic/xref xref" href="../Processors/FieldReplacer.html#concept_rw4_2d3_4cb">Field Replacer
                processor</a>. The Field Replacer replaces values in fields with nulls or with
            new values. In the Field Replacer, use field path expressions to replace values based on
            a condition. </p>
        <p class="- topic/p p">For example, let's say that your Value Replacer processor is configured to replace null
            values in the product_id field with "NA" and to replace the "0289" store ID with "0132"
            as follows:</p>
        <p class="- topic/p p"><img class="- topic/image image" id="concept_hxf_3yd_qcb__image_psh_jby_tcb" src="../Graphics/UpgradeValueReplacer.png" height="171" width="729"/></p>
        <p class="- topic/p p">In the Field Replacer processor, you can configure the same replacements using field path
            expressions as follows:</p>
        <p class="- topic/p p"><img class="- topic/image image" id="concept_hxf_3yd_qcb__image_uq2_pcy_tcb" src="../Graphics/UpgradeValueReplacerWithFieldReplacer.png" height="127" width="794"/></p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title68" id="concept_dzq_djt_vcb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title68">Update Tableau CRM Pipelines</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc">Starting with version 3.1.0.0, the Tableau CRM destination, previously known as the
        Einstein Analytics destination, introduces a new append operation that lets you combine data
        into a single dataset. Configuring the destination to use dataflows to combine data into a
        single dataset has been deprecated.</p>
        <p class="- topic/p p">You can continue to configure the destination to use dataflows. However, dataflows will
            be removed in a future release - so we recommend that you update pipelines to use the
                <a class="- topic/xref xref" href="../Destinations/TableauCRM.html#concept_ryp_g4r_vcb">append
                operation</a> as soon as possible.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title69" id="concept_wnp_scs_wbb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title69">Disable Cloudera Navigator Integration</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 3.0.0.0, the beta version of Cloudera Navigator integration is no
            longer available with <span class="- topic/ph ph">Data Collector</span>.
            Cloudera Navigator integration now requires a paid subscription. For more information
            about purchasing Cloudera Navigator integration, <a class="- topic/xref xref" href="https://streamsets.com/contact-us/" target="_blank" rel="external noopener">contact <span class="- topic/ph ph">StreamSets</span></a>.</p>
        <p class="- topic/p p">When upgrading from a <span class="- topic/ph ph">Data Collector</span>
            version with Cloudera Navigator integration enabled to version 3.0.0.0 without a paid
            subscription, perform the following post-upgrade task:</p>
        <div class="- topic/p p">Do not include the Cloudera Navigator properties when you configure the 3.0.0.0 <span class="- topic/ph ph">Data Collector</span>
            configuration file, sdc.properties. The properties to omit are: <ul class="- topic/ul ul" id="concept_wnp_scs_wbb__ul_lrx_xcs_wbb" data-ofbid="concept_wnp_scs_wbb__ul_lrx_xcs_wbb">
                <li class="- topic/li li">lineage.publishers</li>
                <li class="- topic/li li">lineage.publisher.navigator.def</li>
                <li class="- topic/li li">All other properties with the lineage.publisher.navigator prefix</li>
            </ul></div>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title70" id="concept_hky_ljl_wbb">
 <h2 class="- topic/title title topictitle2" id="ariaid-title70">JDBC Multitable Consumer Query Interval Change</h2>
 <div class="- topic/body concept/conbody body conbody">
  <p class="- topic/p p">Starting with version 3.0.0.0, the Query Interval property is replaced by the new Queries per
            Second property.</p>
        <div class="- topic/p p">Upgraded pipelines with the Query Interval specified using a constant or the default
            format and unit of time, ${10 * SECONDS}, have the new Queries per Second property
            calculated and defined as follows:
            <pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>Queries per Second = Number of Threads / Query Interval (in seconds)</code></pre>For
            example, say the origin uses three threads and Query Interval is configured for ${15 *
            SECONDS}. Then, the upgraded origin sets Queries per Second to 3 divided by 15, which
            is 0.2. This means the origin will run a maximum of two queries every 10 seconds. </div>
        <p class="- topic/p p">The upgrade would occur the same way if Query Interval were set to 15. </p>
        <p class="- topic/p p">Pipelines with a Query Interval configured to use other units of time, such as ${.1
            * MINUTES}, or configured with a different expression format, such as ${SECONDS * 5}, are
            upgraded to use the default for Queries per Second, which is 10. This means the pipeline
            will run a maximum of 10 queries per second. The fact that these expressions are not
            upgraded correctly is noted in the Data Collector log.</p>
        <p class="- topic/p p">Update the Queries per Second property as needed after the upgrade. </p>
 </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title71" id="concept_ys3_bjl_wbb">
 <h2 class="- topic/title title topictitle2" id="ariaid-title71">Update JDBC Query Consumer Pipelines used for SQL Server CDC Data</h2>
 <div class="- topic/body concept/conbody body conbody">
  <p class="- topic/p p">Starting with version 3.0.0.0, the Microsoft SQL Server CDC functionality in the JDBC Query
            Consumer origin has been deprecated and will be removed in a future release. </p>
        <div class="- topic/p p">For pipelines that use the JDBC Query Consumer to process Microsoft SQL Server CDC data,
            replace the JDBC Query Consumer origin with another origin: <ul class="- topic/ul ul" id="concept_ys3_bjl_wbb__ul_ylc_fjl_wbb" data-ofbid="concept_ys3_bjl_wbb__ul_ylc_fjl_wbb">
                <li class="- topic/li li">To read data from Microsoft SQL Server CDC tables, use the <a class="- topic/xref xref" href="../Origins/SQLServerCDC.html#concept_ut3_ywc_v1b">SQL Server CDC
                        Client origin</a>. </li>
                <li class="- topic/li li">To read data from Microsoft SQL Server change tracking tables, use the <a class="- topic/xref xref" href="../Origins/SQLServerChange.html#concept_ewq_b2s_r1b">SQL Server Change
                        Tracking origin</a>. </li>
            </ul></div>
 </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title72" id="concept_ncs_5jl_wbb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title72">Update MongoDB Destination Upsert Pipelines</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 3.0.0.0, the MongoDB destination supports the replace and update
            operation codes, and no longer supports the upsert operation code. You can use a new
            Upsert flag in conjunction with Replace and Update. </p>
        <p class="- topic/p p">After upgrading from a version earlier than 3.0.0.0, update the pipeline as needed to
            ensure that records passed to the destination do not use the upsert operation code
            (sdc.operation.type = 4). Records that use the upsert operation code will be sent to
            error. </p>
        <p class="- topic/p p">In previous releases, records flagged for upsert were treated in the MongoDB system as
            Replace operations with the Upsert flag set. </p>
        <div class="- topic/p p">If you want to replicate the upsert behavior from earlier releases, perform the following
                steps:<ol class="- topic/ol ol" id="concept_ncs_5jl_wbb__ol_hll_bkl_wbb" data-ofbid="concept_ncs_5jl_wbb__ol_hll_bkl_wbb">
                <li class="- topic/li li">Configure the pipeline to use the Replace operation code. <p class="- topic/p p">Make sure that the
                        sdc.operation.type is set to 7 for Replace instead of 4 for Upsert.</p></li>
                <li class="- topic/li li">In the MongoDB destination, enable the new Upsert property.</li>
            </ol></div>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title73" id="concept_o1s_gkl_wbb">
 <h2 class="- topic/title title topictitle2" id="ariaid-title73">Time Zones in Stages</h2>
 <div class="- topic/body concept/conbody body conbody">
  <p class="- topic/p p">Starting with version 3.0.0.0, time zones have been organized and updated to use JDK 8 names.
            This should make it easier to select time zones in stage properties. </p>
        <p class="- topic/p p">In the rare case that an upgraded pipeline uses a format not supported by JDK 8, edit the
            pipeline to select a compatible time zone. </p>
 </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title74" id="concept_epj_gqd_rx">
 <h2 class="- topic/title title topictitle2" id="ariaid-title74">Update Kudu Pipelines</h2>
 <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Consider the following upgrade tasks for Kudu pipelines, based on the version that you
            are upgrading from:</p>
        <dl class="- topic/dl dl">
            
                <dt class="- topic/dt dt dlterm">Upgrade from versions earlier than 3.0.0.0</dt>
                <dd class="- topic/dd dd">Starting with version 3.0.0.0, if the destination receives a change data capture
                    log from the following source systems, you must specify the source system so
                    that the destination can determine the format of the log: Microsoft SQL Server,
                    Oracle CDC Client, MySQL Binary Log, or MongoDB Oplog. </dd>
                <dd class="- topic/dd dd ddexpand">Previously, the Kudu destination could not directly receive changed data from
                    these source systems. You either had to include a scripting processor in the
                    pipeline to modify the field paths in the record to a format that the
                    destination could read, or you had to add multiple Kudu destinations to the
                    pipeline - one per operation type - and include a Stream Selector processor to
                    send records to the destination by operation type. </dd>
                <dd class="- topic/dd dd ddexpand">If you implemented one of these workarounds, then after upgrading, modify the
                    pipeline to remove the scripting processor or the Stream Selector processor and
                    the multiple destinations. In the Kudu destination, set the Change Log Format to
                    the appropriate format of the log: Microsoft SQL Server, Oracle CDC Client,
                    MySQL Binary Log, or MongoDB Oplog. </dd>
            
            
                <dt class="- topic/dt dt dlterm">Upgrade from versions earlier than 2.2.0.0</dt>
                <dd class="- topic/dd dd">Starting with version 2.2.0.0, <span class="- topic/ph ph">Data Collector</span> provides support for Apache Kudu version 1.0.x and no longer supports earlier
                    Kudu versions. To upgrade pipelines that contain a Kudu destination from <span class="- topic/ph ph">Data Collector</span> versions earlier than 2.2.0.0, upgrade your Kudu cluster and then add a stage
                    alias for the earlier Kudu version to the <span class="- topic/ph ph">Data Collector</span> configuration file, <code class="+ topic/ph pr-d/codeph ph codeph">$SDC_CONF/sdc.properties</code>. </dd>
                <dd class="- topic/dd dd ddexpand">
                    <p class="- topic/p p">The configuration file includes stage aliases to enable backward
                        compatibility for pipelines created with earlier versions of <span class="- topic/ph ph">Data Collector</span>.</p>
                    <p class="- topic/p p">To update Kudu pipelines:</p>
                    <div class="- topic/p p">
                        <ol class="- topic/ol ol" id="concept_epj_gqd_rx__ol_gpt_ssd_rx" data-ofbid="concept_epj_gqd_rx__ol_gpt_ssd_rx">
                            <li class="- topic/li li">Upgrade your Kudu cluster to version 1.0.x.<p class="- topic/p p">For instructions, see
                                    the Apache Kudu documentation.</p></li>
                            <li class="- topic/li li">Open the <code class="+ topic/ph pr-d/codeph ph codeph">$SDC_CONF/sdc.properties</code> file and locate
                                the following
                                comment:<pre class="+ topic/pre pr-d/codeblock pre codeblock"><code># Stage aliases for mapping to keep backward compatibility on pipelines when stages move libraries</code></pre></li>
                            <li class="- topic/li li">Below the comment, add a stage alias for the earlier Kudu version as
                                    follows:<pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>stage.alias.streamsets-datacollector-apache-kudu-&lt;version&gt;-lib, com_streamsets_pipeline_stage_destination_kudu_KuduDTarget = streamsets-datacollector-apache-kudu_1_0-lib, com_streamsets_pipeline_stage_destination_kudu_KuduDTarget</code></pre><div class="- topic/p p">Where
                                    &lt;version&gt; is the earlier Kudu version: 0_7, 0_8, or 0_9. For
                                    example, if you previously used Kudu version 0.9, add the
                                    following stage
                                    alias:<pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>stage.alias.streamsets-datacollector-apache-kudu-0_9-lib, com_streamsets_pipeline_stage_destination_kudu_KuduDTarget = streamsets-datacollector-apache-kudu_1_0-lib, com_streamsets_pipeline_stage_destination_kudu_KuduDTarget</code></pre></div></li>
                            <li class="- topic/li li">Restart <span class="- topic/ph ph">Data Collector</span> to enable the changes.</li>
                        </ol>
                    </div>
                </dd>
            
        </dl>
 </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title75" id="concept_hhc_15s_dbb">
 <h2 class="- topic/title title topictitle2" id="ariaid-title75">Update JDBC Multitable Consumer Pipelines</h2>
 <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 2.7.1.1, the JDBC Multitable Consumer origin can now read from
            views in addition to tables. The origin now reads from all tables and all views that are
            included in the defined table configurations. </p>
        <p class="- topic/p p">When upgrading pipelines that contain a JDBC Multitable Consumer origin from <span class="- topic/ph ph">Data Collector</span>
            versions earlier than 2.7.1.1, review the table configurations to determine if any views
            are included. If a table configuration includes views that you do not want to read,
            simply exclude them from the configuration.</p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title76" id="concept_yyv_v45_zw">
 <h2 class="- topic/title title topictitle2" id="ariaid-title76">Update Vault Pipelines</h2>
 <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with version 2.7.0.0, <span class="- topic/ph ph">Data Collector</span>
            introduces a credential store API and credential expression language functions to access
            Hashicorp Vault secrets. </p>
        
        <p class="- topic/p p">In addition, the <span class="- topic/ph ph">Data Collector</span>
            Vault integration now relies on Vault's App Role authentication backend. </p>
        <p class="- topic/p p">Previously, <span class="- topic/ph ph">Data Collector</span>
            used Vault functions to access Vault secrets and relied on Vault's App ID authentication
            backend. StreamSets has deprecated the Vault functions, and Hashicorp has deprecated the
            App ID authentication backend. </p>
        <p class="- topic/p p">After upgrading, update pipelines that use Vault functions in one of the following
            ways:</p>
        <dl class="- topic/dl dl">
            
                <dt class="- topic/dt dt dlterm">Use the new credential store expression language functions (recommended)</dt>
                <dd class="- topic/dd dd">To use the new credential functions, install the Vault credential store stage
                    library and define the configuration properties used to connect to Vault. Then,
                    update each upgraded pipeline that includes stages using Vault functions to use
                    the new credential functions to retrieve the credential values.</dd>
                <dd class="- topic/dd dd ddexpand">For details on using the Vault credential store system, see <a class="- topic/xref xref" href="../Configuration/CredentialStores.html#concept_s3y_dps_51b">Hashicorp Vault</a>.</dd>
            
            
                <dt class="- topic/dt dt dlterm">Continue to use the deprecated Vault functions</dt>
                <dd class="- topic/dd dd">You can continue to use the deprecated Vault functions in pipelines. However,
                    the functions will be removed in a future release — so we recommend that you use
                    the credential functions as soon as possible.</dd>
                <dd class="- topic/dd dd ddexpand">To continue to use the Vault functions, make the following changes after
                    upgrading:</dd>
                <dd class="- topic/dd dd ddexpand">
                    <ul class="- topic/ul ul" id="concept_yyv_v45_zw__ul_emz_ltw_51b" data-ofbid="concept_yyv_v45_zw__ul_emz_ltw_51b">
                        <li class="- topic/li li">Uncomment the single Vault EL property in the
                                <code class="+ topic/ph pr-d/codeph ph codeph">$SDC_CONF/vault.properties</code> file.</li>
                        <li class="- topic/li li">The remaining Vault configuration properties have been moved to the
                                <samp class="+ topic/ph sw-d/systemoutput ph systemoutput sysout">$SDC_CONF/credential-stores.properties</samp>
                            file. The properties use the same name, with an added
                            "credentialStore.vault.config" prefix. Copy any values that you
                            customized in the previous <code class="+ topic/ph pr-d/codeph ph codeph">vault.properties</code> file into
                            the same property names in the
                                <code class="+ topic/ph pr-d/codeph ph codeph">credential-stores.properties</code> file.</li>
                        <li class="- topic/li li">Define the Vault Role ID and Secret ID that <span class="- topic/ph ph">Data Collector</span> uses to authenticate with Vault in the
                                <code class="+ topic/ph pr-d/codeph ph codeph">credential-stores.properties</code> file. Defining an App
                            ID for <span class="- topic/ph ph">Data Collector</span> is deprecated and will be removed in a future release.</li>
                    </ul>
                </dd>
                <dd class="- topic/dd dd ddexpand">For details on using the Vault functions, see <a class="- topic/xref xref" href="../Configuration/Vault-Overview.html#concept_bmq_gl1_mw">Accessing Hashicorp Vault Secrets with Vault Functions (deprecated)</a>.</dd>
            
        </dl>
 </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title77" id="concept_cmh_ryd_pz">
 <h2 class="- topic/title title topictitle2" id="ariaid-title77">Configure JDBC Producer Schema Names</h2>
    <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p">Starting with Data Collector version 2.5.0.0, you can use a Schema Name property to
            specify the database or schema name. In previous releases, you specified the database or
            schema name in the Table Name property. </p>
        <p class="- topic/p p">Upgrading from a previous release does not require changing any existing configuration at
            this time. But we recommend using the new Schema Name property, since the ability to
            specify a database or schema name with the table name might be deprecated in the future. </p>
    </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title78" id="concept_gk3_s5l_nz">
 <h2 class="- topic/title title topictitle2" id="ariaid-title78">Evaluate Precondition Error Handling</h2>
 <div class="- topic/body concept/conbody body conbody">
  <p class="- topic/p p">Starting with Data Collector version 2.5.0.0, precondition error handling has changed. </p>
        <p class="- topic/p p">The Precondition stage property allows you to define conditions that must be met for a
            record to enter the stage. Previously, records that did not meet all specified
            preconditions were passed to the pipeline for error handling. That is, the records were
            processed based on the Error Records pipeline property. </p>
        <p class="- topic/p p">With version 2.5.0.0, records that do not meet the specified preconditions are handled by
            the error handling configured for the stage. Stage error handling occurs based on the On
            Record Error property on the General tab of the stage.</p>
        <p class="- topic/p p">Review pipelines that use preconditions to verify that this change does not adversely
            affect the behavior of the pipelines.</p>
 </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title79" id="concept_czx_bbn_gz">
 <h2 class="- topic/title title topictitle2" id="ariaid-title79">Authentication for Docker Image</h2>
 <div class="- topic/body concept/conbody body conbody">
  <p class="- topic/p p">Starting with <span class="- topic/ph ph">Data Collector</span>
            version 2.4.1.0, the Docker image now uses the form type of file-based authentication by
            default. As a result, you must use a <span class="- topic/ph ph">Data Collector</span>
            user account to log in to the <span class="- topic/ph ph">Data Collector</span>. If
            you haven't set up custom user accounts, you can use the admin account shipped with the
                <span class="- topic/ph ph">Data Collector</span>. The default login is: <code class="+ topic/ph pr-d/codeph ph codeph">admin / admin</code>.</p>
        <p class="- topic/p p">Earlier versions of the Docker image used no authentication.</p>
 </div>
</article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title80" id="concept_zbn_fpw_xy">
 <h2 class="- topic/title title topictitle2" id="ariaid-title80">Configure Pipeline Permissions</h2>
 <div class="- topic/body concept/conbody body conbody">
        <p class="- topic/p p"><span class="- topic/ph ph">Data Collector</span>
            version 2.4.0.0 is designed for multitenancy and enables you to share and grant
            permissions on pipelines. Permissions determine the access level that users and groups
            have on pipelines.</p>
        <p class="- topic/p p">In earlier versions of <span class="- topic/ph ph">Data Collector</span>
            without pipeline permissions, pipeline access is determined by roles. For example, any
            user with the Creator role could edit any pipeline. </p>
        <p class="- topic/p p">In version 2.4.0.0, roles are augmented with pipeline permissions. In addition to having
            the necessary role, users must also have the appropriate permissions to perform pipeline
            tasks. </p>
        <p class="- topic/p p">For example, to edit a pipeline in 2.4.0.0, a user with the Creator role must also have
            read and write permission on the pipeline. Without write permission, the user cannot
            edit the pipeline. Without read permission, the user cannot see the pipeline at all. It
            does not display in the list of available pipelines. </p>
        <div class="- topic/p p">
            <div class="- topic/note note note note_note"><span class="note__title">Note:</span> With pipeline permissions enabled, all upgraded pipelines are initially visible
                only to users with the Admin role and the pipeline owner — the user who created the
                pipeline. To enable other users to work with pipelines, have an Admin user configure
                the appropriate permissions for each pipeline. </div>
        </div>
        <p class="- topic/p p">In Data Collector version 2.5.0.0, pipeline permissions are disabled by default. To
            enable pipeline permissions, set the pipeline.access.control.enabled property to true in
            the Data Collector configuration file.</p>
        <div class="- topic/note note tip note_tip"><span class="note__title">Tip:</span> You can configure pipeline permissions when permissions are disabled. Then,
            you can enable the pipeline permissions property after pipeline permissions are properly
            configured. </div>
        <p class="- topic/p p">For more information about roles and permissions, see <a class="- topic/xref xref" href="../Configuration/RolesandPermissions.html#concept_k1r_prc_yy">Roles and Permissions</a>. For details
            about configuring pipeline permissions, see <a class="- topic/xref xref" href="../Pipeline_Maintenance/PipelineMaintenance_title.html#concept_jrg_1vy_wy">Sharing Pipelines</a>. </p>
    </div>
</article><article class="- topic/topic task/task topic task nested1" aria-labelledby="ariaid-title81" id="task_afy_k12_ry">
    <h2 class="- topic/title title topictitle2" id="ariaid-title81">Update Elasticsearch Pipelines</h2>
    
    <div class="- topic/body task/taskbody body taskbody"><p class="- topic/shortdesc shortdesc"><span class="- topic/ph ph">Data Collector</span>
        version 2.3.0.0 includes an enhanced Elasticsearch destination that uses the Elasticsearch
        HTTP API. To upgrade pipelines that use the Elasticsearch destination from <span class="- topic/ph ph">Data Collector</span>
        versions earlier than 2.3.0.0, you must review the value of the Default Operation
        property.</p>
        <section class="- topic/section task/context section context">
            <p class="- topic/p p">Review all upgraded Elasticsearch destinations to ensure that the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Default
                    Operation</span> property is set to the correct operation. Upgraded
                Elasticsearch destinations have the Default Operation property set based on the
                configuration for the Enable Upsert property:</p>
            <ul class="- topic/ul ul" id="task_afy_k12_ry__ul_hyw_pvt_5y" data-ofbid="task_afy_k12_ry__ul_hyw_pvt_5y">
                <li class="- topic/li li">With upsert enabled, the default operation is set to INDEX.</li>
                <li class="- topic/li li">With upsert not enabled, the default operation is set to CREATE, which requires a
                    DocumentId.</li>
            </ul>
            <div class="- topic/note note note note_note"><span class="note__title">Note:</span> The Elasticsearch version 5 stage library is compatible with all versions of
                Elasticsearch. Earlier stage library versions have been removed.</div>
        </section>
    </div>
</article></article></article></main></div>

                        
                        
                        


                    </div>
                    
                </div>
            </div>


        </div> <nav class="navbar navbar-default wh_footer" data-whc_version="25.0">
  <div class=" footer-container  mx-auto">
    <!-- script for Data Collector, all flavors, but only used when accessed directly, not from portal --><script>
  // Standard Google Analytics (analytics.js) bootstrap snippet.
  // Defines window.ga as a command queue (i[r]) that buffers calls made before
  // the library loads, stamps the load time (i[r].l), then creates an async
  // <script> tag for analytics.js and inserts it before the first existing
  // <script> element on the page.
  (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
  (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
  m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
  })(window,document,'script','https://www.google-analytics.com/analytics.js','ga');

  // Create the default tracker for property UA-60917135-3 with automatic
  // cookie-domain configuration, then record a pageview hit.
  ga('create', 'UA-60917135-3', 'auto');
  ga('send', 'pageview');
</script>
  </div>
</nav>

        
        <div id="go2top">
            <span class="oxy-icon oxy-icon-up"></span>
        </div>
        
        <!-- The modal container for images -->
        <div id="modal_img_large" class="modal">
            <span class="close oxy-icon oxy-icon-remove"></span>
            <!-- Modal Content (The Image) -->
            <div id="modal_img_container"></div>
            <!-- Modal Caption (Image Text) -->
            <div id="caption"></div>
        </div>
        
        
        © 2023 StreamSets, Inc.

    </body>
</html>