<!DOCTYPE html><html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en" data-whc_version="25.0">
    <head>
        <!-- Favicons -->
        <link rel="shortcut icon" href="../../../oxygen-webhelp/template/images/favicon.png"/>
        <link rel="icon" href="../../../oxygen-webhelp/template/images/favicon.png"/>
        <!-- Document metadata generated by DITA-OT / Oxygen WebHelp -->
        <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
        <meta name="viewport" content="width=device-width, initial-scale=1.0"/>
        <meta http-equiv="X-UA-Compatible" content="IE=edge"/>
        <meta name="copyright" content="(C) Copyright 2024"/>
        <meta name="generator" content="DITA-OT"/>
        <meta name="description" content="You can use several solutions to load data into a Delta Lake table on Databricks. Before continuing with one of the solutions, ensure that you have completed all of the required prerequisites in ..."/>
        <meta name="prodname" content="Data Collector"/>
        <meta name="version" content="3"/>
        <meta name="release" content="16"/>
        <meta name="modification" content="0"/>
        <title>Loading Data into Databricks Delta Lake</title><!--  Generated with Oxygen version 25.1, build number 2023042410.  -->
        <!-- WebHelp bookkeeping: path back to the site root plus this topic's identity in the TOC. -->
        <meta name="wh-path2root" content="../../../"/>
        <meta name="wh-toc-id" content="concept_a5b_wvk_ckb-d16893e60338"/>
        <meta name="wh-source-relpath" content="datacollector/UserGuide/Solutions/DeltaLake.dita"/>
        <meta name="wh-out-relpath" content="datacollector/UserGuide/Solutions/DeltaLake.html"/>
        <!-- Shared WebHelp styles; the buildId query string is a cache-buster between builds. -->
        <link rel="stylesheet" type="text/css" href="../../../oxygen-webhelp/app/commons.css?buildId=2023042410"/>
        <link rel="stylesheet" type="text/css" href="../../../oxygen-webhelp/app/topic.css?buildId=2023042410"/>
        <!-- Search properties/index load eagerly; UI scripts are deferred until DOM parse completes. -->
        <script src="../../../oxygen-webhelp/app/options/properties.js?buildId=20240802104629"></script>
        <script src="../../../oxygen-webhelp/app/localization/strings.js?buildId=2023042410"></script>
        <script src="../../../oxygen-webhelp/app/search/index/keywords.js?buildId=20240802104629"></script>
        <script defer="defer" src="../../../oxygen-webhelp/app/commons.js?buildId=2023042410"></script>
        <script defer="defer" src="../../../oxygen-webhelp/app/topic.js?buildId=2023042410"></script>
        <!-- Theme stylesheet, then the publication skin overrides — kept last so they win the cascade. -->
        <link rel="stylesheet" type="text/css" href="../../../oxygen-webhelp/template/light.css?buildId=2023042410"/>
        <link rel="stylesheet" type="text/css" href="../../../skin.css"/>
    </head>

    <body class="wh_topic_page frmBody">
        
        
        

        
<!-- Page banner: publication title (links back to the publication home page) and,
     when enabled, the link to the index-terms page. Generated by Oxygen WebHelp. -->
<nav class="navbar navbar-default wh_header" data-whc_version="25.0">
    <div class="container-fluid">
        <div class="wh_header_flex_container navbar-nav navbar-expand-md navbar-dark">
            <div class="wh_logo_and_publication_title_container">
                <div class="wh_logo_and_publication_title">
                    
                    <!--
                            This component will be generated when the following parameters are specified in the transformation scenario:
                            'webhelp.logo.image' and 'webhelp.logo.image.target.url'.
                            See: http://oxygenxml.com/doc/versions/17.1/ug-editor/#topics/dita_webhelp_output.html.
                    -->
                    
                    <div class=" wh_publication_title "><a href="../../../index.html"><span class="booktitle">  <span class="ph mainbooktitle"><span class="ph">Data Collector</span> User Guide</span>  </span></a></div>
                    
                </div>
                
                <!-- The menu button for mobile devices is copied in the output only when the 'webhelp.show.top.menu' parameter is set to 'yes' -->
                
            </div>

            <div class="wh_top_menu_and_indexterms_link collapse navbar-collapse">
                
                
                <div class=" wh_indexterms_link "><a href="../../../indexTerms.html" title="Index" aria-label="Go to index terms page"><span>Index</span></a></div>
                
            </div>
        </div>
    </div>
</nav>

        <div class=" wh_search_input navbar-form wh_topic_page_search search " role="form">
<!-- Quick-search box: submits the query (GET, param "searchQuery") to the
     publication-wide search page.
     NOTE(review): role="form" on this wrapper duplicates the landmark of the nested
     <form role="search"> below — presumably kept as a hook for the generated skin
     CSS/JS; verify against the WebHelp assets before removing. -->


<form id="searchForm" method="get" role="search" action="../../../search.html"><div><input type="search" placeholder="Search " class="wh_search_textfield" id="textToSearch" name="searchQuery" aria-label="Search query" required="required"/><button type="submit" class="wh_search_button" aria-label="Search"><span class="search_input_text">Search</span></button></div></form>

</div>
        
        <div class="container-fluid">
            <div class="row">

                <nav class="wh_tools d-print-none">
                    <!-- Topic toolbar: breadcrumb trail plus per-page tools
                         (highlight toggle, expand/collapse sections, prev/next topic links, print). -->
<div data-tooltip-position="bottom" class=" wh_breadcrumb "><ol class="d-print-none"><li><span class="home"><a href="../../../index.html"><span>Home</span></a></span></li><li><div class="topicref" data-id="concept_zq5_pb4_flb"><div class="title"><a href="../../../datacollector/UserGuide/Solutions/Solutions-title.html">Solutions</a></div></div></li><li class="active"><div class="topicref" data-id="concept_a5b_wvk_ckb"><div class="title"><a href="../../../datacollector/UserGuide/Solutions/DeltaLake.html#concept_a5b_wvk_ckb">Loading Data into Databricks Delta Lake</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li></ol></div>



                    <div class="wh_right_tools "><button class="wh_hide_highlight" aria-label="Toggle search highlights" title="Toggle search highlights"></button><button class="webhelp_expand_collapse_sections" data-next-state="collapsed" aria-label="Collapse sections" title="Collapse sections"></button><div class=" wh_navigation_links "><span id="topic_navigation_links" class="navheader">
  
<span class="navprev"><a class="- topic/link link" href="../../../datacollector/UserGuide/Solutions/EventStorage.html#concept_ocb_nnl_px" title="Preserving an Audit Trail of Events" aria-label="Previous topic: Preserving an Audit Trail of Events" rel="prev"></a></span>  
<span class="navnext"><a class="- topic/link link" href="../../../datacollector/UserGuide/Solutions/HiveDrift-Overview.html#concept_phk_bdf_2w" title="Drift Synchronization Solution for Hive" aria-label="Next topic: Drift Synchronization Solution for Hive" rel="next"></a></span>  </span></div>
<!--External resource link-->
<!-- Print button. Fix: the handler attribute was "onClick"; attribute names are
     case-sensitive in XHTML, so when this page is parsed as application/xhtml+xml
     the handler would never fire. Lowercase "onclick" is valid in both the HTML and
     XHTML serializations. (Inline handler kept to match the generated output style,
     but note it is incompatible with a strict CSP.) -->
<div class=" wh_print_link print d-none d-md-inline-block "><button onclick="window.print()" title="Print this page" aria-label="Print this page"></button></div>
                        
                        
                        
                        
                    </div>
                </nav>
            </div>

            

<div class="wh_content_area">
                <div class="row">
                    


                        <nav role="navigation" id="wh_publication_toc" class="col-lg-3 col-md-3 col-sm-12 d-md-block d-none d-print-none">
<div id="wh_publication_toc_content">

<!-- Publication table of contents, serialized by the WebHelp build as an ARIA tree.
     Each <li role="treeitem"> carries a data-tocid/data-id pair and a data-state
     ("not-ready" = children loaded lazily, "expanded", or "leaf"); the deferred
     topic.js wires the expand/collapse buttons. The markup below is machine-generated
     and line-wrapped at arbitrary points — do not hand-edit or reflow it. -->

                            <div class=" wh_publication_toc " data-tooltip-position="right"><span class="expand-button-action-labels"><span id="button-expand-action" role="button" aria-label="Expand"></span><span id="button-collapse-action" role="button" aria-label="Collapse"></span><span id="button-pending-action" role="button" aria-label="Pending"></span></span><ul role="tree" aria-label="Table of Contents"><li role="treeitem" aria-expanded="false"><div data-tocid="concept_htw_ghg_jq-d16893e53" class="topicref" data-id="concept_htw_ghg_jq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_htw_ghg_jq-d16893e53-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Getting_Started/GettingStarted_Title.html#concept_htw_ghg_jq" id="concept_htw_ghg_jq-d16893e53-link">Getting Started</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_l2v_nlp_mpb-d16893e331" class="topicref" data-id="concept_l2v_nlp_mpb" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_l2v_nlp_mpb-d16893e331-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/ReleaseNotes/ReleaseNotes.html#concept_l2v_nlp_mpb" id="concept_l2v_nlp_mpb-d16893e331-link">Release Notes</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_l4q_flb_kr-d16893e2582" class="topicref" data-id="concept_l4q_flb_kr" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_l4q_flb_kr-d16893e2582-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Installation/Install_title.html" id="concept_l4q_flb_kr-d16893e2582-link">Installation</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_ylh_yyz_ky-d16893e3984" class="topicref" data-id="concept_ylh_yyz_ky" 
data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_ylh_yyz_ky-d16893e3984-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Configuration/Config_title.html" id="concept_ylh_yyz_ky-d16893e3984-link">Configuration</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_ejk_f1f_5v-d16893e7058" class="topicref" data-id="concept_ejk_f1f_5v" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_ejk_f1f_5v-d16893e7058-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Upgrade/Upgrade_title.html" id="concept_ejk_f1f_5v-d16893e7058-link">Upgrade</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_qsw_cjy_bt-d16893e10103" class="topicref" data-id="concept_qsw_cjy_bt" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_qsw_cjy_bt-d16893e10103-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Pipeline_Design/PipelineDesign_title.html" id="concept_qsw_cjy_bt-d16893e10103-link">Pipeline Concepts and Design</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_qn1_wn4_kq-d16893e11199" class="topicref" data-id="concept_qn1_wn4_kq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_qn1_wn4_kq-d16893e11199-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Pipeline_Configuration/PipelineConfiguration_title.html" id="concept_qn1_wn4_kq-d16893e11199-link">Pipeline Configuration</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_hdr_gyw_41b-d16893e13057" class="topicref" data-id="concept_hdr_gyw_41b" data-state="not-ready"><span role="button" tabindex="0" 
aria-labelledby="button-expand-action concept_hdr_gyw_41b-d16893e13057-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Data_Formats/DataFormats-Title.html" id="concept_hdr_gyw_41b-d16893e13057-link">Data Formats</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_yjl_nc5_jq-d16893e14164" class="topicref" data-id="concept_yjl_nc5_jq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_yjl_nc5_jq-d16893e14164-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Origins/Origins_title.html" id="concept_yjl_nc5_jq-d16893e14164-link">Origins</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_yjl_nc5_jq-d16893e35197" class="topicref" data-id="concept_yjl_nc5_jq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_yjl_nc5_jq-d16893e35197-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Processors/Processors_title.html" id="concept_yjl_nc5_jq-d16893e35197-link">Processors</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_agj_cfj_br-d16893e44037" class="topicref" data-id="concept_agj_cfj_br" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_agj_cfj_br-d16893e44037-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Destinations/Destinations-title.html" id="concept_agj_cfj_br-d16893e44037-link">Destinations</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_umc_1lk_fx-d16893e56072" class="topicref" data-id="concept_umc_1lk_fx" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action 
concept_umc_1lk_fx-d16893e56072-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Executors/Executors-title.html" id="concept_umc_1lk_fx-d16893e56072-link">Executors</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_xxd_f5r_kx-d16893e59696" class="topicref" data-id="concept_xxd_f5r_kx" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_xxd_f5r_kx-d16893e59696-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Event_Handling/EventFramework-Title.html#concept_xxd_f5r_kx" id="concept_xxd_f5r_kx-d16893e59696-link">Dataflow Triggers</a></div></div></li><li role="treeitem" aria-expanded="true"><div data-tocid="concept_zq5_pb4_flb-d16893e60134" class="topicref" data-id="concept_zq5_pb4_flb" data-state="expanded"><span role="button" tabindex="0" aria-labelledby="button-collapse-action concept_zq5_pb4_flb-d16893e60134-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Solutions/Solutions-title.html" id="concept_zq5_pb4_flb-d16893e60134-link">Solutions</a></div></div><ul role="group" class="navbar-nav nav-list"><li role="treeitem"><div data-tocid="concept_aw1_p1q_plb-d16893e60156" class="topicref" data-id="concept_aw1_p1q_plb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Solutions/Overview.html#concept_aw1_p1q_plb" id="concept_aw1_p1q_plb-d16893e60156-link">Solutions Overview </a></div></div></li><li role="treeitem"><div data-tocid="concept_jkm_rnz_kx-d16893e60178" class="topicref" data-id="concept_jkm_rnz_kx" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Solutions/Parquet.html#concept_jkm_rnz_kx" id="concept_jkm_rnz_kx-d16893e60178-link">Converting Data to the Parquet Data Format</a><div 
class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_szz_xwm_lx-d16893e60202" class="topicref" data-id="concept_szz_xwm_lx" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Solutions/Impala.html#concept_szz_xwm_lx" id="concept_szz_xwm_lx-d16893e60202-link">Automating Impala Metadata Updates for Drift Synchronization for Hive</a></div></div></li><li role="treeitem"><div data-tocid="concept_d1q_xl4_lx-d16893e60224" class="topicref" data-id="concept_d1q_xl4_lx" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Solutions/FileManagement.html#concept_d1q_xl4_lx" id="concept_d1q_xl4_lx-d16893e60224-link">Managing Output Files</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_kff_ykv_lz-d16893e60248" class="topicref" data-id="concept_kff_ykv_lz" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Solutions/StopPipeline.html#concept_kff_ykv_lz" id="concept_kff_ykv_lz-d16893e60248-link">Stopping a Pipeline After Processing All Available Data</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem"><div data-tocid="concept_vrh_jrs_bbb-d16893e60272" class="topicref" data-id="concept_vrh_jrs_bbb" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Solutions/SqoopReplacement.html#concept_vrh_jrs_bbb" id="concept_vrh_jrs_bbb-d16893e60272-link">Offloading Data from Relational Sources to Hadoop</a></div></div></li><li role="treeitem"><div data-tocid="concept_t2t_lp5_xz-d16893e60294" class="topicref" data-id="concept_t2t_lp5_xz" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a 
href="../../../datacollector/UserGuide/Solutions/SendEmail.html#concept_t2t_lp5_xz" id="concept_t2t_lp5_xz-d16893e60294-link">Sending Email During Pipeline Processing</a></div></div></li><li role="treeitem"><div data-tocid="concept_ocb_nnl_px-d16893e60316" class="topicref" data-id="concept_ocb_nnl_px" data-state="leaf"><span role="button" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Solutions/EventStorage.html#concept_ocb_nnl_px" id="concept_ocb_nnl_px-d16893e60316-link">Preserving an Audit Trail of Events</a></div></div></li><li role="treeitem" aria-expanded="true" class="active"><div data-tocid="concept_a5b_wvk_ckb-d16893e60338" class="topicref" data-id="concept_a5b_wvk_ckb" data-state="expanded"><span role="button" tabindex="0" aria-labelledby="button-collapse-action concept_a5b_wvk_ckb-d16893e60338-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Solutions/DeltaLake.html#concept_a5b_wvk_ckb" id="concept_a5b_wvk_ckb-d16893e60338-link">Loading Data into Databricks Delta Lake</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div><ul role="group" class="navbar-nav nav-list"><li role="treeitem" aria-expanded="false"><div data-tocid="concept_ml2_1vv_yjb-d16893e60362" class="topicref" data-id="concept_ml2_1vv_yjb" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_ml2_1vv_yjb-d16893e60362-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Solutions/DeltaLake.html#concept_ml2_1vv_yjb" id="concept_ml2_1vv_yjb-d16893e60362-link">Bulk Loading Data into a Delta Lake Table</a><div class="wh-tooltip"><p class="shortdesc">This solution describes how to build a pipeline that bulk loads Salesforce data into         a Delta Lake table on Databricks.</p></div></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_uk4_fvv_yjb-d16893e60489" 
class="topicref" data-id="concept_uk4_fvv_yjb" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_uk4_fvv_yjb-d16893e60489-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Solutions/DeltaLake.html#concept_uk4_fvv_yjb" id="concept_uk4_fvv_yjb-d16893e60489-link">Merging Changed Data into a Delta Lake Table</a><div class="wh-tooltip"><p class="shortdesc">This solution describes how to design a pipeline that reads change data capture (CDC)         data from a database and replicates the changes to a Delta Lake table on         Databricks.</p></div></div></div></li></ul></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_phk_bdf_2w-d16893e60616" class="topicref" data-id="concept_phk_bdf_2w" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_phk_bdf_2w-d16893e60616-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Solutions/HiveDrift-Overview.html#concept_phk_bdf_2w" id="concept_phk_bdf_2w-d16893e60616-link">Drift Synchronization Solution for Hive</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_ljq_knr_4cb-d16893e61090" class="topicref" data-id="concept_ljq_knr_4cb" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_ljq_knr_4cb-d16893e61090-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Solutions/JDBC_DriftSyncSolution.html#concept_ljq_knr_4cb" id="concept_ljq_knr_4cb-d16893e61090-link"><span class="ph">Drift Synchronization Solution for PostgreSQL</span></a></div></div></li></ul></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_ugp_kwf_xw-d16893e61337" class="topicref" data-id="concept_ugp_kwf_xw" data-state="not-ready"><span role="button" 
tabindex="0" aria-labelledby="button-expand-action concept_ugp_kwf_xw-d16893e61337-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/DPM/DPM_title.html" id="concept_ugp_kwf_xw-d16893e61337-link">StreamSets Control Hub</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_fyf_gkq_4bb-d16893e62693" class="topicref" data-id="concept_fyf_gkq_4bb" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_fyf_gkq_4bb-d16893e62693-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Edge_Mode/EdgePipelines_title.html" id="concept_fyf_gkq_4bb-d16893e62693-link"><span class="ph">StreamSets Data Collector Edge</span></a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_wwq_gxc_py-d16893e63980" class="topicref" data-id="concept_wwq_gxc_py" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_wwq_gxc_py-d16893e63980-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Multithreaded_Pipelines/MultithreadedPipelines.html#concept_wwq_gxc_py" id="concept_wwq_gxc_py-d16893e63980-link">Multithreaded Pipelines</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_gzw_tdm_p2b-d16893e64187" class="topicref" data-id="concept_gzw_tdm_p2b" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_gzw_tdm_p2b-d16893e64187-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Microservice/Microservice_Title.html#concept_gzw_tdm_p2b" id="concept_gzw_tdm_p2b-d16893e64187-link">Microservice Pipelines</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="Orchestrators_Title-d16893e64348" class="topicref" data-id="Orchestrators_Title" data-state="not-ready"><span 
role="button" tabindex="0" aria-labelledby="button-expand-action Orchestrators_Title-d16893e64348-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Orchestration_Pipelines/OrchestrationPipelines_Title.html#Orchestrators_Title" id="Orchestrators_Title-d16893e64348-link">Orchestration Pipelines</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_wr1_ktz_bt-d16893e64489" class="topicref" data-id="concept_wr1_ktz_bt" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_wr1_ktz_bt-d16893e64489-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/RPC_Pipelines/SDC_RPCpipelines_title.html#concept_wr1_ktz_bt" id="concept_wr1_ktz_bt-d16893e64489-link">SDC RPC Pipelines</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_fpz_5r4_vs-d16893e64679" class="topicref" data-id="concept_fpz_5r4_vs" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_fpz_5r4_vs-d16893e64679-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Cluster_Mode/ClusterPipelines_title.html" id="concept_fpz_5r4_vs-d16893e64679-link">Cluster Pipelines</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_jjk_23z_sq-d16893e65172" class="topicref" data-id="concept_jjk_23z_sq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_jjk_23z_sq-d16893e65172-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Data_Preview/DataPreview_Title.html#concept_jjk_23z_sq" id="concept_jjk_23z_sq-d16893e65172-link">Data Preview</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_pgk_brx_rr-d16893e65458" class="topicref" data-id="concept_pgk_brx_rr" 
data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_pgk_brx_rr-d16893e65458-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Alerts/RulesAlerts_title.html#concept_pgk_brx_rr" id="concept_pgk_brx_rr-d16893e65458-link">Rules and Alerts</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_asx_fdz_sq-d16893e65960" class="topicref" data-id="concept_asx_fdz_sq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_asx_fdz_sq-d16893e65960-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Pipeline_Monitoring/PipelineMonitoring_title.html#concept_asx_fdz_sq" id="concept_asx_fdz_sq-d16893e65960-link">Pipeline Monitoring</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_o3l_dtr_5q-d16893e66304" class="topicref" data-id="concept_o3l_dtr_5q" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_o3l_dtr_5q-d16893e66304-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Pipeline_Maintenance/PipelineMaintenance_title.html#concept_o3l_dtr_5q" id="concept_o3l_dtr_5q-d16893e66304-link">Pipeline Maintenance</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_yms_ftm_sq-d16893e66768" class="topicref" data-id="concept_yms_ftm_sq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_yms_ftm_sq-d16893e66768-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Administration/Administration_title.html#concept_yms_ftm_sq" id="concept_yms_ftm_sq-d16893e66768-link">Administration</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_nls_w1r_ks-d16893e67508" class="topicref" 
data-id="concept_nls_w1r_ks" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_nls_w1r_ks-d16893e67508-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Tutorial/Tutorial-title.html" id="concept_nls_w1r_ks-d16893e67508-link">Tutorial</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_sh3_frm_tq-d16893e68001" class="topicref" data-id="concept_sh3_frm_tq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_sh3_frm_tq-d16893e68001-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Troubleshooting/Troubleshooting_title.html#concept_sh3_frm_tq" id="concept_sh3_frm_tq-d16893e68001-link">Troubleshooting</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_xbx_rs1_tq-d16893e68798" class="topicref" data-id="concept_xbx_rs1_tq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_xbx_rs1_tq-d16893e68798-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Glossary/Glossary_title.html#concept_xbx_rs1_tq" id="concept_xbx_rs1_tq-d16893e68798-link">Glossary</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_jn1_nzb_kv-d16893e68843" class="topicref" data-id="concept_jn1_nzb_kv" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_jn1_nzb_kv-d16893e68843-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Apx-DataFormats/DataFormat_Title.html#concept_jn1_nzb_kv" id="concept_jn1_nzb_kv-d16893e68843-link">Data Formats by Stage</a><div class="wh-tooltip"><p class="shortdesc"></p></div></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_pvm_yt3_wq-d16893e68958" class="topicref" 
data-id="concept_pvm_yt3_wq" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_pvm_yt3_wq-d16893e68958-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Expression_Language/ExpressionLanguage_title.html" id="concept_pvm_yt3_wq-d16893e68958-link">Expression Language</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_vcj_1ws_js-d16893e69669" class="topicref" data-id="concept_vcj_1ws_js" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_vcj_1ws_js-d16893e69669-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Apx-RegEx/RegEx-Title.html#concept_vcj_1ws_js" id="concept_vcj_1ws_js-d16893e69669-link">Regular Expressions</a></div></div></li><li role="treeitem" aria-expanded="false"><div data-tocid="concept_chv_vmj_wr-d16893e69787" class="topicref" data-id="concept_chv_vmj_wr" data-state="not-ready"><span role="button" tabindex="0" aria-labelledby="button-expand-action concept_chv_vmj_wr-d16893e69787-link" class="wh-expand-btn"></span><div class="title"><a href="../../../datacollector/UserGuide/Apx-GrokPatterns/GrokPatterns_title.html#concept_chv_vmj_wr" id="concept_chv_vmj_wr-d16893e69787-link">Grok Patterns</a></div></div></li></ul></div>
                        
<!-- End of generated TOC tree. -->

</div>
</nav>
                    


                    
                    <div id="wh_topic_body" class="col-lg-7 col-md-9 col-sm-12">
<button id="wh_close_publication_toc_button" class="close-toc-button d-none" aria-label="Toggle publishing table of content" aria-controls="wh_publication_toc" aria-expanded="true"><span class="close-toc-icon-container"><span class="close-toc-icon"></span></span></button><button id="wh_close_topic_toc_button" class="close-toc-button d-none" aria-label="Toggle topic table of content" aria-controls="wh_topic_toc" aria-expanded="true"><span class="close-toc-icon-container"><span class="close-toc-icon"></span></span></button>

                        
<div class=" wh_topic_content body "><main role="main"><article class="" role="article" aria-labelledby="ariaid-title1"><article class="nested0" aria-labelledby="ariaid-title1" id="concept_a5b_wvk_ckb">
    <h1 class="- topic/title title topictitle1" id="ariaid-title1">Loading Data into Databricks Delta Lake</h1>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc"></p>
        <p class="- topic/p p">You can use several solutions to load data into a Delta Lake table on Databricks. </p>
        
        
        <p class="- topic/p p">Before continuing with one of the solutions, ensure that you have completed all of the
            required prerequisites in Databricks, including generating a personal access token,
            configuring and starting your Databricks cluster, and then locating the JDBC URL used to
            access the cluster. </p>
        <div class="- topic/p p">For detailed prerequisite steps, see one of the following
                Databricks articles depending on your staging location:<ul class="- topic/ul ul" id="concept_a5b_wvk_ckb__ul_ksq_szg_tkb" data-ofbid="concept_a5b_wvk_ckb__ul_ksq_szg_tkb">
                    <li class="- topic/li li">When using Amazon S3 as the staging location, see this <a class="- topic/xref xref" href="https://docs.databricks.com/integrations/ingestion/streamsets.html" target="_blank" rel="external noopener">Databricks article</a>.</li>
                    <li class="- topic/li li">When using Azure Data Lake Storage Gen2 as the staging location, see this
                            <a class="- topic/xref xref" href="https://docs.microsoft.com/en-us/azure/databricks/integrations/ingestion/streamsets" target="_blank" rel="external noopener">Azure Databricks article</a>. </li>
                    <li class="- topic/li li">When using Google Cloud Storage as the staging location, see this <a class="- topic/xref xref" href="https://docs.gcp.databricks.com/data/data-sources/google/gcs.html" target="_blank" rel="external noopener">Databricks article</a>.</li>
                </ul></div>
        <div class="- topic/p p">Then use one of the following solutions to build a pipeline that loads data into a Delta
            Lake table on Databricks:<ul class="- topic/ul ul" id="concept_a5b_wvk_ckb__ul_vwx_fdx_bkb" data-ofbid="concept_a5b_wvk_ckb__ul_vwx_fdx_bkb">
                <li class="- topic/li li"><a class="- topic/xref xref" href="DeltaLake.html#concept_ml2_1vv_yjb" title="This solution describes how to build a pipeline that bulk loads Salesforce data into a Delta Lake table on Databricks.">Bulk load data into a
                        Delta Lake table</a><p class="- topic/p p">Build a pipeline that reads new Salesforce data,
                        cleans some of the input data, and then passes the data to the Databricks
                        Delta Lake destination. The Databricks Delta Lake destination first stages
                        the data in an Amazon S3 staging location, and then uses the COPY command to
                        copy the data from the staging location to a Delta Lake table.</p></li>
                <li class="- topic/li li"><a class="- topic/xref xref" href="DeltaLake.html#concept_uk4_fvv_yjb" title="This solution describes how to design a pipeline that reads change data capture (CDC) data from a database and replicates the changes to a Delta Lake table on Databricks.">Merge changed data into a
                        Delta Lake table</a><p class="- topic/p p">Build a pipeline that processes change data
                        capture (CDC) data using the MySQL Binary Log origin and then passes the
                        changed data to the Databricks Delta Lake destination. The Databricks Delta
                        Lake destination first stages the changed data in an Amazon S3 staging
                        location, and then uses the MERGE command to merge the changed data from the
                        staging location to a Delta Lake table.</p></li>
            </ul></div>
    </div>
<article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title2" id="concept_ml2_1vv_yjb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title2">Bulk Loading Data into a Delta Lake Table</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc">This solution describes how to build a pipeline that bulk loads Salesforce data into
        a Delta Lake table on Databricks.</p>
        <div class="- topic/note note tip note_tip"><span class="note__title">Tip:</span> You can download the sample Salesforce to Delta Lake pipeline from the
                <a class="- topic/xref xref" href="https://github.com/streamsets/pipeline-library/tree/master/datacollector" target="_blank" rel="external noopener"><span class="- topic/ph ph">StreamSets</span>
                <span class="- topic/ph ph">Data Collector</span>
                pipeline library</a>, import the pipeline into <span class="- topic/ph ph">Data Collector</span>,
            and then follow these steps for more details on the solution.</div>
        
        <p class="- topic/p p">Let's say that you want to bulk
            load Salesforce account data into Databricks Delta Lake for further analysis. You'd like
            the pipeline to clean some of the account data before loading it into Delta Lake. When
            the pipeline passes the cleaned data to the Databricks Delta Lake destination, the
            destination first stages the data in an Amazon S3 staging location, and then uses the
            COPY command to copy the data from the staging location to a Delta Lake table.</p>
        <div class="- topic/p p">To build this pipeline, complete the following tasks:<ol class="- topic/ol ol" id="concept_ml2_1vv_yjb__ul_e3k_z2w_bkb" data-ofbid="concept_ml2_1vv_yjb__ul_e3k_z2w_bkb">
                <li class="- topic/li li">Create the pipeline and configure a Salesforce origin to read account data from
                    Salesforce.</li>
                <li class="- topic/li li">Configure an Expression Evaluator processor to clean the input data.</li>
                <li class="- topic/li li">Configure a Databricks Delta Lake destination to stage the pipeline data in text
                    files in Amazon S3 and then copy the staged data to the target Delta Lake
                    table.</li>
                <li class="- topic/li li">Run the pipeline to move the data from Salesforce to Delta Lake.</li>
            </ol></div>
    </div>
<article class="- topic/topic task/task topic task nested2" aria-labelledby="ariaid-title3" id="task_xjr_mrw_bkb">
    <h3 class="- topic/title title topictitle3" id="ariaid-title3">Create the Pipeline and Configure the Salesforce Origin</h3>
    
    <div class="- topic/body task/taskbody body taskbody"><p class="- topic/shortdesc shortdesc">Create the pipeline and then configure the Salesforce origin to read account data
        from Salesforce.</p>
        <section class="- topic/section task/context section context">
            <p class="- topic/p p">For more detailed information about this origin, see <a class="- topic/xref xref" href="../Origins/Salesforce.html#concept_odf_vr3_rx">Salesforce origin</a>.
            </p>
        </section>
        <section class="- topic/ol task/steps ol steps"><ol class="- topic/ol task/steps ol steps"><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">After logging into <span class="- topic/ph ph">Data Collector</span>, click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Home</span> icon (<img class="- topic/image image" id="task_xjr_mrw_bkb__image_brs_yz5_3lb" src="../Graphics/icon_OverHome.png" height="14" width="18"/>) in
                    the top toolbar, and then click <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Create New
                    Pipeline</span>.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Enter a title for the pipeline, such as
                        <kbd class="+ topic/ph sw-d/userinput ph userinput">BulkLoadDeltaLake</kbd>, and then click
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Save</span>.</span>
                <div class="- topic/itemgroup task/info itemgroup info">An empty pipeline opens in the pipeline canvas.</div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">From the pipeline creation help bar, click <span class="+ topic/ph ui-d/menucascade ph menucascade"><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Select Origin</span><abbr title="and then"> &gt; </abbr><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Salesforce</span></span>, as follows:</span>
                <div class="- topic/itemgroup task/info itemgroup info"><img class="- topic/image image" id="task_xjr_mrw_bkb__image_ogs_f22_jlb" src="../Graphics/DeltaLake-BulkLoadHelpBar.png" height="161" width="709"/><p class="- topic/p p">The origin is added to the canvas.</p></div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">On the <span class="+ topic/keyword ui-d/wintitle keyword wintitle">Salesforce</span> tab, enter your Salesforce user name and
                    password.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Clear the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Subscribe for Notifications</span> checkbox.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">This way, the origin runs a query to process existing data and is not
                        subscribed to notifications.</p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Leave the default values for the remaining properties.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The <span class="+ topic/keyword ui-d/wintitle keyword wintitle">Salesforce</span> tab should be configured as follows: </p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_xjr_mrw_bkb__image_fdl_c3x_bkb" src="../Graphics/DeltaLake-BulkLoadSalesforceTab.png" height="520" width="633"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the <span class="+ topic/keyword ui-d/wintitle keyword wintitle">Query</span> tab and enter the following query for the
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">SOQL Query</span> property so that the origin reads only
                    these attributes from the Salesforce <code class="+ topic/ph pr-d/codeph ph codeph">account</code> object:</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>SELECT Id,
Name,
Type,
BillingStreet,
BillingCity,
BillingState,
BillingPostalCode,
BillingCountry,
ShippingStreet,
ShippingCity,
ShippingState,
ShippingPostalCode,
ShippingCountry,
Phone,
Fax
FROM Account
WHERE Id &gt; '${OFFSET}'
ORDER BY Id</code></pre>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Leave the default values for the remaining properties.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the error icon (<img class="- topic/image image" id="task_xjr_mrw_bkb__image_jsb_dl1_3lb" src="../Graphics/icon_ConfigPipeError.png" height="15" width="17"/>) in an empty area of the pipeline canvas.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The properties panel displays the <span class="+ topic/keyword ui-d/wintitle keyword wintitle">Error Records</span> tab for
                        the pipeline.</p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Select <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Discard</span> for the error records.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">In the toolbar above the pipeline canvas, click the
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Preview</span> icon: <img class="- topic/image image" id="task_xjr_mrw_bkb__image_uc3_tyx_sjb" src="../Graphics/icon_Preview.png" height="15" width="20"/>. </span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">When you preview the pipeline, you can verify that you correctly entered the
                        Salesforce connection information and you can view several records of data
                        read from Salesforce.</p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">In the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Preview Configuration</span> dialog box, accept the
                    defaults and then click <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Run Preview</span>.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">If the Salesforce connection information is valid, the preview displays
                        several records of Salesforce data, as follows:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_xjr_mrw_bkb__image_bzv_dfc_ckb" src="../Graphics/DeltaLake-BulkLoadPreviewOrigin.png" height="323" width="711"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Close Preview</span> icon (<img class="- topic/image image" id="task_xjr_mrw_bkb__image_krt_1y1_3lb" src="../Graphics/icon_PrevClose.png" height="16" width="20"/>)
                    to close the preview and continue building the pipeline.</span>
            </li></ol></section>
    </div>
</article><article class="- topic/topic task/task topic task nested2" aria-labelledby="ariaid-title4" id="task_cmk_5cy_bkb">
    <h3 class="- topic/title title topictitle3" id="ariaid-title4">Configure the Expression Evaluator Processor</h3>
    
    <div class="- topic/body task/taskbody body taskbody"><p class="- topic/shortdesc shortdesc">Next you add and configure the Expression Evaluator processor to clean some of the
        account data.</p>
        <section class="- topic/section task/context section context">
            <p class="- topic/p p">The <code class="+ topic/ph pr-d/codeph ph codeph">Type</code> field contains either <code class="+ topic/ph pr-d/codeph ph codeph">Customer - Direct</code> or
                    <code class="+ topic/ph pr-d/codeph ph codeph">Customer - Channel</code> as the value. You'd like to clean this data
                by keeping only <code class="+ topic/ph pr-d/codeph ph codeph">Direct</code> or <code class="+ topic/ph pr-d/codeph ph codeph">Channel</code> as the value
                before loading the data into a Delta Lake table. </p>
            <p class="- topic/p p">So you add an Expression Evaluator processor to the pipeline and define an expression
                that uses the <code class="+ topic/ph pr-d/codeph ph codeph">str:regExCapture()</code> function to replace the value of
                the <code class="+ topic/ph pr-d/codeph ph codeph">Type</code> field with only <kbd class="+ topic/ph sw-d/userinput ph userinput">Direct</kbd> or
                    <kbd class="+ topic/ph sw-d/userinput ph userinput">Channel</kbd>.</p>
            <div class="- topic/p p">
                <div class="- topic/note note note note_note"><span class="note__title">Note:</span> The Expression Evaluator processor performs calculations using the <span class="- topic/ph ph">StreamSets</span> expression language and writes the results to new or existing fields. For
                    more detailed information about this processor, see <a class="- topic/xref xref" href="../Processors/Expression.html#concept_zm2_pp3_wq">Expression Evaluator
                        processor</a>.</div>
            </div>
        </section>
        <section class="- topic/ol task/steps ol steps"><ol class="- topic/ol task/steps ol steps"><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">From the pipeline creation help bar, click <span class="+ topic/ph ui-d/menucascade ph menucascade"><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Select Processor</span><abbr title="and then"> &gt; </abbr><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Expression Evaluator</span></span>.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The processor is added to the canvas and connected to the origin.</p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Select the Expression Evaluator processor in the pipeline canvas, and then
                    click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Expressions</span> tab.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">In the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Field Expressions</span> section, enter
                        <kbd class="+ topic/ph sw-d/userinput ph userinput">/Type</kbd> for the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Output Field</span> and
                    then enter the following expression for the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Field
                        Expression</span>:</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>${str:regExCapture(record:value('/Type'),'(.*) - (.*)',2)}</code></pre>
                    <p class="- topic/p p">The <span class="+ topic/keyword ui-d/wintitle keyword wintitle">Expressions</span> tab should be configured as follows: </p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_cmk_5cy_bkb__image_xgx_xtd_ckb" src="../Graphics/DeltaLake-BulkLoadExpressionTab.png" height="389" width="855"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">To verify that the expression cleans the data as expected, click the
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Preview</span> icon (<img class="- topic/image image" id="task_cmk_5cy_bkb__image_uc3_tyx_sjb" src="../Graphics/icon_Preview.png" height="15" width="20"/>) and
                    then click <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Run Preview</span> in the dialog box. </span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">When the preview starts, select the Expression Evaluator processor in the
                    pipeline canvas.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The preview displays the input and output data of the processor, highlighting
                        the changed data in the <code class="+ topic/ph pr-d/codeph ph codeph">Type</code> field and confirming that the
                        expression correctly removes the string <kbd class="+ topic/ph sw-d/userinput ph userinput">Customer -</kbd>
                        from field values, as follows: </p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_cmk_5cy_bkb__image_lsb_sxd_ckb" src="../Graphics/DeltaLake-BulkLoadExpressionPreview.png" height="410" width="863"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Close Preview</span> icon (<img class="- topic/image image" id="task_cmk_5cy_bkb__image_krt_1y1_3lb" src="../Graphics/icon_PrevClose.png" height="16" width="20"/>)
                    to close the preview and continue configuring the next stage in the
                    pipeline.</span>
            </li></ol></section>
    </div>
</article><article class="- topic/topic task/task topic task nested2" aria-labelledby="ariaid-title5" id="task_uvc_zv2_ckb">
    <h3 class="- topic/title title topictitle3" id="ariaid-title5">Configure the Databricks Delta Lake Destination</h3>
    
    <div class="- topic/body task/taskbody body taskbody"><p class="- topic/shortdesc shortdesc">Add and configure the Databricks Delta Lake destination to bulk load the Salesforce
        data into a Delta Lake table. </p>
        <section class="- topic/section task/context section context">
            <p class="- topic/p p">To bulk load data, the Databricks Delta Lake destination first stages the pipeline
                data in text files in Amazon S3, Azure Data Lake Storage Gen2, or Google Cloud
                Storage. Then, the destination sends the COPY command to Databricks to process the
                staged files.</p>
            <p class="- topic/p p">For more detailed information about this destination, see <a class="- topic/xref xref" href="../Destinations/DeltaLake.html#concept_ddy_cdz_clb">Databricks Delta Lake
                    destination</a>.</p>
        </section>
        <section class="- topic/ol task/steps ol steps"><ol class="- topic/ol task/steps ol steps"><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">From the pipeline creation help bar, click <span class="+ topic/ph ui-d/menucascade ph menucascade"><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Select Destination</span><abbr title="and then"> &gt; </abbr><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Databricks Delta Lake</span></span>.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The destination is added to the canvas.</p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Select the destination, and then click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Databricks Delta
                        Lake</span> tab.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Configure the following properties:</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <div class="table-container"><table class="- topic/table table frame-all" id="task_uvc_zv2_ckb__table_s5c_lgv_3lb" data-ofbid="task_uvc_zv2_ckb__table_s5c_lgv_3lb" data-cols="2"><caption></caption><colgroup><col style="width:30%"/><col style="width:70%"/></colgroup><thead class="- topic/thead thead">
                                <tr class="- topic/row">
                                    <th class="- topic/entry entry colsep-1 rowsep-1" id="task_uvc_zv2_ckb__table_s5c_lgv_3lb__entry__1">Property</th>
                                    <th class="- topic/entry entry colsep-0 rowsep-1" id="task_uvc_zv2_ckb__table_s5c_lgv_3lb__entry__2">Value</th>
                                </tr>
                            </thead><tbody class="- topic/tbody tbody">
                                <tr class="- topic/row" id="task_uvc_zv2_ckb__row_JDBCURL">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_uvc_zv2_ckb__table_s5c_lgv_3lb__entry__1">JDBC URL</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_uvc_zv2_ckb__table_s5c_lgv_3lb__entry__2">Enter the JDBC URL that the destination uses to connect
                                        to the Databricks cluster. Remove the <code class="+ topic/ph pr-d/codeph ph codeph">PWD</code>
                                        parameter from the URL, and then enter the personal access
                                        token value for the Token property below.<p class="- topic/p p">Enter the URL in the
                                following format:
                                    <code class="+ topic/ph pr-d/codeph ph codeph">jdbc:databricks://&lt;server_hostname&gt;:443/default;transportMode=http</code>
                                <code class="+ topic/ph pr-d/codeph ph codeph">;ssl=1;httpPath=sql/protocolv1/o/0/xxxx-xxxxxx-xxxxxxxx;AuthMech=3;</code></p></td>
                                </tr>
                                <tr class="- topic/row" id="task_uvc_zv2_ckb__row_Token">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_uvc_zv2_ckb__table_s5c_lgv_3lb__entry__1">Token</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_uvc_zv2_ckb__table_s5c_lgv_3lb__entry__2">Enter the personal access token that you generated as a
                                        prerequisite in Databricks. </td>
                                </tr>
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_uvc_zv2_ckb__table_s5c_lgv_3lb__entry__1">Table Name</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_uvc_zv2_ckb__table_s5c_lgv_3lb__entry__2">Enter <kbd class="+ topic/ph sw-d/userinput ph userinput">sales.accounts</kbd> to write the
                                        data to the <code class="+ topic/ph pr-d/codeph ph codeph">accounts</code> Delta Lake table in
                                        the <code class="+ topic/ph pr-d/codeph ph codeph">sales</code> database.</td>
                                </tr>
                                <tr class="- topic/row" id="task_uvc_zv2_ckb__row_DataDrift">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_uvc_zv2_ckb__table_s5c_lgv_3lb__entry__1">Enable Data Drift</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_uvc_zv2_ckb__table_s5c_lgv_3lb__entry__2">Select to compensate for data drift and automatically
                                        create new columns or tables.</td>
                                </tr>
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-0" headers="task_uvc_zv2_ckb__table_s5c_lgv_3lb__entry__1">Auto Create Table</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-0" headers="task_uvc_zv2_ckb__table_s5c_lgv_3lb__entry__2">Select to automatically create the new
                                            <kbd class="+ topic/ph sw-d/userinput ph userinput">accounts</kbd> table in Delta
                                        Lake.</td>
                                </tr>
                            </tbody></table></div>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Leave the default values for the remaining properties.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The <span class="+ topic/keyword ui-d/wintitle keyword wintitle">Databricks Delta Lake</span> tab should be configured as
                        follows:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_uvc_zv2_ckb__image_qxl_v3v_3lb" src="../Graphics/DeltaLake-BulkLoadDeltaLake.png" height="516" width="695"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Staging</span> tab, and then set the
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Staging Location</span> to <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Amazon S3</span>. </span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The Staging tab defines how the destination connects to the specified staging
                        location. This solution uses Amazon S3 as the staging location and assumes
                        that <span class="- topic/ph ph">Data Collector</span> runs
                        on an EC2 instance with a configured instance profile. If you prefer, you
                        can configure the destination to use an alternate staging location.</p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Configure the following properties:</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <div class="table-container"><table class="- topic/table table frame-all" id="task_uvc_zv2_ckb__table_az3_lkv_3lb" data-ofbid="task_uvc_zv2_ckb__table_az3_lkv_3lb" data-cols="2"><caption></caption><colgroup><col style="width:30%"/><col style="width:70%"/></colgroup><thead class="- topic/thead thead">
                                <tr class="- topic/row">
                                    <th class="- topic/entry entry colsep-1 rowsep-1" id="task_uvc_zv2_ckb__table_az3_lkv_3lb__entry__1">Property</th>
                                    <th class="- topic/entry entry colsep-0 rowsep-1" id="task_uvc_zv2_ckb__table_az3_lkv_3lb__entry__2">Value</th>
                                </tr>
                            </thead><tbody class="- topic/tbody tbody">
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_uvc_zv2_ckb__table_az3_lkv_3lb__entry__1">Purge Stage File After Ingesting</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_uvc_zv2_ckb__table_az3_lkv_3lb__entry__2">Select to enable purging a staged file after its data is
                                        successfully written to a Delta Lake table.</td>
                                </tr>
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_uvc_zv2_ckb__table_az3_lkv_3lb__entry__1">Bucket</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_uvc_zv2_ckb__table_az3_lkv_3lb__entry__2">Enter the name of the Amazon S3 bucket to write the
                                        staged files to.</td>
                                </tr>
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-0" headers="task_uvc_zv2_ckb__table_az3_lkv_3lb__entry__1">Use Instance Profile</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-0" headers="task_uvc_zv2_ckb__table_az3_lkv_3lb__entry__2">Select to use the instance profile assigned to the EC2
                                        instance where Data Collector runs to connect to Amazon
                                            S3.<p class="- topic/p p">If not using instance profiles, clear this
                                            property and then enter your AWS access key ID and
                                            secret access key.</p></td>
                                </tr>
                            </tbody></table></div>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Leave the default values for the remaining properties.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The <span class="+ topic/keyword ui-d/wintitle keyword wintitle">Staging</span> tab should be configured as follows: </p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_uvc_zv2_ckb__image_h4z_hlv_3lb" src="../Graphics/DeltaLake-BulkLoadDeltaLakeStaging.png" height="581" width="919"/></p>
                </div>
            </li></ol></section>
    </div>
</article><article class="- topic/topic task/task topic task nested2" aria-labelledby="ariaid-title6" id="task_v4g_zrj_ckb">
    <h3 class="- topic/title title topictitle3" id="ariaid-title6">Run the Pipeline</h3>
    
    <div class="- topic/body task/taskbody body taskbody"><p class="- topic/shortdesc shortdesc"><span class="- topic/ph ph">Run the pipeline</span> to move the data from
        Salesforce to Delta Lake.</p>
        <section class="- topic/section task/context section context"></section>
        <section class="- topic/ol task/steps ol steps"><ol class="- topic/ol task/steps ol steps"><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">In the toolbar above the pipeline canvas, click
                    <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Start</span>.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">When the pipeline successfully starts, <span class="- topic/ph ph">Data Collector</span> displays the pipeline in Monitor mode. In Monitor mode, you can monitor
                        the health and performance of the pipeline by viewing real-time statistics
                        and errors as data moves through the pipeline, as displayed in the following
                        image:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_v4g_zrj_ckb__image_ctq_vmk_ckb" src="../Graphics/DeltaLake-BulkLoadMonitor.png" height="271" width="650"/></p>
                    <p class="- topic/p p">Because the Salesforce origin is configured to read all account data in bulk,
                        the pipeline automatically stops after reading all account data. </p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Verify that the pipeline loaded data into the Delta Lake table by running a SQL
                    query in your Databricks notebook.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <div class="- topic/p p">For example, if you run the following SQL
                        query:<pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>select * from sales.accounts</code></pre></div>
                    <p class="- topic/p p">Databricks displays the following results:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_v4g_zrj_ckb__image_tyh_nnk_ckb" src="../Graphics/DeltaLake-BulkLoadDatabricksResults.png" height="343" width="1639"/></p>
                </div>
            </li></ol></section>
    </div>
</article></article><article class="- topic/topic concept/concept topic concept nested1" aria-labelledby="ariaid-title7" id="concept_uk4_fvv_yjb">
    <h2 class="- topic/title title topictitle2" id="ariaid-title7">Merging Changed Data into a Delta Lake Table</h2>
    
    <div class="- topic/body concept/conbody body conbody"><p class="- topic/shortdesc shortdesc">This solution describes how to design a pipeline that reads change data capture (CDC)
        data from a database and replicates the changes to a Delta Lake table on
        Databricks.</p>
        <div class="- topic/note note tip note_tip"><span class="note__title">Tip:</span> You can download the sample MySQL Schema Replication to Delta Lake and
            MySQL CDC (Binary Log) to Delta Lake pipelines from the <a class="- topic/xref xref" href="https://github.com/streamsets/pipeline-library/tree/master/datacollector" target="_blank" rel="external noopener"><span class="- topic/ph ph">StreamSets</span>
                <span class="- topic/ph ph">Data Collector</span>
                pipeline library</a>, import the pipelines into <span class="- topic/ph ph">Data Collector</span>,
            and then follow these steps for more details on the solution.</div>
        <p class="- topic/p p">Let's say that you want to track
            customer transactions in a MySQL table and apply those changes to a Delta Lake table for
            further analysis. That is, you need to apply the same set of updates, deletes, and
            inserts made to the MySQL table to the Delta Lake table. You first design and run a
            pipeline to <a class="- topic/xref xref" href="DeltaLake.html#concept_ml2_1vv_yjb" title="This solution describes how to build a pipeline that bulk loads Salesforce data into a Delta Lake table on Databricks.">bulk load</a>
            the initial set of transactions in the MySQL table into the Delta Lake table. Then you
            design the CDC pipeline that processes subsequent changes.</p>
        <p class="- topic/p p">In the CDC pipeline, you use a MySQL Binary Log origin to capture the changes from the
            MySQL master database. Due to the structure of the MySQL binary log records, you need to
            add processors to the pipeline to restructure the record and keep only the necessary
            fields. When the pipeline passes the data to the Databricks Delta Lake destination, the
            destination first stages the changed data in an Amazon S3 staging location, and then
            uses the MERGE command to merge the changed data from the staging location to a Delta
            Lake table.</p>
        <div class="- topic/p p">To build this CDC pipeline, complete the following tasks:<ol class="- topic/ol ol" id="concept_uk4_fvv_yjb__ul_e3k_z2w_bkb" data-ofbid="concept_uk4_fvv_yjb__ul_e3k_z2w_bkb">
                <li class="- topic/li li">Create the pipeline and configure a MySQL Binary Log origin to read CDC
                    information provided by MySQL in binary logs.</li>
                <li class="- topic/li li">Configure several processors to restructure the record based on the type of
                    operation performed: INSERT, UPDATE, or DELETE.</li>
                <li class="- topic/li li">Configure a Databricks Delta Lake destination to stage the changed data in text
                    files in Amazon S3 and then merge the staged data to the target Delta Lake
                    table.</li>
                <li class="- topic/li li">Run the pipeline to replicate data from MySQL binary logs to the Delta Lake
                    target table.</li>
            </ol></div>
    </div>
<article class="- topic/topic task/task topic task nested2" aria-labelledby="ariaid-title8" id="task_n3z_5sq_ckb">
    <h3 class="- topic/title title topictitle3" id="ariaid-title8">Create the Pipeline and Configure the MySQL Binary Log Origin</h3>
    
    <div class="- topic/body task/taskbody body taskbody"><p class="- topic/shortdesc shortdesc">Create the pipeline and then configure the MySQL Binary Log origin to read CDC
        information provided by MySQL in binary logs.</p>
        <section class="- topic/section task/context section context">
            <div class="- topic/p p">
                <div class="- topic/note note important note_important"><span class="note__title">Important:</span> Before you use the MySQL Binary Log origin, you must <a class="- topic/xref xref" href="../Origins/MySQLBinaryLog.html#concept_wps_52k_qy">install the MySQL JDBC driver</a>. You cannot access the database until
                    you install the required driver.</div>
            </div>
        </section>
        <section class="- topic/ol task/steps ol steps"><ol class="- topic/ol task/steps ol steps"><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">After logging into <span class="- topic/ph ph">Data Collector</span>, click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Home</span> icon (<img class="- topic/image image" id="task_n3z_5sq_ckb__image_brs_yz5_3lb" src="../Graphics/icon_OverHome.png" height="14" width="18"/>) in
                    the top toolbar, and then click <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Create New
                    Pipeline</span>.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Enter a title for the pipeline, such as <kbd class="+ topic/ph sw-d/userinput ph userinput">CDCDeltaLake</kbd>,
                    and then click <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Save</span>.</span>
                <div class="- topic/itemgroup task/info itemgroup info">An empty pipeline opens in the pipeline canvas.</div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">From the pipeline creation help bar, click <span class="+ topic/ph ui-d/menucascade ph menucascade"><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Select Origin</span><abbr title="and then"> &gt; </abbr><span class="+ topic/ph ui-d/uicontrol ph uicontrol">MySQL Binary Log</span></span>, as follows:</span>
                <div class="- topic/itemgroup task/info itemgroup info"><img class="- topic/image image" id="task_n3z_5sq_ckb__image_ogs_f22_jlb" src="../Graphics/DeltaLake-CDCHelpBar.png" height="158" width="718"/><p class="- topic/p p">The origin is added to the canvas.</p></div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">On the <span class="+ topic/keyword ui-d/wintitle keyword wintitle">MySQL Binary Log</span> tab, enter the MySQL server host
                    name and port number.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Optionally enter the replication server ID that the origin uses to connect to
                    the master MySQL server.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">This solution assumes that the MySQL database is enabled for GTID, which
                        means that you do not need to configure the server ID.</p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Select <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Start from Beginning</span>.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Leave the default values for the remaining properties.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The <span class="+ topic/keyword ui-d/wintitle keyword wintitle">MySQL Binary Log</span> tab should be configured as
                        follows:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_n3z_5sq_ckb__image_jt5_w5q_ckb" src="../Graphics/DeltaLake-CDCMySQLBinLogTab.png" height="365" width="638"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the <span class="+ topic/keyword ui-d/wintitle keyword wintitle">Credentials</span> tab and enter the user name and
                    password to connect to MySQL.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the error icon (<img class="- topic/image image" id="task_n3z_5sq_ckb__image_jsb_dl1_3lb" src="../Graphics/icon_ConfigPipeError.png" height="15" width="17"/>) in the pipeline canvas.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The properties panel displays the <span class="+ topic/keyword ui-d/wintitle keyword wintitle">Error Records</span> tab for
                        the pipeline.</p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Select <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Discard</span> for the error records.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">In the toolbar above the pipeline canvas, click the
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Preview</span> icon: <img class="- topic/image image" id="task_n3z_5sq_ckb__image_uc3_tyx_sjb" src="../Graphics/icon_Preview.png" height="15" width="20"/>. </span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">When you preview the pipeline, you can verify that you correctly entered the
                        MySQL connection information, and you can view several records of data read
                        from the binary logs.</p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">In the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Preview Configuration</span> dialog box, accept the
                    defaults and then click <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Run Preview</span>.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">If the MySQL connection information is valid and if the binary log contains
                        pending transactions, the preview displays the pending transactions, as
                        follows:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_n3z_5sq_ckb__image_ywm_gt4_kkb" src="../Graphics/DeltaLake-CDCPreviewOrigin.png" height="305" width="714"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Close Preview</span> icon (<img class="- topic/image image" id="task_n3z_5sq_ckb__image_krt_1y1_3lb" src="../Graphics/icon_PrevClose.png" height="16" width="20"/>)
                    to close the preview and continue building the pipeline.</span>
            </li></ol></section>
    </div>
</article><article class="- topic/topic task/task topic task nested2" aria-labelledby="ariaid-title9" id="task_ppg_r3r_ckb">
    <h3 class="- topic/title title topictitle3" id="ariaid-title9">Configure Processors to Restructure the Record</h3>
    
    <div class="- topic/body task/taskbody body taskbody"><p class="- topic/shortdesc shortdesc">Due to the structure of the MySQL binary log records, you need to add several
        processors to the pipeline to restructure the record and keep only the necessary
        fields.</p>
        <section class="- topic/section task/context section context">
            <p class="- topic/p p">Each record generated by the MySQL Binary Log origin includes the following
                information:</p>
            <ul class="- topic/ul ul" id="task_ppg_r3r_ckb__ul_mhx_xjr_ckb" data-ofbid="task_ppg_r3r_ckb__ul_mhx_xjr_ckb">
                <li class="- topic/li li">CRUD operation type in the <code class="+ topic/ph pr-d/codeph ph codeph">Type</code> field: INSERT, UPDATE, or
                    DELETE.</li>
                <li class="- topic/li li">Change data capture information such as the table, server ID, and timestamp in
                    various fields.</li>
                <li class="- topic/li li">
                    <p class="- topic/p p">New data to be inserted or updated in the <code class="+ topic/ph pr-d/codeph ph codeph">Data</code> map
                        field.</p>
                </li>
                <li class="- topic/li li">Old data to be deleted in the <code class="+ topic/ph pr-d/codeph ph codeph">OldData</code> map field.</li>
            </ul>
            <p class="- topic/p p">For example, the origin might generate the following record for data that needs to be
                inserted:</p>
            <p class="- topic/p p"><img class="- topic/image image" id="task_ppg_r3r_ckb__image_pvn_d54_kkb" src="../Graphics/DeltaLake-CDCMySQLBinLogSampleRecord.png" height="413" width="343"/></p>
            <p class="- topic/p p">You need to restructure the records differently, based on the operation type. You add
                a Stream Selector processor to the pipeline to route records with a DELETE operation
                in the <code class="+ topic/ph pr-d/codeph ph codeph">Type</code> field to one processing stream and to route records
                with an INSERT or UPDATE operation in the <code class="+ topic/ph pr-d/codeph ph codeph">Type</code> field to another
                processing stream. Then for each stream, you add a Field Remover processor to keep
                only the necessary fields and a Field Flattener processor to flatten the fields in
                the <code class="+ topic/ph pr-d/codeph ph codeph">Data</code> or <code class="+ topic/ph pr-d/codeph ph codeph">OldData</code> map fields.</p>
        </section>
        <section class="- topic/ol task/steps ol steps"><ol class="- topic/ol task/steps ol steps"><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">From the pipeline creation help bar, click <span class="+ topic/ph ui-d/menucascade ph menucascade"><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Select Processor</span><abbr title="and then"> &gt; </abbr><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Stream Selector</span></span>.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The processor is added to the canvas.</p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Select the Stream Selector processor in the pipeline canvas, and then click the
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Conditions</span> tab.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Add</span> icon (<img class="- topic/image image" id="task_ppg_r3r_ckb__image_mfz_vwb_jlb" src="../Graphics/icon_ConfigAddIcon.png" height="18" width="18"/>) to add a condition.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Enter the following expression for the condition:</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>${record:value('/Type') == 'DELETE'}</code></pre>
                </div>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">This condition uses the <a class="- topic/xref xref" href="../Expression_Language/ExpressionLanguage_overview.html#concept_p54_4kl_vq"><span class="- topic/ph ph">StreamSets</span> expression language</a> to route records with a DELETE operation
                        in the <code class="+ topic/ph pr-d/codeph ph codeph">Type</code> field to the first output stream of the
                        processor. All other records, with an INSERT or UPDATE operation in the
                            <code class="+ topic/ph pr-d/codeph ph codeph">Type</code> field, route to the default output stream.</p>
                    <p class="- topic/p p">The configured <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Conditions</span> tab and the pipeline should
                        look like this. Note that the Stream Selector processor has two output
                        streams:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_ppg_r3r_ckb__image_yjd_vpr_ckb" src="../Graphics/DeltaLake-CDCLinkOriginStreamSelector.png" height="274" width="666"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Add a Field Remover processor, and connect the first output stream of the
                    Stream Selector processor to the new processor.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Select the Field Remover processor in the pipeline canvas, and then on the
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">General</span> tab, enter <kbd class="+ topic/ph sw-d/userinput ph userinput">Keep OldData Fields to
                        DELETE</kbd> for the processor name.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Remove/Keep</span> tab.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">For <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Action</span>, select <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Keep Listed
                        Fields</span>, and then enter the following field paths for the
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Fields</span> property:</span>
                <ul class="- topic/ul task/choices ul choices" id="task_ppg_r3r_ckb__choices_ukm_nqr_ckb" data-ofbid="task_ppg_r3r_ckb__choices_ukm_nqr_ckb">
                    <li class="- topic/li task/choice li choice"><kbd class="+ topic/ph sw-d/userinput ph userinput">/OldData</kbd></li>
                    <li class="- topic/li task/choice li choice"><kbd class="+ topic/ph sw-d/userinput ph userinput">/Type</kbd></li>
                </ul>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">This configuration keeps only the <code class="+ topic/ph pr-d/codeph ph codeph">OldData</code> and
                            <code class="+ topic/ph pr-d/codeph ph codeph">Type</code> fields for records with a DELETE operation, and
                        removes all other fields. The pipeline and the configured
                            <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Remove/Keep</span> tab should look like this:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_ppg_r3r_ckb__image_mps_h5r_ckb" src="../Graphics/DeltaLake-CDCKeepOldData.png" height="363" width="665"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Select the Stream Selector processor in the pipeline canvas, and then add
                    another Field Remover processor.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The processor is added to the canvas, connected to the second output stream
                        of the Stream Selector processor.</p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Select the second Field Remover processor in the pipeline canvas, and then on
                    the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">General</span> tab, enter <kbd class="+ topic/ph sw-d/userinput ph userinput">Keep Data Fields to
                        INSERT/UPDATE</kbd> for the processor name.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Remove/Keep</span> tab.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">For <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Action</span>, select <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Keep Listed
                        Fields</span>, and then enter the following field paths for the
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Fields</span> property:</span>
                <ul class="- topic/ul task/choices ul choices" id="task_ppg_r3r_ckb__choices_ycw_m5r_ckb" data-ofbid="task_ppg_r3r_ckb__choices_ycw_m5r_ckb">
                    <li class="- topic/li task/choice li choice"><kbd class="+ topic/ph sw-d/userinput ph userinput">/Data</kbd></li>
                    <li class="- topic/li task/choice li choice"><kbd class="+ topic/ph sw-d/userinput ph userinput">/Type</kbd></li>
                </ul>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">This configuration keeps only the <code class="+ topic/ph pr-d/codeph ph codeph">Data</code> and
                            <code class="+ topic/ph pr-d/codeph ph codeph">Type</code> fields for records with an INSERT or UPDATE
                        operation, and removes all other fields. The configured
                            <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Remove/Keep</span> tab and the pipeline should look like
                        this:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_ppg_r3r_ckb__image_ayp_pvr_ckb" src="../Graphics/DeltaLake-CDCKeepData.png" height="375" width="659"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Add two Field Flattener processors to the pipeline, connecting each to one of
                    the Field Remover processors.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Select the Field Flattener processor in the stream that keeps the
                        <code class="+ topic/ph pr-d/codeph ph codeph">OldData</code> field, and then click the
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Flatten</span> tab.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Configure the following properties with the required values:</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <div class="table-container"><table class="- topic/table table frame-all" id="task_ppg_r3r_ckb__table_ejb_2t2_ckb" data-ofbid="task_ppg_r3r_ckb__table_ejb_2t2_ckb" data-cols="2"><caption></caption><colgroup><col style="width:30%"/><col style="width:70%"/></colgroup><thead class="- topic/thead thead">
                                <tr class="- topic/row">
                                    <th class="- topic/entry entry colsep-1 rowsep-1" id="task_ppg_r3r_ckb__table_ejb_2t2_ckb__entry__1">Property</th>
                                    <th class="- topic/entry entry colsep-0 rowsep-1" id="task_ppg_r3r_ckb__table_ejb_2t2_ckb__entry__2">Value</th>
                                </tr>
                            </thead><tbody class="- topic/tbody tbody">
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_ppg_r3r_ckb__table_ejb_2t2_ckb__entry__1">Flatten</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_ppg_r3r_ckb__table_ejb_2t2_ckb__entry__2">Select <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Flatten specific
                                        fields</span>.</td>
                                </tr>
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_ppg_r3r_ckb__table_ejb_2t2_ckb__entry__1">Fields</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_ppg_r3r_ckb__table_ejb_2t2_ckb__entry__2">Enter <kbd class="+ topic/ph sw-d/userinput ph userinput">/OldData</kbd>.</td>
                                </tr>
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_ppg_r3r_ckb__table_ejb_2t2_ckb__entry__1">Flatten in Place</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_ppg_r3r_ckb__table_ejb_2t2_ckb__entry__2">Clear the property.</td>
                                </tr>
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-0" headers="task_ppg_r3r_ckb__table_ejb_2t2_ckb__entry__1">Target Field</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-0" headers="task_ppg_r3r_ckb__table_ejb_2t2_ckb__entry__2">Enter <kbd class="+ topic/ph sw-d/userinput ph userinput">/</kbd> to write the flattened
                                        data to the root field.</td>
                                </tr>
                            </tbody></table></div>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Leave the default values for the remaining properties.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The <span class="+ topic/keyword ui-d/wintitle keyword wintitle">Flatten</span> tab should be configured as follows:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_ppg_r3r_ckb__image_m1q_m1s_ckb" src="../Graphics/DeltaLake-CDCFlattenOldData.png" height="464" width="665"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Select the second Field Flattener processor in the stream that keeps the
                        <code class="+ topic/ph pr-d/codeph ph codeph">Data</code> field, and then configure it the same way as the first
                    Field Flattener processor, except enter <kbd class="+ topic/ph sw-d/userinput ph userinput">/Data</kbd> for the
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Fields</span> property.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">To verify that you've restructured the data as expected, click the
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Preview</span> icon (<img class="- topic/image image" id="task_ppg_r3r_ckb__image_uc3_tyx_sjb" src="../Graphics/icon_Preview.png" height="15" width="20"/>) and
                    then click <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Confirm</span> in the dialog box. </span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Assuming that the binary log contains pending insert or update transactions,
                    select the Field Remover processor that keeps the <code class="+ topic/ph pr-d/codeph ph codeph">Data</code>
                    field.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The preview displays the input and output data of the processor, highlighting
                        that only the <code class="+ topic/ph pr-d/codeph ph codeph">Data</code> and <code class="+ topic/ph pr-d/codeph ph codeph">Type</code> fields are
                        included in the output, as follows: </p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_ppg_r3r_ckb__image_vm3_vqr_lkb" src="../Graphics/DeltaLake-CDCFieldRemoverData.png" height="441" width="637"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Next, select the Field Flattener processor connected to this Field Remover
                    processor.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The preview displays the input and output data of the Field Flattener
                        processor, showing that the fields in the <code class="+ topic/ph pr-d/codeph ph codeph">Data</code> map field
                        have been flattened to the root field, as follows: </p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_ppg_r3r_ckb__image_m2z_lsr_lkb" src="../Graphics/DeltaLake-CDCFieldFlattenerData.png" height="460" width="697"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Close Preview</span> icon (<img class="- topic/image image" id="task_ppg_r3r_ckb__image_krt_1y1_3lb" src="../Graphics/icon_PrevClose.png" height="16" width="20"/>)
                    to close the preview and continue configuring the next stage in the
                    pipeline.</span>
            </li></ol></section>
    </div>
</article><article class="- topic/topic task/task topic task nested2" aria-labelledby="ariaid-title10" id="task_vh5_vws_ckb">
    <h3 class="- topic/title title topictitle3" id="ariaid-title10">Configure the Databricks Delta Lake Destination</h3>
    
    <div class="- topic/body task/taskbody body taskbody"><p class="- topic/shortdesc shortdesc">Add and configure the Databricks Delta Lake destination to merge the changed data to
        a Delta Lake table.</p>
        <section class="- topic/section task/context section context">
            <p class="- topic/p p">To merge changed data, the Databricks Delta Lake destination first stages the
                pipeline data in text files in Amazon S3 or Azure Data Lake Storage Gen2. Then, the
                destination runs the COPY command to load the data to a temporary Delta Lake table,
                and then finally runs a MERGE command that uses the temporary table to merge the
                changed data into the target Delta Lake table.</p>
            <p class="- topic/p p">For more detailed information about this destination, see <a class="- topic/xref xref" href="../Destinations/DeltaLake.html#concept_ddy_cdz_clb">Databricks Delta Lake
                    destination</a>.</p>
        </section>
        <section class="- topic/ol task/steps ol steps"><ol class="- topic/ol task/steps ol steps"><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">From the pipeline creation help bar, click <span class="+ topic/ph ui-d/menucascade ph menucascade"><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Select Destination</span><abbr title="and then"> &gt; </abbr><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Databricks Delta Lake</span></span>.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The destination is added to the canvas.</p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Use your cursor to connect both Field Flattener processors to the
                    destination.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Select the destination, and then click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Databricks Delta
                        Lake</span> tab.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Configure the following properties:</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <div class="table-container"><table class="- topic/table table frame-all" id="task_vh5_vws_ckb__table_orr_jj2_jlb" data-ofbid="task_vh5_vws_ckb__table_orr_jj2_jlb" data-cols="2"><caption></caption><colgroup><col style="width:50%"/><col style="width:50%"/></colgroup><thead class="- topic/thead thead">
                                <tr class="- topic/row">
                                    <th class="- topic/entry entry colsep-1 rowsep-1" id="task_vh5_vws_ckb__table_orr_jj2_jlb__entry__1">Property</th>
                                    <th class="- topic/entry entry colsep-0 rowsep-1" id="task_vh5_vws_ckb__table_orr_jj2_jlb__entry__2">Value</th>
                                </tr>
                            </thead><tbody class="- topic/tbody tbody">
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_vh5_vws_ckb__table_orr_jj2_jlb__entry__1">JDBC URL</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_vh5_vws_ckb__table_orr_jj2_jlb__entry__2">Enter the JDBC URL that the destination uses to connect
                                        to the Databricks cluster. Remove the <code class="+ topic/ph pr-d/codeph ph codeph">PWD</code>
                                        parameter from the URL, and then enter the personal access
                                        token value for the Token property below.<p class="- topic/p p">Enter the URL in the
                                following format:
                                    <code class="+ topic/ph pr-d/codeph ph codeph">jdbc:databricks://&lt;server_hostname&gt;:443/default;transportMode=http</code>
                                <code class="+ topic/ph pr-d/codeph ph codeph">:ssl=1;httpPath=sql/protocolv1/o/0/xxxx-xxxxxx-xxxxxxxx;AuthMech=3;</code></p></td>
                                </tr>
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_vh5_vws_ckb__table_orr_jj2_jlb__entry__1">Token</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_vh5_vws_ckb__table_orr_jj2_jlb__entry__2">Enter the personal access token that you generated as a
                                        prerequisite in Databricks. </td>
                                </tr>
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_vh5_vws_ckb__table_orr_jj2_jlb__entry__1">Table Name</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_vh5_vws_ckb__table_orr_jj2_jlb__entry__2">Enter <kbd class="+ topic/ph sw-d/userinput ph userinput">customers_cdc</kbd> to write the
                                        changed data to a <code class="+ topic/ph pr-d/codeph ph codeph">customers_cdc</code> table in
                                        the default <code class="+ topic/ph pr-d/codeph ph codeph">delta</code> database.</td>
                                </tr>
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_vh5_vws_ckb__table_orr_jj2_jlb__entry__1">Enable Data Drift</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_vh5_vws_ckb__table_orr_jj2_jlb__entry__2">Select to compensate for data drift and automatically
                                        create new columns or tables.</td>
                                </tr>
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-0" headers="task_vh5_vws_ckb__table_orr_jj2_jlb__entry__1">Auto Create Table</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-0" headers="task_vh5_vws_ckb__table_orr_jj2_jlb__entry__2">Select so that the destination can automatically create
                                        the new <kbd class="+ topic/ph sw-d/userinput ph userinput">customers_cdc</kbd> table in Delta
                                        Lake.</td>
                                </tr>
                            </tbody></table></div>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Leave the default values for the remaining properties.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The <span class="+ topic/keyword ui-d/wintitle keyword wintitle">Databricks Delta Lake</span> tab should be configured as
                        follows:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_vh5_vws_ckb__image_qxl_v3v_3lb" src="../Graphics/DeltaLake-CDCDeltaLake.png" height="581" width="791"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Staging</span> tab, and then set the
                        <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Staging Location</span> to <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Amazon S3</span>. </span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The Staging tab defines how the destination connects to the specified staging
                        location. This solution uses Amazon S3 as the staging location and assumes
                        that <span class="- topic/ph ph">Data Collector</span> runs
                        on an EC2 instance with a configured instance profile. If you prefer, you
                        can configure the destination to use an alternate staging location.</p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Configure the following properties:</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <div class="table-container"><table class="- topic/table table frame-all" id="task_vh5_vws_ckb__table_az3_lkv_3lb" data-ofbid="task_vh5_vws_ckb__table_az3_lkv_3lb" data-cols="2"><caption></caption><colgroup><col style="width:30%"/><col style="width:70%"/></colgroup><thead class="- topic/thead thead">
                                <tr class="- topic/row">
                                    <th class="- topic/entry entry colsep-1 rowsep-1" id="task_vh5_vws_ckb__table_az3_lkv_3lb__entry__1">Property</th>
                                    <th class="- topic/entry entry colsep-0 rowsep-1" id="task_vh5_vws_ckb__table_az3_lkv_3lb__entry__2">Value</th>
                                </tr>
                            </thead><tbody class="- topic/tbody tbody">
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_vh5_vws_ckb__table_az3_lkv_3lb__entry__1">Purge Stage File After Ingesting</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_vh5_vws_ckb__table_az3_lkv_3lb__entry__2">Select to enable purging a staged file after its data is
                                        successfully written to a Delta Lake table.</td>
                                </tr>
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_vh5_vws_ckb__table_az3_lkv_3lb__entry__1">Bucket</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_vh5_vws_ckb__table_az3_lkv_3lb__entry__2">Enter the name of the Amazon S3 bucket to write the
                                        staged files to.</td>
                                </tr>
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-0" headers="task_vh5_vws_ckb__table_az3_lkv_3lb__entry__1">Use Instance Profile</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-0" headers="task_vh5_vws_ckb__table_az3_lkv_3lb__entry__2">Select to use the instance profile assigned to the EC2
                                        instance where Data Collector runs to connect to Amazon
                                            S3.<p class="- topic/p p">If not using instance profiles, clear and enter
                                            your AWS secret access key pair.</p></td>
                                </tr>
                            </tbody></table></div>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Leave the default values for the remaining properties.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The <span class="+ topic/keyword ui-d/wintitle keyword wintitle">Staging</span> tab should be configured as follows:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_vh5_vws_ckb__image_h4z_hlv_3lb" src="../Graphics/DeltaLake-CDCDeltaLakeStaging.png" height="557" width="907"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Data</span> tab.</span>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Select <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Merge CDC Data</span>.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">Enabling this property configures the destination to use the MERGE command to
                        insert, update, or delete the changed data in Delta Lake tables as
                        appropriate.</p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Configure the following properties for the <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Key Columns</span>
                    section.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">The destination uses the key columns to evaluate the MERGE condition.</p>
                </div>
                <div class="- topic/itemgroup task/info itemgroup info"><div class="table-container"><table class="- topic/table table frame-all" id="task_vh5_vws_ckb__table_ocx_mw2_jlb" data-ofbid="task_vh5_vws_ckb__table_ocx_mw2_jlb" data-cols="2"><caption></caption><colgroup><col style="width:30%"/><col style="width:70%"/></colgroup><thead class="- topic/thead thead">
                                <tr class="- topic/row">
                                    <th class="- topic/entry entry colsep-1 rowsep-1" id="task_vh5_vws_ckb__table_ocx_mw2_jlb__entry__1">Property</th>
                                    <th class="- topic/entry entry colsep-0 rowsep-1" id="task_vh5_vws_ckb__table_ocx_mw2_jlb__entry__2">Value</th>
                                </tr>
                            </thead><tbody class="- topic/tbody tbody">
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-1" headers="task_vh5_vws_ckb__table_ocx_mw2_jlb__entry__1">Table</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-1" headers="task_vh5_vws_ckb__table_ocx_mw2_jlb__entry__2">Enter <kbd class="+ topic/ph sw-d/userinput ph userinput">customers_cdc</kbd>.</td>
                                </tr>
                                <tr class="- topic/row">
                                    <td class="- topic/entry entry colsep-1 rowsep-0" headers="task_vh5_vws_ckb__table_ocx_mw2_jlb__entry__1">Key Columns</td>
                                    <td class="- topic/entry entry colsep-0 rowsep-0" headers="task_vh5_vws_ckb__table_ocx_mw2_jlb__entry__2">Enter <kbd class="+ topic/ph sw-d/userinput ph userinput">customer_id</kbd>.</td>
                                </tr>
                            </tbody></table></div>The <span class="+ topic/keyword ui-d/wintitle keyword wintitle">Data</span> tab should be configured as
                            follows:<p class="- topic/p p"><img class="- topic/image image" id="task_vh5_vws_ckb__image_ydl_jx2_jlb" src="../Graphics/DeltaLake-CDCDeltaLakeData.png" height="539" width="891"/></p></div>
            </li></ol></section>
    </div>
</article><article class="- topic/topic task/task topic task nested2" aria-labelledby="ariaid-title11" id="task_ftn_q2t_ckb">
    <h3 class="- topic/title title topictitle3" id="ariaid-title11">Run the Pipeline</h3>
    
    <div class="- topic/body task/taskbody body taskbody"><p class="- topic/shortdesc shortdesc"><span class="- topic/ph ph">Run the pipeline</span> to move the
        changed data from MySQL binary logs to Delta Lake.</p>
        <section class="- topic/ol task/steps ol steps"><ol class="- topic/ol task/steps ol steps"><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">In the toolbar above the pipeline canvas, click
                    <span class="+ topic/ph ui-d/uicontrol ph uicontrol">Start</span>.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <p class="- topic/p p">When the pipeline successfully starts, <span class="- topic/ph ph">Data Collector</span> displays the pipeline in Monitor mode. In Monitor mode, you can monitor
                        the health and performance of the pipeline by viewing real-time statistics
                        and errors as data moves through the pipeline, as displayed in the following
                        image: </p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_ftn_q2t_ckb__image_ctq_vmk_ckb" src="../Graphics/DeltaLake-CDCMonitor.png" height="343" width="685"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Next, verify that the pipeline loaded the data into the target table in Delta
                    Lake by running a SQL query in your Databricks notebook.</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <div class="- topic/p p">For example, if you run the following SQL
                        query:<pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>select * from customers_cdc</code></pre></div>
                    <p class="- topic/p p">Databricks displays the following results:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_ftn_q2t_ckb__image_ylg_dmt_ckb" src="../Graphics/DeltaLake-CDCDatabricksResults.png" height="138" width="932"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Verify that the pipeline successfully applies update operations to the Delta
                    Lake table by running the following command on the MySQL database to update one
                    of the rows:</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>update retail.customers_cdc set address='10 Downing ST' where customer_id=6;</code></pre>
                    <p class="- topic/p p">Then in your Databricks notebook, verify that the Delta Lake table has been
                        updated with the changed address for that customer ID:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_ftn_q2t_ckb__image_knw_nnt_ckb" src="../Graphics/DeltaLake-CDCDatabricksUpdate.png" height="135" width="936"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Verify that the pipeline successfully applies delete operations to the Delta
                    Lake table by running the following command on the MySQL database to delete one
                    of the rows:</span>
                <div class="- topic/itemgroup task/info itemgroup info">
                    <pre class="+ topic/pre pr-d/codeblock pre codeblock"><code>delete from retail.customers_cdc where customer_id=7;</code></pre>
                    <p class="- topic/p p">Then in your Databricks notebook, verify that the row for that customer ID
                        has been deleted from the Delta Lake table:</p>
                    <p class="- topic/p p"><img class="- topic/image image" id="task_ftn_q2t_ckb__image_zvc_j4t_ckb" src="../Graphics/DeltaLake-CDCDatabricksDelete.png" height="126" width="940"/></p>
                </div>
            </li><li class="- topic/li task/step li step">
                <span class="- topic/ph task/cmd ph cmd">Click the <span class="- topic/ph ph"><span class="+ topic/ph ui-d/uicontrol ph uicontrol">Stop</span></span> icon (<img class="- topic/image image" id="task_ftn_q2t_ckb__image_kfp_w1z_sjb" src="../Graphics/icon_MonStop.png" height="19" width="21"/>) to
                    stop the pipeline.</span>
            </li></ol></section>
    </div>
</article></article></article></article></main></div>

                        
                        
                        


                    </div>
                    
                </div>
            </div>


        </div> <nav class="navbar navbar-default wh_footer" data-whc_version="25.0">
  <div class=" footer-container  mx-auto">
    <!-- script for Data Collector, all flavors, but only used when accessed directly, not from portal --><script>
  // Standard Google Analytics (analytics.js) loader snippet:
  // - registers 'ga' as the global command-queue name (GoogleAnalyticsObject),
  // - defines a stub ga() that queues calls (i[r].q) until the library loads,
  // - records the load start time (i[r].l),
  // - injects an async <script> for analytics.js before the first script tag.
  (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
  (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
  m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
  })(window,document,'script','https://www.google-analytics.com/analytics.js','ga');

  // Create the default tracker and record a pageview for this page.
  // NOTE(review): 'UA-…' is a Universal Analytics property ID; UA stopped
  // processing hits in 2023 — confirm whether this should migrate to a GA4
  // measurement ID / gtag.js snippet.
  ga('create', 'UA-60917135-3', 'auto');
  ga('send', 'pageview');
</script>
  </div>
</nav>

        
        <div id="go2top">
            <span class="oxy-icon oxy-icon-up"></span>
        </div>
        
        <!-- The modal container for images -->
        <div id="modal_img_large" class="modal">
            <span class="close oxy-icon oxy-icon-remove"></span>
            <!-- Modal Content (The Image) -->
            <div id="modal_img_container"></div>
            <!-- Modal Caption (Image Text) -->
            <div id="caption"></div>
        </div>
        
        
        © 2023 StreamSets, Inc.

    </body>
</html>