diff --git a/docs/dataartsstudio/umn/ALL_META.TXT.json b/docs/dataartsstudio/umn/ALL_META.TXT.json index 1001c66d..c01e338d 100644 --- a/docs/dataartsstudio/umn/ALL_META.TXT.json +++ b/docs/dataartsstudio/umn/ALL_META.TXT.json @@ -1,3381 +1,6521 @@ [ + { + "dockw":"User Guide" + }, { "uri":"dataartsstudio_12_0001.html", + "node_id":"dataartsstudio_12_0001.xml", "product_code":"dgc", "code":"1", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Service Overview", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "documenttype":"usermanual" + } + ], "title":"Service Overview", "githuburl":"" }, { "uri":"dataartsstudio_07_001.html", + "node_id":"dataartsstudio_07_001.xml", "product_code":"dgc", "code":"2", "des":"Enterprises often face challenges in the following aspects when managing data:GovernanceInconsistent data system standards impact data exchange and sharing between differ", "doc_type":"productdesc", "kw":"What Is DataArts Studio?,Service Overview,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "isMulti":"no", + "IsMulti":"No", + "documenttype":"productdesc", + "isBot":"yes", + "IsBot":"yes" + } + ], "title":"What Is DataArts Studio?", "githuburl":"" }, { "uri":"dataartsstudio_07_004.html", + "node_id":"dataartsstudio_07_004.xml", "product_code":"dgc", "code":"3", "des":"A DataArts Studio instance is the minimum unit of compute resources provided for users. You can create, access, and manage multiple DataArts Studio instances at the same ", "doc_type":"productdesc", "kw":"Basic Concepts,Service Overview,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "isMulti":"no", + "IsMulti":"No", + "documenttype":"productdesc", + "isBot":"yes", + "IsBot":"yes" + } + ], "title":"Basic Concepts", "githuburl":"" }, { "uri":"dataartsstudio_07_005.html", + "node_id":"dataartsstudio_07_005.xml", "product_code":"dgc", "code":"4", "des":"DataArts Migration can help you seamlessly migrate batch data between 20+ homogeneous or heterogeneous data sources. You can use it to ingest data from both on-premises a", "doc_type":"productdesc", "kw":"Functions,Service Overview,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "isMulti":"no", + "IsMulti":"No", + "documenttype":"productdesc", + "isBot":"yes", + "IsBot":"yes" + } + ], "title":"Functions", "githuburl":"" }, { "uri":"dataartsstudio_07_002.html", + "node_id":"dataartsstudio_07_002.xml", "product_code":"dgc", "code":"5", "des":"DataArts Studio is a one-stop data operations platform that allows you to perform many operations, including integrating data from every domain and connecting data from d", "doc_type":"productdesc", "kw":"Advantages,Service Overview,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "isMulti":"no", + "IsMulti":"No", + "documenttype":"productdesc", + "isBot":"yes", + "IsBot":"yes" + } + ], "title":"Advantages", "githuburl":"" }, { "uri":"dataartsstudio_07_003.html", + "node_id":"dataartsstudio_07_003.xml", "product_code":"dgc", "code":"6", "des":"You can use DataArts Studio to migrate offline data to the cloud and integrate the data into big data services.
On the DataArts Studio management console, you can use the", "doc_type":"productdesc", "kw":"Application Scenarios,Service Overview,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "isMulti":"no", + "IsMulti":"No", + "documenttype":"productdesc", + "isBot":"yes", + "IsBot":"yes" + } + ], "title":"Application Scenarios", "githuburl":"" }, { "uri":"dataartsstudio_07_012.html", + "node_id":"dataartsstudio_07_012.xml", "product_code":"dgc", "code":"7", "des":"If you need to assign different permissions to employees in your enterprise to access your DataArts Studio resources, IAM is a good choice for fine-grained permissions ma", "doc_type":"productdesc", "kw":"DataArts Studio Permissions Management,Service Overview,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "isMulti":"no", + "IsMulti":"No", + "documenttype":"productdesc", + "isBot":"yes", + "IsBot":"yes" + } + ], "title":"DataArts Studio Permissions Management", "githuburl":"" }, { "uri":"dataartsstudio_07_013.html", + "node_id":"dataartsstudio_07_013.xml", "product_code":"dgc", "code":"8", "des":"A workspace member can be assigned the role of admin, developer, operator, or viewer. This topic describes the permissions of each role.Admin: Users with this role have t", "doc_type":"productdesc", "kw":"DataArts Studio Permissions,Service Overview,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "isMulti":"no", + "IsMulti":"No", + "documenttype":"productdesc", + "isBot":"yes", + "IsBot":"yes" + } + ], "title":"DataArts Studio Permissions", "githuburl":"" }, { "uri":"dataartsstudio_07_006.html", + "node_id":"dataartsstudio_07_006.xml", "product_code":"dgc", "code":"9", "des":"The following table lists the recommended browser for logging in to DataArts Studio.Browser compatibilityBrowser VersionDescriptionGoogle Chrome 93.x or laterRecommendedB", "doc_type":"productdesc", "kw":"Constraints,Service Overview,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "isMulti":"no", + "IsMulti":"No", + "documenttype":"productdesc", + "isBot":"yes", + "IsBot":"yes" + } + ], "title":"Constraints", "githuburl":"" }, { "uri":"dataartsstudio_07_007.html", + "node_id":"dataartsstudio_07_007.xml", "product_code":"dgc", "code":"10", "des":"DataArts Studio uses Identity and Access Management (IAM) for authentication and authorization.DataArts Studio uses Cloud Trace Service (CTS) to audit users' non-query op", "doc_type":"productdesc", "kw":"Related Services,Service Overview,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "isMulti":"no", + "IsMulti":"No", + "documenttype":"productdesc", + "isBot":"yes", + "IsBot":"yes" + } + ], "title":"Related Services", "githuburl":"" }, { "uri":"dataartsstudio_12_0002.html", + "node_id":"dataartsstudio_12_0002.xml", "product_code":"dgc", "code":"11", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services.
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Preparations", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "documenttype":"usermanual" + } + ], "title":"Preparations", "githuburl":"" }, { "uri":"dataartsstudio_01_0003.html", + "node_id":"dataartsstudio_01_0003.xml", "product_code":"dataartsstudio", "code":"12", - "des":"To use DataArts Studio, create a cloud platform account, create a DataArts Studio instance, and authorize a user to use DataArts Studio.For details about the preparations", + "des":"To use DataArts Studio, create a cloud platform account, create a DataArts Studio instance, and authorize a user to use DataArts Studio.For details about the preparatio", "doc_type":"usermanual", "kw":"Preparations,Preparations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dataartsstudio", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Preparations", "githuburl":"" }, { "uri":"dataartsstudio_01_1028.html", + "node_id":"dataartsstudio_01_1028.xml", "product_code":"dataartsstudio", "code":"13", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Creating DataArts Studio Instances", + "search_title":"", + "metedata":[ + { + "prodname":"dataartsstudio", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Creating DataArts Studio Instances", "githuburl":"" }, { "uri":"dataartsstudio_01_0115_0.html", + "node_id":"dataartsstudio_01_0115_0.xml", "product_code":"dataartsstudio", "code":"14", - "des":"Only cloud platform account users with the DAYU Administrator or Tenant Administrator permissions can create DataArts Studio instances or DataArts Studio incremental pack", + "des":"Only cloud platform account users with the DAYU Administrator or Tenant Administrator permissions can create DataArts Studio instances or DataArts Studio incremental pa", "doc_type":"usermanual", "kw":"Creating a DataArts Studio Basic Package,Creating DataArts Studio Instances,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dataartsstudio", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Creating a DataArts Studio Basic Package", "githuburl":"" }, { "uri":"dataartsstudio_01_0119.html", + "node_id":"dataartsstudio_01_0119.xml", "product_code":"dataartsstudio", "code":"15", "des":"DataArts Studio provides basic and incremental packages. If the basic package cannot meet your requirements, you can create an incremental package.
Before you create an i", "doc_type":"usermanual", "kw":"(Optional) Creating a DataArts Studio Incremental Package,Creating DataArts Studio Instances,User Gu", + "search_title":"", + "metedata":[ + { + "prodname":"dataartsstudio", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"(Optional) Creating a DataArts Studio Incremental Package", "githuburl":"" }, { "uri":"dataartsstudio_01_0011.html", + "node_id":"dataartsstudio_01_0011.xml", "product_code":"dataartsstudio", "code":"16", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Managing a Workspace", + "search_title":"", + "metedata":[ + { + "prodname":"dataartsstudio", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Managing a Workspace", "githuburl":"" }, { "uri":"dataartsstudio_01_0116_0.html", + "node_id":"dataartsstudio_01_0116_0.xml", "product_code":"dataartsstudio", "code":"17", "des":"By default, a workspace will be automatically created after you create a DataArts Studio instance. You will be automatically assigned the admin role and can use the defau", "doc_type":"usermanual", "kw":"Creating and Managing a Workspace,Managing a Workspace,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dataartsstudio", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Creating and Managing a Workspace", "githuburl":"" }, { "uri":"dataartsstudio_01_0530.html", + "node_id":"dataartsstudio_01_0530.xml", "product_code":"dataartsstudio", "code":"18", "des":"By default, job logs and Data Lake Insight (DLI) dirty data are stored in an Object Storage Service (OBS) bucket named dlf-log-{Project ID}. You can customize a log stora", "doc_type":"usermanual", "kw":"(Optional) Changing the Job Log Storage Path,Managing a Workspace,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dataartsstudio", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"(Optional) Changing the Job Log Storage Path", "githuburl":"" }, { "uri":"dataartsstudio_01_0118_0.html", + "node_id":"dataartsstudio_01_0118_0.xml", "product_code":"dataartsstudio", "code":"19", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Authorizing Users to Use DataArts Studio", + "search_title":"", + "metedata":[ + { + "prodname":"dataartsstudio", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Authorizing Users to Use DataArts Studio", "githuburl":"" }, { "uri":"dataartsstudio_01_0004.html", + "node_id":"dataartsstudio_01_0004.xml", "product_code":"dataartsstudio", "code":"20", "des":"Identity and Access Management (IAM) can be used for fine-grained permissions management on your DataArts Studio resources.
With IAM, you can:Create IAM users for employe", "doc_type":"usermanual", "kw":"Creating an IAM User and Assigning DataArts Studio Permissions,Authorizing Users to Use DataArts Stu", + "search_title":"", + "metedata":[ + { + "prodname":"dataartsstudio", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Creating an IAM User and Assigning DataArts Studio Permissions", "githuburl":"" }, { "uri":"dataartsstudio_01_0117_0.html", + "node_id":"dataartsstudio_01_0117_0.xml", "product_code":"dataartsstudio", "code":"21", "des":"If you want to allow another IAM user to use your DataArts Studio instance, create an IAM user by referring to Creating an IAM User and Assigning DataArts Studio Permissi", "doc_type":"usermanual", "kw":"Adding a Member and Assigning a Role,Authorizing Users to Use DataArts Studio,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dataartsstudio", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Adding a Member and Assigning a Role", "githuburl":"" }, { "uri":"dataartsstudio_01_0006.html", + "node_id":"dataartsstudio_01_0006.xml", "product_code":"dataartsstudio", "code":"22", "des":"When creating OBS links, making API calls, or locating issues, you may need to obtain information such as access keys, project IDs, and endpoints. This section describes ", "doc_type":"usermanual", "kw":"(Optional) Obtaining Authentication Information,Preparations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dataartsstudio", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"(Optional) Obtaining Authentication Information", "githuburl":"" }, { "uri":"dataartsstudio_12_0004.html", + "node_id":"dataartsstudio_12_0004.xml", "product_code":"dgc", "code":"23", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "documenttype":"usermanual" + } + ], "title":"User Guide", "githuburl":"" }, { "uri":"dataartsstudio_01_0134.html", + "node_id":"dataartsstudio_01_0134.xml", "product_code":"dgc", "code":"24", "des":"Before using DataArts Studio, you must conduct data and business surveys and select an appropriate data governance model.Then, make the following preparations by referrin", "doc_type":"usermanual", "kw":"Preparations Before Using DataArts Studio,User Guide,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Preparations Before Using DataArts Studio", "githuburl":"" }, { "uri":"dataartsstudio_01_0008.html", + "node_id":"dataartsstudio_01_0008.xml", "product_code":"dgc", "code":"25", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services.
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Management Center", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Management Center", "githuburl":"" }, { "uri":"dataartsstudio_01_0005.html", + "node_id":"dataartsstudio_01_0005.xml", "product_code":"dgc", "code":"26", "des":"Before using DataArts Studio, select a cloud service or data warehouse as the data lake. The data lake stores raw data and data generated during data governance and serve", "doc_type":"usermanual", "kw":"Data Sources,Management Center,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Data Sources", "githuburl":"" }, { "uri":"dataartsstudio_01_0009.html", + "node_id":"dataartsstudio_01_0009.xml", "product_code":"dgc", "code":"27", "des":"You can create data connections by configuring data sources. Based on the data connections of the Management Center, DataArts Studio performs data development, governance", "doc_type":"usermanual", "kw":"Creating Data Connections,Management Center,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Creating Data Connections", "githuburl":"" }, { "uri":"dataartsstudio_01_0010.html", + "node_id":"dataartsstudio_01_0010.xml", "product_code":"dgc", "code":"28", "des":"To migrate resources in one workspace to another, you can use the resource migration function provided by DataArts Studio.The resources that can be migrated include the d", "doc_type":"usermanual", "kw":"Migrating Resources,Management Center,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Migrating Resources", "githuburl":"" }, { "uri":"dataartsstudio_01_0350.html", + "node_id":"dataartsstudio_01_0350.xml", "product_code":"", "code":"29", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services.
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"", "kw":"Tutorials", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Tutorials", "githuburl":"" }, { "uri":"dataartsstudio_01_0351.html", + "node_id":"dataartsstudio_01_0351.xml", "product_code":"", "code":"30", "des":"This section describes how to create an MRS Hive connection between DataArts Studio and the data lake base.You have created a data lake to connect, for example, a databas", "doc_type":"", "kw":"Creating an MRS Hive Connection,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Creating an MRS Hive Connection", "githuburl":"" }, { "uri":"dataartsstudio_01_0352.html", + "node_id":"dataartsstudio_01_0352.xml", "product_code":"", "code":"31", "des":"This section describes how to create a DWS connection between DataArts Studio and the data lake base.You have created a data lake to connect, for example, a database or c", "doc_type":"", "kw":"Creating a DWS Connection,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Creating a DWS Connection", "githuburl":"" }, { "uri":"dataartsstudio_01_0353.html", + "node_id":"dataartsstudio_01_0353.xml", "product_code":"", "code":"32", "des":"This section describes how to create a MySQL connection between DataArts Studio and the data lake base.You have created a data lake to connect, for example, a database or", "doc_type":"", "kw":"Creating a MySQL Connection,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Creating a MySQL Connection", "githuburl":"" }, { "uri":"dataartsstudio_01_0012.html", + "node_id":"dataartsstudio_01_0012.xml", "product_code":"dgc", "code":"33", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"DataArts Migration", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"DataArts Migration", "githuburl":"" }, { "uri":"dataartsstudio_01_0013.html", + "node_id":"dataartsstudio_01_0013.xml", "product_code":"dgc", "code":"34", "des":"DataArts Migration is an efficient and easy-to-use data integration service. Based on the big data migration to the cloud and intelligent data lake solutions, CDM provide", "doc_type":"usermanual", "kw":"Overview,DataArts Migration,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Overview", "githuburl":"" }, { "uri":"dataartsstudio_01_0015.html", + "node_id":"dataartsstudio_01_0015.xml", "product_code":"cdm", "code":"35", "des":"You cannot modify the flavor of an existing cluster. If you require a higher flavor, create a cluster with your desired flavor.Arm CDM clusters do not support agents.
The", "doc_type":"usermanual", "kw":"Constraints,DataArts Migration,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Constraints", "githuburl":"" }, { "uri":"dataartsstudio_01_0014.html", + "node_id":"dataartsstudio_01_0014.xml", "product_code":"cdm", "code":"36", "des":"CDM provides the following migration modes which support different data sources:Table/File migration in the import of data into a data lake or migration of data to the cl", "doc_type":"usermanual", "kw":"Supported Data Sources,DataArts Migration,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Supported Data Sources", "githuburl":"" }, { "uri":"dataartsstudio_01_0017.html", + "node_id":"dataartsstudio_01_0017.xml", "product_code":"cdm", "code":"37", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Managing Clusters", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Managing Clusters", "githuburl":"" }, { "uri":"dataartsstudio_01_0576.html", + "node_id":"dataartsstudio_01_0576.xml", "product_code":"dgc", "code":"38", "des":"CDM provides independent clusters for secure and reliable data migration. Clusters are isolated from each other and cannot access each other.CDM clusters can be used in t", "doc_type":"usermanual", "kw":"Creating a CDM Cluster,Managing Clusters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Creating a CDM Cluster", "githuburl":"" }, { "uri":"dataartsstudio_01_0020.html", + "node_id":"dataartsstudio_01_0020.xml", "product_code":"cdm", "code":"39", "des":"After creating a CDM cluster, you can bind an EIP to or unbind an EIP from the cluster.If CDM needs to access a local or Internet data source, or a cloud service in anoth", "doc_type":"usermanual", "kw":"Binding or Unbinding an EIP,Managing Clusters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Binding or Unbinding an EIP", "githuburl":"" }, { "uri":"dataartsstudio_01_0578.html", + "node_id":"dataartsstudio_01_0578.xml", "product_code":"cdm", "code":"40", "des":"After modifying some configurations (for example, disabling user isolation), you must restart the cluster to make the modification take effect.You have created a CDM clus", "doc_type":"usermanual", "kw":"Restarting a Cluster,Managing Clusters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Restarting a Cluster", "githuburl":"" }, { "uri":"dataartsstudio_01_0579.html", + "node_id":"dataartsstudio_01_0579.xml", "product_code":"cdm", "code":"41", "des":"You can delete a CDM cluster that you no longer use.After a CDM cluster is deleted, the cluster and its data are destroyed and cannot be restored. 
Exercise caution when p", "doc_type":"usermanual", "kw":"Deleting a Cluster,Managing Clusters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Deleting a Cluster", "githuburl":"" }, { "uri":"dataartsstudio_01_0022.html", + "node_id":"dataartsstudio_01_0022.xml", "product_code":"cdm", "code":"42", "des":"This section describes how to obtain cluster logs to view the job running history and locate job failure causes.You have created a CDM cluster.The Source column is displa", "doc_type":"usermanual", "kw":"Downloading Cluster Logs,Managing Clusters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Downloading Cluster Logs", "githuburl":"" }, { "uri":"dataartsstudio_01_0021.html", + "node_id":"dataartsstudio_01_0021.xml", "product_code":"cdm", "code":"43", "des":"After creating a CDM cluster, you can view its basic information and modify its configurations.You can view the following basic cluster information:Cluster information: c", "doc_type":"usermanual", "kw":"Viewing Basic Cluster Information and Modifying Cluster Configurations,Managing Clusters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Viewing Basic Cluster Information and Modifying Cluster Configurations", "githuburl":"" }, { "uri":"dataartsstudio_01_0121.html", + "node_id":"dataartsstudio_01_0121.xml", "product_code":"cdm", "code":"44", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Viewing Metrics", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Viewing Metrics", "githuburl":"" }, { "uri":"dataartsstudio_01_0122.html", + "node_id":"dataartsstudio_01_0122.xml", "product_code":"cdm", "code":"45", "des":"You have obtained required Cloud Eye permissions.This section describes metrics reported by CDM to Cloud Eye as well as their namespaces and dimensions. You can use APIs ", "doc_type":"usermanual", "kw":"CDM Metrics,Viewing Metrics,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"CDM Metrics", "githuburl":"" }, { "uri":"dataartsstudio_01_0123.html", + "node_id":"dataartsstudio_01_0123.xml", "product_code":"cdm", "code":"46", "des":"Set the alarm rules to customize the monitored objects and notification policies. Then, learn CDM running status in a timely manner.A CDM alarm rule includes the alarm ru", "doc_type":"usermanual", "kw":"Configuring Alarm Rules,Viewing Metrics,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Configuring Alarm Rules", "githuburl":"" }, { "uri":"dataartsstudio_01_0124.html", + "node_id":"dataartsstudio_01_0124.xml", "product_code":"cdm", "code":"47", "des":"You can use Cloud Eye to monitor the running status of a CDM cluster.
You can view the monitoring metrics on the Cloud Eye console.Monitored data takes some time for tran", "doc_type":"usermanual", "kw":"Querying Metrics,Viewing Metrics,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Querying Metrics", "githuburl":"" }, { "uri":"dataartsstudio_01_0023.html", + "node_id":"dataartsstudio_01_0023.xml", "product_code":"cdm", "code":"48", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Managing Links", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Managing Links", "githuburl":"" }, { "uri":"dataartsstudio_01_0024.html", + "node_id":"dataartsstudio_01_0024.xml", "product_code":"cdm", "code":"49", "des":"Before creating a data migration job, create a link to enable the CDM cluster to read data from and write data to a data source. A migration job requires a source link an", "doc_type":"usermanual", "kw":"Creating Links,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Creating Links", "githuburl":"" }, { "uri":"dataartsstudio_01_0132.html", + "node_id":"dataartsstudio_01_0132.xml", "product_code":"", "code":"50", "des":"The Java Database Connectivity (JDBC) provides programmatic access to relational databases. Applications can execute SQL statements and retrieve data using the JDBC API.B", "doc_type":"", "kw":"Managing Drivers,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Managing Drivers", "githuburl":"" }, { "uri":"dataartsstudio_01_0128.html", + "node_id":"dataartsstudio_01_0128.xml", "product_code":"cdm", "code":"51", "des":"If your data is stored in HDFS or a relational database, you can deploy an agent on the source network.
CDM pulls data from your internal data sources through an agent bu", "doc_type":"usermanual", "kw":"Managing Agents,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Managing Agents", "githuburl":"" }, { "uri":"dataartsstudio_01_1096.html", + "node_id":"dataartsstudio_01_1096.xml", "product_code":"dgc", "code":"52", "des":"On the Cluster Configurations page, you can create, edit, or delete Hadoop cluster configurations.When creating a Hadoop link, the Hadoop cluster configurations can simpl", "doc_type":"usermanual", "kw":"Managing Cluster Configurations,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Managing Cluster Configurations", "githuburl":"" }, { "uri":"dataartsstudio_01_0044.html", + "node_id":"dataartsstudio_01_0044.xml", "product_code":"cdm", "code":"53", "des":"Common relational databases include: Data Warehouse Service (DWS), RDS for MySQL, RDS for PostgreSQL, RDS for SQL Server, PostgreSQL, Microsoft SQL Server, IBM Db2, and S", "doc_type":"usermanual", "kw":"Link to a Common Relational Database,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Link to a Common Relational Database", "githuburl":"" }, { "uri":"dataartsstudio_01_1214.html", + "node_id":"dataartsstudio_01_1214.xml", "product_code":"", "code":"54", "des":"Sharding refers to the link to multiple backend data sources at the same time. The link can be used as the job source to migrate data from multiple data sources to other ", "doc_type":"", "kw":"Link to a Database Shard,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Link to a Database Shard", "githuburl":"" }, { "uri":"dataartsstudio_01_1211.html", + "node_id":"dataartsstudio_01_1211.xml", "product_code":"", "code":"55", "des":"Table 1 lists the parameters for a link to a MySQL database.", "doc_type":"", "kw":"Link to a MySQL Database,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Link to a MySQL Database", "githuburl":"" }, { "uri":"dataartsstudio_01_1212.html", + "node_id":"dataartsstudio_01_1212.xml", "product_code":"", "code":"56", "des":"Table 1 lists the parameters for a link to an Oracle database.Parameters for a link to an Oracle databaseParameterDescriptionExample ValueNameLink name, which should be d", "doc_type":"", "kw":"Link to an Oracle Database,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Link to an Oracle Database", "githuburl":"" }, { "uri":"dataartsstudio_01_0036.html", + "node_id":"dataartsstudio_01_0036.xml", "product_code":"dgc", "code":"57", "des":"When connecting CDM to DLI, configure the parameters as described in Table 1.", "doc_type":"usermanual", "kw":"Link to DLI,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Link to DLI", "githuburl":"" }, { "uri":"dataartsstudio_01_0026.html", + "node_id":"dataartsstudio_01_0026.xml", "product_code":"cdm", "code":"58", "des":"CDM supports the following Hive data sources:MRS HiveFusionInsight HiveApache HiveMRS HiveFusionInsight HiveApache HiveYou can view a table during field mapping only when", "doc_type":"usermanual",
"kw":"Link to Hive,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Link to Hive", "githuburl":"" }, { "uri":"dataartsstudio_01_0039.html", + "node_id":"dataartsstudio_01_0039.xml", "product_code":"cdm", "code":"59", "des":"CDM supports the following HBase data sources:MRS HBaseFusionInsight HBaseApache HBaseMRS HBaseFusionInsight HBaseApache HBaseWhen connecting CDM to HBase of MRS, configu", "doc_type":"usermanual", "kw":"Link to HBase,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Link to HBase", "githuburl":"" }, { "uri":"dataartsstudio_01_0040.html", + "node_id":"dataartsstudio_01_0040.xml", "product_code":"cdm", "code":"60", "des":"CDM supports the following HDFS data sources:MRS HDFSFusionInsight HDFSApache HDFSMRS HDFSFusionInsight HDFSApache HDFSWhen connecting CDM to HDFS of MRS, configure the p", "doc_type":"usermanual", "kw":"Link to HDFS,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Link to HDFS", "githuburl":"" }, { "uri":"dataartsstudio_01_0045.html", + "node_id":"dataartsstudio_01_0045.xml", "product_code":"cdm", "code":"61", "des":"When connecting CDM to the destination OBS bucket, you need to add the read and write permissions to the destination OBS bucket, and file authentication is not required.W", "doc_type":"usermanual", "kw":"Link to OBS,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Link to OBS", "githuburl":"" }, { "uri":"dataartsstudio_01_0028.html", + "node_id":"dataartsstudio_01_0028.xml", "product_code":"dgc", "code":"62", "des":"The FTP/SFTP link is used to migrate files from the on-premises file server or ECS to database.Only FTP servers running Linux are supported.When connecting CDM to an FTP ", "doc_type":"usermanual", "kw":"Link to an FTP or SFTP Server,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Link to an FTP or SFTP Server", "githuburl":"" }, { "uri":"dataartsstudio_01_0032.html", + "node_id":"dataartsstudio_01_0032.xml", "product_code":"dgc", "code":"63", "des":"The Redis link is applicable to data migration of Redis created in the local data center or ECS. 
It is used to load data in the database or files to Redis.The DCS link is", "doc_type":"usermanual", "kw":"Link to Redis/DCS,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Link to Redis/DCS", "githuburl":"" }, { "uri":"dataartsstudio_01_0031.html", + "node_id":"dataartsstudio_01_0031.xml", "product_code":"cdm", "code":"64", "des":"The DDS link is used to synchronize data from Document Database Service (DDS) on cloud to a big data platform.When connecting CDM to DDS, configure the parameters as desc", "doc_type":"usermanual", "kw":"Link to DDS,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Link to DDS", "githuburl":"" }, { "uri":"dataartsstudio_01_0027.html", + "node_id":"dataartsstudio_01_0027.xml", "product_code":"dgc", "code":"65", "des":"When connecting CDM to CloudTable, configure the parameters as described in Table 1.Click Show Advanced Attributes, and then click Add to add configuration attributes of ", "doc_type":"usermanual", "kw":"Link to CloudTable,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Link to CloudTable", "githuburl":"" }, { "uri":"dataartsstudio_01_0037.html", + "node_id":"dataartsstudio_01_0037.xml", "product_code":"dgc", "code":"66", "des":"When connecting CDM to CloudTable OpenTSDB, configure the parameters as described in Table 1.", "doc_type":"usermanual", "kw":"Link to CloudTable OpenTSDB,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Link to CloudTable OpenTSDB", "githuburl":"" }, { "uri":"dataartsstudio_01_0030.html", + "node_id":"dataartsstudio_01_0030.xml", "product_code":"cdm", "code":"67", "des":"This link is used to transfer data from a third-party cloud MongoDB service or MongoDB created in the on-premises data center or ECS to a big data platform.When connectin", "doc_type":"usermanual", "kw":"Link to MongoDB,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Link to MongoDB", "githuburl":"" }, { "uri":"dataartsstudio_01_004501.html", + "node_id":"dataartsstudio_01_004501.xml", "product_code":"", "code":"68", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services.
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"", "kw":"Link to Cassandra,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Link to Cassandra", "githuburl":"" }, { "uri":"dataartsstudio_01_0033.html", + "node_id":"dataartsstudio_01_0033.xml", "product_code":"dgc", "code":"69", "des":"When connecting CDM to Kafka of MRS, configure the parameters as described in Table 1.Click Show Advanced Attributes, and then click Add to add configuration attributes o", "doc_type":"usermanual", "kw":"Link to Kafka,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Link to Kafka", "githuburl":"" }, { "uri":"dataartsstudio_01_0038.html", + "node_id":"dataartsstudio_01_0038.xml", "product_code":"dgc", "code":"70", "des":"When connecting CDM to DMS Kafka, configure the parameters as described in Table 1.", "doc_type":"usermanual", "kw":"Link to DMS Kafka,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Link to DMS Kafka", "githuburl":"" }, { "uri":"dataartsstudio_01_0035.html", + "node_id":"dataartsstudio_01_0035.xml", "product_code":"dgc", "code":"71", "des":"The Elasticsearch link is applicable to data migration of Elasticsearch services and Elasticsearch created in the local data center or ECS.The Elasticsearch connector sup", "doc_type":"usermanual", "kw":"Link to Elasticsearch/CSS,Managing Links,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Link to Elasticsearch/CSS", "githuburl":"" }, { "uri":"dataartsstudio_01_0081.html", + "node_id":"dataartsstudio_01_0081.xml", "product_code":"cdm", "code":"72", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Managing Jobs", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Managing Jobs", "githuburl":"" }, { "uri":"dataartsstudio_01_0046.html", + "node_id":"dataartsstudio_01_0046.xml", "product_code":"cdm", "code":"73", "des":"CDM supports table and file migration between homogeneous or heterogeneous data sources. For details about supported data sources, see Data Sources Supported by Table/Fil", "doc_type":"usermanual", "kw":"Table/File Migration Jobs,Managing Jobs,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Table/File Migration Jobs", "githuburl":"" }, { "uri":"dataartsstudio_01_0075.html", + "node_id":"dataartsstudio_01_0075.xml", "product_code":"cdm", "code":"74", "des":"CDM supports entire DB migration between homogeneous and heterogeneous data sources. The migration principles are the same as those in Table/File Migration Jobs.
Each typ", "doc_type":"usermanual", "kw":"Creating an Entire Database Migration Job,Managing Jobs,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Creating an Entire Database Migration Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0047.html", + "node_id":"dataartsstudio_01_0047.xml", "product_code":"cdm", "code":"75", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Source Job Parameters", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Source Job Parameters", "githuburl":"" }, { "uri":"dataartsstudio_01_0048.html", + "node_id":"dataartsstudio_01_0048.xml", "product_code":"cdm", "code":"76", "des":"If the source link of a job is the Link to OBS, configure the source job parameters based on Table 1.Advanced attributes are optional and not displayed by default. You ca", "doc_type":"usermanual", "kw":"From OBS,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"From OBS", "githuburl":"" }, { "uri":"dataartsstudio_01_0049.html", + "node_id":"dataartsstudio_01_0049.xml", "product_code":"cdm", "code":"77", "des":"When the source link of a job is the Link to HDFS, that is, when data is exported from MRS HDFS, FusionInsight HDFS, or Apache HDFS, configure the source job parameters b", "doc_type":"usermanual", "kw":"From HDFS,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"From HDFS", "githuburl":"" }, { "uri":"dataartsstudio_01_0050.html", + "node_id":"dataartsstudio_01_0050.xml", "product_code":"cdm", "code":"78", "des":"When the source link of a job is the Link to HBase or Link to CloudTable, that is, when data is exported from MRS HBase, FusionInsight HBase, CloudTable, or Apache HBase,", "doc_type":"usermanual", "kw":"From HBase/CloudTable,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"From HBase/CloudTable", "githuburl":"" }, { "uri":"dataartsstudio_01_0051.html", + "node_id":"dataartsstudio_01_0051.xml", "product_code":"cdm", "code":"79", "des":"If the source link of a job is the Link to Hive, configure the source job parameters based on Table 1.If the data source is Hive, CDM will automatically partition data us", "doc_type":"usermanual", "kw":"From Hive,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"From Hive", "githuburl":"" }, { "uri":"dataartsstudio_01_0120.html", + "node_id":"dataartsstudio_01_0120.xml", "product_code":"cdm", "code":"80", "des":"If the source link of a job is the Link to DLI, configure the source job parameters based on Table 1.", "doc_type":"usermanual", "kw":"From DLI,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"From DLI", "githuburl":"" }, { "uri":"dataartsstudio_01_0052.html", + "node_id":"dataartsstudio_01_0052.xml", "product_code":"cdm", "code":"81", "des":"If the source link of a job is the Link to an FTP or SFTP 
Server, configure the source job parameters based on Table 1.Advanced attributes are optional and not displayed ", "doc_type":"usermanual", "kw":"From FTP/SFTP,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"From FTP/SFTP", "githuburl":"" }, { "uri":"dataartsstudio_01_0053.html", + "node_id":"dataartsstudio_01_0053.xml", "product_code":"cdm", "code":"82", "des":"When the source link of a job is the HTTP link, configure the source job parameters based on Table 1. Currently, data can only be exported from the HTTP URLs.", "doc_type":"usermanual", "kw":"From HTTP,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"From HTTP", "githuburl":"" }, { "uri":"dataartsstudio_01_0054.html", + "node_id":"dataartsstudio_01_0054.xml", "product_code":"cdm", "code":"83", "des":"Common relational databases that can serve as the source include GaussDB(DWS), RDS for MySQL, RDS for PostgreSQL, RDS for SQL Server, FusionInsight LibrA, PostgreSQL, Mi", "doc_type":"usermanual", "kw":"From a Common Relational Database,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"From a Common Relational Database", "githuburl":"" }, { "uri":"dataartsstudio_01_1254.html", + "node_id":"dataartsstudio_01_1254.xml", "product_code":"", "code":"84", "des":"If the source link of a job is the Link to a MySQL Database, configure the source job parameters based on Table 1.In a migration from MySQL to DWS, the constraints on the", "doc_type":"", "kw":"From MySQL,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"From MySQL", "githuburl":"" }, { "uri":"dataartsstudio_01_1255.html", + "node_id":"dataartsstudio_01_1255.xml", "product_code":"", "code":"85", "des":"If the source link of a job is the Link to an Oracle Database, configure the source job parameters based on Table 1.When an Oracle database is the migration source, if Pa", "doc_type":"", "kw":"From Oracle,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"From Oracle", "githuburl":"" }, { "uri":"dataartsstudio_01_1256.html", + "node_id":"dataartsstudio_01_1256.xml", "product_code":"", "code":"86", "des":"If the source link of a job is the Link to a Database Shard, configure the source job parameters based on Table 1.If the Source Link Name is the backend link of the shard", "doc_type":"", "kw":"From a Database Shard,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"From a Database Shard", "githuburl":"" }, { "uri":"dataartsstudio_01_0055.html", + "node_id":"dataartsstudio_01_0055.xml", "product_code":"cdm", "code":"87", "des":"When you migrate MongoDB or DDS data, CDM reads the first row of the collection as an example of the field list. If the first row of data does not contain all fields of t", "doc_type":"usermanual", "kw":"From MongoDB/DDS,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"From MongoDB/DDS", "githuburl":"" }, { "uri":"dataartsstudio_01_0056.html", + "node_id":"dataartsstudio_01_0056.xml", "product_code":"cdm", "code":"88", "des":"Because DCS restricts the commands for obtaining keys, it cannot serve as the migration source but can be the migration destination.
The Redis service of the third-party ", "doc_type":"usermanual", "kw":"From Redis,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"From Redis", "githuburl":"" }, { "uri":"dataartsstudio_01_0058.html", + "node_id":"dataartsstudio_01_0058.xml", "product_code":"cdm", "code":"89", "des":"If the source link of a job is the Link to Kafka or Link to DMS Kafka, configure the source job parameters based on Table 1.", "doc_type":"usermanual", "kw":"From Kafka/DMS Kafka,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"From Kafka/DMS Kafka", "githuburl":"" }, { "uri":"dataartsstudio_01_0059.html", + "node_id":"dataartsstudio_01_0059.xml", "product_code":"cdm", "code":"90", "des":"If the source link of a job is the Link to Elasticsearch/CSS, configure the source job parameters based on Table 1.On the Map Field page, you can set custom fields for th", "doc_type":"usermanual", "kw":"From Elasticsearch or CSS,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"From Elasticsearch or CSS", "githuburl":"" }, { "uri":"dataartsstudio_01_0060.html", + "node_id":"dataartsstudio_01_0060.xml", "product_code":"cdm", "code":"91", "des":"If the source link of a job is the Link to CloudTable OpenTSDB, configure the source job parameters based on Table 1.", "doc_type":"usermanual", "kw":"From OpenTSDB,Source Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"From OpenTSDB", "githuburl":"" }, { "uri":"dataartsstudio_01_0061.html", + "node_id":"dataartsstudio_01_0061.xml", "product_code":"cdm", "code":"92", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Destination Job Parameters", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Destination Job Parameters", "githuburl":"" }, { "uri":"dataartsstudio_01_0062.html", + "node_id":"dataartsstudio_01_0062.xml", "product_code":"cdm", "code":"93", "des":"If the destination link of a job is the Link to OBS, configure the destination job parameters based on Table 1.Advanced attributes are optional and not displayed by defau", "doc_type":"usermanual", "kw":"To OBS,Destination Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"To OBS", "githuburl":"" }, { "uri":"dataartsstudio_01_0063.html", + "node_id":"dataartsstudio_01_0063.xml", "product_code":"cdm", "code":"94", "des":"If the destination link of a job is one of them listed in Link to HDFS, configure the destination job parameters based on Table 1.HDFS supports the UTF-8 encoding only.
R", "doc_type":"usermanual", "kw":"To HDFS,Destination Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"To HDFS", "githuburl":"" }, { "uri":"dataartsstudio_01_0064.html", + "node_id":"dataartsstudio_01_0064.xml", "product_code":"cdm", "code":"95", "des":"If the destination link of a job is one of them listed in Link to HBase or Link to CloudTable, configure the destination job parameters based on Table 1.", "doc_type":"usermanual", "kw":"To HBase/CloudTable,Destination Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"To HBase/CloudTable", "githuburl":"" }, { "uri":"dataartsstudio_01_0066.html", + "node_id":"dataartsstudio_01_0066.xml", "product_code":"cdm", "code":"96", "des":"If the destination link of a job is the Link to Hive, configure the destination job parameters based on Table 1.When Hive serves as the destination end, a table whose sto", "doc_type":"usermanual", "kw":"To Hive,Destination Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"To Hive", "githuburl":"" }, { "uri":"dataartsstudio_01_0068.html", + "node_id":"dataartsstudio_01_0068.xml", "product_code":"cdm", "code":"97", "des":"Common relational databases serving as the destination include RDS for MySQL, RDS for SQL Server, and RDS for PostgreSQL.To import data to the preceding data sources, con", "doc_type":"usermanual", "kw":"To a Common Relational Database,Destination Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"To a Common Relational Database", "githuburl":"" }, { "uri":"dataartsstudio_01_1251.html", + "node_id":"dataartsstudio_01_1251.xml", "product_code":"", "code":"98", "des":"If the destination link of a job is a DWS link, configure the destination job parameters based on Table 1.Figure 1 describes the field mapping between DWS tables created ", "doc_type":"", "kw":"To DWS,Destination Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"To DWS", "githuburl":"" }, { "uri":"dataartsstudio_01_0069.html", + "node_id":"dataartsstudio_01_0069.xml", "product_code":"cdm", "code":"99", "des":"If the destination link of a job is the Link to DDS, configure the destination job parameters based on Table 1.Parameter descriptionParameterDescriptionExample ValueDatab", "doc_type":"usermanual", "kw":"To DDS,Destination Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"To DDS", "githuburl":"" }, { "uri":"dataartsstudio_01_0070.html", + "node_id":"dataartsstudio_01_0070.xml", "product_code":"cdm", "code":"100", "des":"If the data is imported to DCS, configure the destination job parameters based on Table 1.Parameter descriptionParameterDescriptionExample ValueRedis Key PrefixKey prefix", "doc_type":"usermanual", "kw":"To DCS,Destination Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"To DCS", "githuburl":"" }, { "uri":"dataartsstudio_01_0071.html", + "node_id":"dataartsstudio_01_0071.xml", "product_code":"cdm", "code":"101", "des":"If the destination link of a job is the Link to Elasticsearch/CSS, that is, when data is imported to CSS, configure the destination job parameters based on 
Table 1.", "doc_type":"usermanual", "kw":"To CSS,Destination Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"To CSS", "githuburl":"" }, { "uri":"dataartsstudio_01_0072.html", + "node_id":"dataartsstudio_01_0072.xml", "product_code":"cdm", "code":"102", "des":"If the destination link of a job is the Link to DLI, configure the destination job parameters based on Table 1.", "doc_type":"usermanual", "kw":"To DLI,Destination Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"To DLI", "githuburl":"" }, { "uri":"dataartsstudio_01_0074.html", + "node_id":"dataartsstudio_01_0074.xml", "product_code":"cdm", "code":"103", "des":"If the destination link of a job is the Link to CloudTable OpenTSDB, configure the destination job parameters based on Table 1.", "doc_type":"usermanual", "kw":"To OpenTSDB,Destination Job Parameters,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"To OpenTSDB", "githuburl":"" }, { "uri":"dataartsstudio_01_0082.html", + "node_id":"dataartsstudio_01_0082.xml", "product_code":"dgc", "code":"104", "des":"CDM supports scheduled execution of table/file migration jobs by minute, hour, day, week, and month. This section describes how to configure scheduled job parameters.When", "doc_type":"usermanual", "kw":"Scheduling Job Execution,Managing Jobs,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Scheduling Job Execution", "githuburl":"" }, { "uri":"dataartsstudio_01_0083.html", + "node_id":"dataartsstudio_01_0083.xml", "product_code":"cdm", "code":"105", "des":"On the Settings tab page, you can perform the following operations:Maximum Concurrent Extractors of JobsScheduled Backup and Restoration of CDM JobsEnvironment Variables ", "doc_type":"usermanual", "kw":"Job Configuration Management,Managing Jobs,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Job Configuration Management", "githuburl":"" }, { "uri":"dataartsstudio_01_0084.html", + "node_id":"dataartsstudio_01_0084.xml", "product_code":"cdm", "code":"106", "des":"Existing CDM jobs can be viewed, modified, deleted, started, and stopped. This section describes how to view and modify a job.Viewing job statusThe job status can be New,", "doc_type":"usermanual", "kw":"Managing a Single Job,Managing Jobs,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Managing a Single Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0085.html", + "node_id":"dataartsstudio_01_0085.xml", "product_code":"cdm", "code":"107", "des":"This section describes how to manage CDM table/file migration jobs in batches. 
The following operations are involved:Manage jobs by group.Run jobs in batches.Delete jobs ", "doc_type":"usermanual", "kw":"Managing Jobs in Batches,Managing Jobs,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Managing Jobs in Batches", "githuburl":"" }, { "uri":"dataartsstudio_01_0125.html", + "node_id":"dataartsstudio_01_0125.xml", "product_code":"cdm", "code":"108", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Auditing", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Auditing", "githuburl":"" }, { "uri":"dataartsstudio_01_0126.html", + "node_id":"dataartsstudio_01_0126.xml", "product_code":"cdm", "code":"109", "des":"CTS provides records of operations on cloud service resources. With CTS, you can query, audit, and backtrack those operations.", "doc_type":"usermanual", "kw":"Key CDM Operations Recorded by CTS,Auditing,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Key CDM Operations Recorded by CTS", "githuburl":"" }, { "uri":"dataartsstudio_01_0127.html", + "node_id":"dataartsstudio_01_0127.xml", "product_code":"cdm", "code":"110", "des":"After you enable CTS, the system starts to record the CDM operations. The management console of CTS stores the traces of the latest seven days.This section describes how ", "doc_type":"usermanual", "kw":"Viewing Traces,Auditing,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Viewing Traces", "githuburl":"" }, { "uri":"dataartsstudio_01_0086.html", + "node_id":"dataartsstudio_01_0086.xml", "product_code":"cdm", "code":"111", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Tutorials", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Tutorials", "githuburl":"" }, { "uri":"dataartsstudio_01_0130.html", + "node_id":"dataartsstudio_01_0130.xml", "product_code":"", "code":"112", "des":"MRS Hive links are applicable to the MapReduce Service (MRS). This tutorial describes how to create an MRS Hive link.You have created a CDM cluster.You have obtained the ", "doc_type":"", "kw":"Creating an MRS Hive Link,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Creating an MRS Hive Link", "githuburl":"" }, { "uri":"dataartsstudio_01_0131.html", + "node_id":"dataartsstudio_01_0131.xml", "product_code":"", "code":"113", "des":"MySQL links are applicable to third-party cloud MySQL services and MySQL created in a local data center or ECS. 
This tutorial describes how to create a MySQL link.You hav", "doc_type":"", "kw":"Creating a MySQL Link,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Creating a MySQL Link", "githuburl":"" }, { "uri":"dataartsstudio_01_0092.html", + "node_id":"dataartsstudio_01_0092.xml", "product_code":"cdm", "code":"114", "des":"MRS provides enterprise-level big data clusters on the cloud. It contains HDFS, Hive, and Spark components and is applicable to massive data analysis of enterprises.Hive ", "doc_type":"usermanual", "kw":"Migrating Data from MySQL to MRS Hive,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Migrating Data from MySQL to MRS Hive", "githuburl":"" }, { "uri":"dataartsstudio_01_0100.html", + "node_id":"dataartsstudio_01_0100.xml", "product_code":"", "code":"115", "des":"CDM supports table-to-OBS data migration. This section describes how to migrate tables from a MySQL database to OBS. The process is as follows:Creating a CDM Cluster and ", "doc_type":"", "kw":"Migrating Data from MySQL to OBS,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Migrating Data from MySQL to OBS", "githuburl":"" }, { "uri":"dataartsstudio_01_0101.html", + "node_id":"dataartsstudio_01_0101.xml", "product_code":"", "code":"116", "des":"CDM supports table-to-table data migration. This section describes how to migrate data from MySQL to DWS. The process is as follows:Creating a CDM Cluster and Binding an ", "doc_type":"", "kw":"Migrating Data from MySQL to DWS,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Migrating Data from MySQL to DWS", "githuburl":"" }, { "uri":"dataartsstudio_01_0098.html", + "node_id":"dataartsstudio_01_0098.xml", "product_code":"cdm", "code":"117", "des":"This section describes how to migrate the entire on-premises MySQL database to RDS using CDM's entire DB migration function.Currently, CDM can migrate the entire on-p", "doc_type":"usermanual", "kw":"Migrating an Entire MySQL Database to RDS,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Migrating an Entire MySQL Database to RDS", "githuburl":"" }, { "uri":"dataartsstudio_01_0091.html", + "node_id":"dataartsstudio_01_0091.xml", "product_code":"cdm", "code":"118", "des":"Cloud Search Service provides users with structured and unstructured data search, statistics, and report capabilities. This section describes how to use CDM to migrate da", "doc_type":"usermanual", "kw":"Migrating Data from Oracle to CSS,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Migrating Data from Oracle to CSS", "githuburl":"" }, { "uri":"dataartsstudio_01_0133.html", + "node_id":"dataartsstudio_01_0133.xml", "product_code":"", "code":"119", "des":"CDM supports table-to-table migration. This section describes how to use CDM to migrate data from Oracle to Data Warehouse Service (DWS). The procedure is as follows:Crea", "doc_type":"", "kw":"Migrating Data from Oracle to DWS,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Migrating Data from Oracle to DWS", "githuburl":"" }, { "uri":"dataartsstudio_01_0088.html", + "node_id":"dataartsstudio_01_0088.xml", "product_code":"cdm", "code":"120", "des":"CDM supports data migration between cloud services. 
This section describes how to use CDM to migrate data from OBS to CSS. The procedure is as follows:Creating a CDM Clus", "doc_type":"usermanual", "kw":"Migrating Data from OBS to CSS,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Migrating Data from OBS to CSS", "githuburl":"" }, { "uri":"dataartsstudio_01_0089.html", + "node_id":"dataartsstudio_01_0089.xml", "product_code":"cdm", "code":"121", "des":"DLI is a fully hosted big data query service. This section describes how to use CDM to migrate data from OBS to DLI. The procedure includes four steps:Creating a CDM Clus", "doc_type":"usermanual", "kw":"Migrating Data from OBS to DLI,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Migrating Data from OBS to DLI", "githuburl":"" }, { "uri":"dataartsstudio_01_0103.html", + "node_id":"dataartsstudio_01_0103.xml", "product_code":"", "code":"122", "des":"CDM supports file-to-file data migration. This section describes how to migrate data from MRS HDFS to OBS. The process is as follows:Creating a CDM Cluster and Binding an", "doc_type":"", "kw":"Migrating Data from MRS HDFS to OBS,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Migrating Data from MRS HDFS to OBS", "githuburl":"" }, { "uri":"dataartsstudio_01_0099.html", + "node_id":"dataartsstudio_01_0099.xml", "product_code":"cdm", "code":"123", "des":"CSS provides users with structured and unstructured data search, statistics, and report capabilities. This section describes how to use CDM to migrate the entire Elastics", "doc_type":"usermanual", "kw":"Migrating the Entire Elasticsearch Database to CSS,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Migrating the Entire Elasticsearch Database to CSS", "githuburl":"" }, { "uri":"dataartsstudio_01_0087.html", + "node_id":"dataartsstudio_01_0087.xml", "product_code":"cdm", "code":"124", "des":"CDM allows you to migrate data from DDS to other data sources. This section describes how to use CDM to migrate data from DDS to DWS. The procedure includes four steps:Cr", "doc_type":"usermanual", "kw":"Migrating Data from DDS to DWS,Tutorials,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Migrating Data from DDS to DWS", "githuburl":"" }, { "uri":"dataartsstudio_01_0110.html", + "node_id":"dataartsstudio_01_0110.xml", "product_code":"cdm", "code":"125", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Advanced Operations", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Advanced Operations", "githuburl":"" }, { "uri":"dataartsstudio_01_0111.html", + "node_id":"dataartsstudio_01_0111.xml", "product_code":"dgc", "code":"126", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Incremental Migration", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Incremental Migration", "githuburl":"" }, { "uri":"dataartsstudio_01_0112.html", + "node_id":"dataartsstudio_01_0112.xml", "product_code":"cdm", "code":"127", "des":"CDM supports incremental migration of file systems. After full migration is complete, all new files or only specified directories or files can be exported.Currently, CDM ", "doc_type":"usermanual", "kw":"Incremental File Migration,Incremental Migration,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Incremental File Migration", "githuburl":"" }, { "uri":"dataartsstudio_01_0113.html", + "node_id":"dataartsstudio_01_0113.xml", "product_code":"cdm", "code":"128", "des":"CDM supports incremental migration of relational databases. After a full migration is complete, data in a specified period can be incrementally migrated. For example, dat", "doc_type":"usermanual", "kw":"Incremental Migration of Relational Databases,Incremental Migration,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Incremental Migration of Relational Databases", "githuburl":"" }, { "uri":"dataartsstudio_01_0114.html", + "node_id":"dataartsstudio_01_0114.xml", "product_code":"cdm", "code":"129", "des":"During the creation of table/file migration jobs, CDM supports the macro variables of date and time in the following parameters of the source and destination links:Source", "doc_type":"usermanual", "kw":"Using Macro Variables of Date and Time,Incremental Migration,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Using Macro Variables of Date and Time", "githuburl":"" }, { "uri":"dataartsstudio_01_0115.html", + "node_id":"dataartsstudio_01_0115.xml", "product_code":"cdm", "code":"130", "des":"You can use CDM to export data in a specified period of time from HBase (including MRS HBase, FusionInsight HBase, and Apache HBase) and CloudTable. The CDM scheduled job", "doc_type":"usermanual", "kw":"HBase/CloudTable Incremental Migration,Incremental Migration,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"HBase/CloudTable Incremental Migration", "githuburl":"" }, { "uri":"dataartsstudio_01_0116.html", + "node_id":"dataartsstudio_01_0116.xml", "product_code":"cdm", "code":"131", "des":"When a CDM job fails to be executed, CDM rolls back the data to the state before the job starts and automatically deletes data from the destination table.Parameter positi", "doc_type":"usermanual", "kw":"Migration in Transaction Mode,Advanced Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Migration in Transaction Mode", "githuburl":"" }, { "uri":"dataartsstudio_01_0117.html", + "node_id":"dataartsstudio_01_0117.xml", "product_code":"cdm", "code":"132", "des":"When you migrate files to a file system, CDM can encrypt and decrypt those files. 
Currently, CDM supports the following encryption modes:AES-256-GCMKMS EncryptionAES-256-", "doc_type":"usermanual", "kw":"Encryption and Decryption During File Migration,Advanced Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Encryption and Decryption During File Migration", "githuburl":"" }, { "uri":"dataartsstudio_01_0118.html", + "node_id":"dataartsstudio_01_0118.xml", "product_code":"cdm", "code":"133", "des":"CDM extracts data from the migration source and writes the data to the migration destination. Figure 1 shows the migration mode when files are migrated to OBS.During the ", "doc_type":"usermanual", "kw":"MD5 Verification,Advanced Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"MD5 Verification", "githuburl":"" }, { "uri":"dataartsstudio_01_0104.html", + "node_id":"dataartsstudio_01_0104.xml", "product_code":"cdm", "code":"134", "des":"You can create a field converter on the Map Field page when creating a table/file migration job.Creating a field converterField mapping is not involved when the binary fo", "doc_type":"usermanual", "kw":"Field Conversion,Advanced Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Field Conversion", "githuburl":"" }, { "uri":"dataartsstudio_01_0105.html", + "node_id":"dataartsstudio_01_0105.xml", "product_code":"cdm", "code":"135", "des":"You can migrate files (a maximum of 50) with specified names from FTP, SFTP, or OBS at a time. The exported files can only be written to the same directory on the migrati", "doc_type":"usermanual", "kw":"Migrating Files with Specified Names,Advanced Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Migrating Files with Specified Names", "githuburl":"" }, { "uri":"dataartsstudio_01_0106.html", + "node_id":"dataartsstudio_01_0106.xml", "product_code":"cdm", "code":"136", "des":"During table/file migration, CDM uses delimiters to separate fields in CSV files. However, delimiters cannot be used in complex semi-structured data because the field val", "doc_type":"usermanual", "kw":"Regular Expressions for Separating Semi-structured Text,Advanced Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Regular Expressions for Separating Semi-structured Text", "githuburl":"" }, { "uri":"dataartsstudio_01_0109.html", + "node_id":"dataartsstudio_01_0109.xml", "product_code":"", "code":"137", "des":"When you create a job on the CDM console to migrate tables or files of a relational database, you can add a field to record the time when they were written to the databas", "doc_type":"", "kw":"Recording the Time When Data Is Written to the Database,Advanced Operations,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Recording the Time When Data Is Written to the Database", "githuburl":"" }, { "uri":"dataartsstudio_01_0108.html", + "node_id":"dataartsstudio_01_0108.xml", "product_code":"cdm", "code":"138", "des":"When creating a CDM job, you need to specify File Format in the job parameters of the migration source and destination in some scenarios. 
This section describes the appli", "doc_type":"usermanual", "kw":"File Formats,Advanced Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"File Formats", "githuburl":"" }, { "uri":"dataartsstudio_01_0400.html", + "node_id":"dataartsstudio_01_0400.xml", "product_code":"dgc", "code":"139", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"DataArts Factory", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"DataArts Factory", "githuburl":"" }, { "uri":"dataartsstudio_01_0401.html", + "node_id":"dataartsstudio_01_0401.xml", "product_code":"dgc", "code":"140", "des":"DataArts Factory is a one-stop big data collaborative development platform that provides fully managed big data scheduling capabilities. It manages various big data servi", "doc_type":"usermanual", "kw":"Overview,DataArts Factory,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Overview", "githuburl":"" }, { "uri":"dataartsstudio_01_0403.html", + "node_id":"dataartsstudio_01_0403.xml", "product_code":"dgc", "code":"141", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Data Management", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Data Management", "githuburl":"" }, { "uri":"dataartsstudio_01_0402.html", + "node_id":"dataartsstudio_01_0402.xml", "product_code":"dgc", "code":"142", "des":"The data management function helps you quickly establish data models and provides you with data entities for script and job development. 
With data management, you can:Man", "doc_type":"usermanual", "kw":"Data Management Process,Data Management,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Data Management Process", "githuburl":"" }, { "uri":"dataartsstudio_01_0404.html", + "node_id":"dataartsstudio_01_0404.xml", "product_code":"dgc", "code":"143", "des":"After a data connection is created, you can perform data operations on DataArts Factory, for example, managing databases, namespaces, database schema, and tables.With one", "doc_type":"usermanual", "kw":"Creating a Data Connection,Data Management,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Creating a Data Connection", "githuburl":"" }, { "uri":"dataartsstudio_01_0405.html", + "node_id":"dataartsstudio_01_0405.xml", "product_code":"dgc", "code":"144", "des":"After creating a data connection, you can create a database on the console or using a SQL script.(Recommended) Console: You can directly create a database on the DataArts", "doc_type":"usermanual", "kw":"Creating a Database,Data Management,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Creating a Database", "githuburl":"" }, { "uri":"dataartsstudio_01_0412.html", + "node_id":"dataartsstudio_01_0412.xml", "product_code":"dgc", "code":"145", "des":"After creating a DWS data connection, you can manage the database schemas under the DWS data connection.A DWS data connection has been created. For details, see Creating ", "doc_type":"usermanual", "kw":"(Optional) Creating a Database Schema,Data Management,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"(Optional) Creating a Database Schema", "githuburl":"" }, { "uri":"dataartsstudio_01_0416.html", + "node_id":"dataartsstudio_01_0416.xml", "product_code":"dgc", "code":"146", "des":"You can create a table on the DataArts Factory console, in DDL mode, or using a SQL script.(Recommended) Console: You can directly create a table on the DataArts Studio D", "doc_type":"usermanual", "kw":"Creating a Table,Data Management,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Creating a Table", "githuburl":"" }, { "uri":"dataartsstudio_01_0421.html", + "node_id":"dataartsstudio_01_0421.xml", "product_code":"dgc", "code":"147", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Script Development", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Script Development", "githuburl":"" }, { "uri":"dataartsstudio_01_0422.html", + "node_id":"dataartsstudio_01_0422.xml", "product_code":"dgc", "code":"148", "des":"The script development function provides the following capabilities:Provides an online script editor for developing and debugging SQL, Python, and Shell scripts.Supports ", "doc_type":"usermanual", "kw":"Script Development Process,Script Development,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Script Development Process", "githuburl":"" }, { "uri":"dataartsstudio_01_0423.html", + "node_id":"dataartsstudio_01_0423.xml", "product_code":"dgc", "code":"149", "des":"DataArts Factory allows you to edit, debug, and run scripts online. You must create a script before developing it.Currently, you can create the following types of scripts", "doc_type":"usermanual", "kw":"Creating a Script,Script Development,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Creating a Script", "githuburl":"" }, { "uri":"dataartsstudio_01_0406.html", + "node_id":"dataartsstudio_01_0406.xml", "product_code":"dgc", "code":"150", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Developing Scripts", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Developing Scripts", "githuburl":"" }, { "uri":"dataartsstudio_01_0424.html", + "node_id":"dataartsstudio_01_0424.xml", "product_code":"dgc", "code":"151", "des":"You can develop, debug, and run SQL scripts online. The developed scripts can be run in jobs. For details, see Developing a Job.A corresponding cloud service has been ena", "doc_type":"usermanual", "kw":"Developing an SQL Script,Developing Scripts,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Developing an SQL Script", "githuburl":"" }, { "uri":"dataartsstudio_01_0425.html", + "node_id":"dataartsstudio_01_0425.xml", "product_code":"dgc", "code":"152", "des":"You can develop, debug, and run shell scripts online. The developed scripts can be run in jobs. For details, see Developing a Job.A shell script has been added. 
For detai", "doc_type":"usermanual", "kw":"Developing a Shell Script,Developing Scripts,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Developing a Shell Script", "githuburl":"" }, { "uri":"dataartsstudio_01_4503.html", + "node_id":"dataartsstudio_01_4503.xml", "product_code":"dgc", "code":"153", "des":"You can develop, debug, and run Python scripts online. The developed scripts can be run in jobs. For details, see Developing a Job.A Python script has been added. For det", "doc_type":"usermanual", "kw":"Developing a Python Script,Developing Scripts,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Developing a Python Script", "githuburl":"" }, { "uri":"dataartsstudio_01_0901.html", + "node_id":"dataartsstudio_01_0901.xml", "product_code":"dgc", "code":"154", "des":"This involves the version management and lock functions.Version management: traces script and job changes, and supports version comparison and rollback. The system retain", "doc_type":"usermanual", "kw":"Submitting a Version and Unlocking the Script,Script Development,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Submitting a Version and Unlocking the Script", "githuburl":"" }, { "uri":"dataartsstudio_01_0407.html", + "node_id":"dataartsstudio_01_0407.xml", "product_code":"dgc", "code":"155", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"(Optional) Managing Scripts", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"(Optional) Managing Scripts", "githuburl":"" }, { "uri":"dataartsstudio_01_0430.html", + "node_id":"dataartsstudio_01_0430.xml", "product_code":"dgc", "code":"156", "des":"This section describes how to copy a script.A script has been developed. For details about how to develop scripts, see Developing Scripts.Log in to the DataArts Studio co", "doc_type":"usermanual", "kw":"Copying a Script,(Optional) Managing Scripts,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Copying a Script", "githuburl":"" }, { "uri":"dataartsstudio_01_0426.html", + "node_id":"dataartsstudio_01_0426.xml", "product_code":"dgc", "code":"157", "des":"You can copy the name of a script and rename a script.A script has been developed. 
For details about how to develop scripts, see Developing Scripts.Log in to the DataArts", "doc_type":"usermanual", "kw":"Copying the Script Name and Renaming a Script,(Optional) Managing Scripts,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Copying the Script Name and Renaming a Script", "githuburl":"" }, { "uri":"dataartsstudio_01_0427.html", + "node_id":"dataartsstudio_01_0427.xml", "product_code":"dgc", "code":"158", "des":"You can move a script file from one directory to another or move a script directory to another directory.A script has been developed. For details about how to develop scr", "doc_type":"usermanual", "kw":"Moving a Script or Script Directory,(Optional) Managing Scripts,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Moving a Script or Script Directory", "githuburl":"" }, { "uri":"dataartsstudio_01_0428.html", + "node_id":"dataartsstudio_01_0428.xml", "product_code":"dgc", "code":"159", "des":"You can export one or more script files from the script directory. The exported files store the latest content in the development state.Click in the script directory and", "doc_type":"usermanual", "kw":"Exporting and Importing a Script,(Optional) Managing Scripts,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Exporting and Importing a Script", "githuburl":"" }, { "uri":"dataartsstudio_01_0471.html", + "node_id":"dataartsstudio_01_0471.xml", "product_code":"dgc", "code":"160", "des":"This section describes how to view the references of a script or all the scripts in a folder.A script has been developed. For details about how to develop scripts, see De", "doc_type":"usermanual", "kw":"Viewing Script References,(Optional) Managing Scripts,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Viewing Script References", "githuburl":"" }, { "uri":"dataartsstudio_01_0429.html", + "node_id":"dataartsstudio_01_0429.xml", "product_code":"dgc", "code":"161", "des":"If you do not need to use a script any more, perform the following operations to delete it.When you delete a script, the system checks whether the script is being referen", "doc_type":"usermanual", "kw":"Deleting a Script,(Optional) Managing Scripts,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Deleting a Script", "githuburl":"" }, { "uri":"dataartsstudio_01_1102.html", + "node_id":"dataartsstudio_01_1102.xml", "product_code":"", "code":"162", "des":"DataArts Factory allows you to change the owner for scripts with a few clicks.Log in to the DataArts Studio console. Locate an instance and click Access. 
On the displayed", "doc_type":"", "kw":"Changing the Script Owner,(Optional) Managing Scripts,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Changing the Script Owner", "githuburl":"" }, { "uri":"dataartsstudio_01_1107.html", + "node_id":"dataartsstudio_01_1107.xml", "product_code":"", "code":"163", "des":"This section describes how to unlock scripts in batches.Log in to the DataArts Studio console. Locate an instance and click Access. On the displayed page, locate a worksp", "doc_type":"", "kw":"Unlocking Scripts,(Optional) Managing Scripts,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Unlocking Scripts", "githuburl":"" }, { "uri":"dataartsstudio_01_0431.html", + "node_id":"dataartsstudio_01_0431.xml", "product_code":"dgc", "code":"164", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Job Development", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Job Development", "githuburl":"" }, { "uri":"dataartsstudio_01_0432.html", + "node_id":"dataartsstudio_01_0432.xml", "product_code":"dgc", "code":"165", "des":"The job development function provides the following capabilities:Provides a graphical designer that allows you to quickly build a data processing workflow by drag-and-dro", "doc_type":"usermanual", "kw":"Job Development Process,Job Development,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Job Development Process", "githuburl":"" }, { "uri":"dataartsstudio_01_0434.html", + "node_id":"dataartsstudio_01_0434.xml", "product_code":"dgc", "code":"166", "des":"A job is composed of one or more nodes that are performed collaboratively to complete data operations. Before developing a job, create a new one.Each workspace can hold a", "doc_type":"usermanual", "kw":"Creating a Job,Job Development,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Creating a Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0435.html", + "node_id":"dataartsstudio_01_0435.xml", "product_code":"dgc", "code":"167", "des":"This section describes how to develop and configure a job.You have created a job. For details about how to create a job, see Creating a Job.You have locked the job. Other", "doc_type":"usermanual", "kw":"Developing a Job,Job Development,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Developing a Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0470.html", + "node_id":"dataartsstudio_01_0470.xml", "product_code":"dgc", "code":"168", "des":"This section describes how to set up scheduling for an orchestrated job.If the processing mode of a job is batch processing, configure scheduling types for jobs. 
Three sc", "doc_type":"usermanual", "kw":"Setting Up Scheduling for a Job,Job Development,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Setting Up Scheduling for a Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0902.html", + "node_id":"dataartsstudio_01_0902.xml", "product_code":"dgc", "code":"169", "des":"This involves the version management and lock functions.Version management: traces script and job changes, and supports version comparison and rollback. The system retain", "doc_type":"usermanual", "kw":"Submitting a Version and Unlocking the Script,Job Development,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Submitting a Version and Unlocking the Script", "githuburl":"" }, { "uri":"dataartsstudio_01_0408.html", + "node_id":"dataartsstudio_01_0408.xml", "product_code":"dgc", "code":"170", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"(Optional) Managing Jobs", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"(Optional) Managing Jobs", "githuburl":"" }, { "uri":"dataartsstudio_01_0440.html", + "node_id":"dataartsstudio_01_0440.xml", "product_code":"dgc", "code":"171", "des":"This section describes how to copy a job.A job has been developed. For details about how to develop a job, see Developing a Job.Log in to the DataArts Studio console. Loc", "doc_type":"usermanual", "kw":"Copying a Job,(Optional) Managing Jobs,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Copying a Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0436.html", + "node_id":"dataartsstudio_01_0436.xml", "product_code":"dgc", "code":"172", "des":"You can copy the name of a job and rename a job.A job has been developed. For details about how to develop a job, see Developing a Job.Log in to the DataArts Studio conso", "doc_type":"usermanual", "kw":"Copying the Job Name and Renaming a Job,(Optional) Managing Jobs,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Copying the Job Name and Renaming a Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0437.html", + "node_id":"dataartsstudio_01_0437.xml", "product_code":"dgc", "code":"173", "des":"You can move a job file from one directory to another or move a job directory to another directory.A job has been developed. 
For details about how to develop a job, see D", "doc_type":"usermanual", "kw":"Moving a Job or Job Directory,(Optional) Managing Jobs,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Moving a Job or Job Directory", "githuburl":"" }, { "uri":"dataartsstudio_01_0438.html", + "node_id":"dataartsstudio_01_0438.xml", "product_code":"dgc", "code":"174", "des":"Exporting a job is to export the latest saved content in the development state.After a job is imported, the content in the development state is overwritten and a new vers", "doc_type":"usermanual", "kw":"Exporting and Importing a Job,(Optional) Managing Jobs,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Exporting and Importing a Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0439.html", + "node_id":"dataartsstudio_01_0439.xml", "product_code":"dgc", "code":"175", "des":"If you do not need to use a job any more, perform the following operations to delete it to reduce the quota usage of the job.Deleted jobs cannot be recovered. Exercise ca", "doc_type":"usermanual", "kw":"Deleting a Job,(Optional) Managing Jobs,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Deleting a Job", "githuburl":"" }, { "uri":"dataartsstudio_01_1101.html", + "node_id":"dataartsstudio_01_1101.xml", "product_code":"", "code":"176", "des":"DataArts Factory allows you to change the owner for jobs with a few clicks.Log in to the DataArts Studio console. Locate an instance and click Access. On the displayed pa", "doc_type":"", "kw":"Changing the Job Owner,(Optional) Managing Jobs,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Changing the Job Owner", "githuburl":"" }, { "uri":"dataartsstudio_01_1108.html", + "node_id":"dataartsstudio_01_1108.xml", "product_code":"", "code":"177", "des":"This section describes how to unlock jobs in batches.Log in to the DataArts Studio console. Locate an instance and click Access. On the displayed page, locate a workspace", "doc_type":"", "kw":"Unlocking Jobs,(Optional) Managing Jobs,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Unlocking Jobs", "githuburl":"" }, { "uri":"dataartsstudio_01_0503.html", + "node_id":"dataartsstudio_01_0503.xml", "product_code":"dgc", "code":"178", "des":"The solution aims to provide users with convenient and systematic management operations and better meet service requirements and objectives. Each solution can contain one", "doc_type":"usermanual", "kw":"Solution,DataArts Factory,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Solution", "githuburl":"" }, { "uri":"dataartsstudio_01_1105.html", + "node_id":"dataartsstudio_01_1105.xml", "product_code":"", "code":"179", "des":"This section describes how to view the execution history of scripts, jobs, and nodes over a week.This function depends on OBS buckets. 
For details about how to configure ", "doc_type":"", "kw":"Execution History,DataArts Factory,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Execution History", "githuburl":"" }, { "uri":"dataartsstudio_01_0505.html", + "node_id":"dataartsstudio_01_0505.xml", "product_code":"dgc", "code":"180", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"O&M and Scheduling", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"O&M and Scheduling", "githuburl":"" }, { "uri":"dataartsstudio_01_0506.html", + "node_id":"dataartsstudio_01_0506.xml", "product_code":"dgc", "code":"181", "des":"Choose Monitoring > Overview. On the Overview page, you can view the statistics of job instances in charts. Currently, you can view four types of statistics:Today's Job I", "doc_type":"usermanual", "kw":"Overview,O&M and Scheduling,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Overview", "githuburl":"" }, { "uri":"dataartsstudio_01_0413.html", + "node_id":"dataartsstudio_01_0413.xml", "product_code":"dgc", "code":"182", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Monitoring a Job", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Monitoring a Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0508.html", + "node_id":"dataartsstudio_01_0508.xml", "product_code":"dgc", "code":"183", "des":"In the batch processing mode, data is processed periodically in batches based on the job-level scheduling plan, which is used in scenarios with low real-time requirements", "doc_type":"usermanual", "kw":"Monitoring a Batch Job,Monitoring a Job,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Monitoring a Batch Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0509.html", + "node_id":"dataartsstudio_01_0509.xml", "product_code":"dgc", "code":"184", "des":"In the real-time processing mode, data is processed in real time, which is used in scenarios with high real-time performance. This type of job is a pipeline that consists", "doc_type":"usermanual", "kw":"Monitoring a Real-Time Job,Monitoring a Job,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Monitoring a Real-Time Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0511.html", + "node_id":"dataartsstudio_01_0511.xml", "product_code":"dgc", "code":"185", "des":"Each time a job is executed, a job instance record is generated. In the navigation pane of the DataArts Factory console, choose Monitoring. 
On the Monitor Instance page, ", "doc_type":"usermanual", "kw":"Monitoring an Instance,O&M and Scheduling,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Monitoring an Instance", "githuburl":"" }, { "uri":"dataartsstudio_01_0512.html", + "node_id":"dataartsstudio_01_0512.xml", "product_code":"dgc", "code":"186", "des":"In the navigation tree of the DataArts Factory console, choose Monitoring > Monitor PatchData.On the PatchData Monitoring page, you can view the task status, service date, n", "doc_type":"usermanual", "kw":"Monitoring PatchData,O&M and Scheduling,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Monitoring PatchData", "githuburl":"" }, { "uri":"dataartsstudio_01_0414.html", + "node_id":"dataartsstudio_01_0414.xml", "product_code":"dgc", "code":"187", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Managing Notifications", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Managing Notifications", "githuburl":"" }, { "uri":"dataartsstudio_01_0514.html", + "node_id":"dataartsstudio_01_0514.xml", "product_code":"dgc", "code":"188", "des":"You can configure DLF to notify you of job success after it is performed.Before configuring a notification for a job:Message notification has been enabled and a topic has", "doc_type":"usermanual", "kw":"Managing a Notification,Managing Notifications,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Managing a Notification", "githuburl":"" }, { "uri":"dataartsstudio_01_0515.html", + "node_id":"dataartsstudio_01_0515.xml", "product_code":"dgc", "code":"189", "des":"Notifications can be sent to specified personnel by day, week, or month, allowing related personnel to regularly understand job scheduling information about the quantity o", "doc_type":"usermanual", "kw":"Cycle Overview,Managing Notifications,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Cycle Overview", "githuburl":"" }, { "uri":"dataartsstudio_01_0516.html", + "node_id":"dataartsstudio_01_0516.xml", "product_code":"dgc", "code":"190", "des":"You can back up all jobs, scripts, resources, and environment variables on a daily basis.You can also restore assets that have been backed up, including jobs, scripts, re", "doc_type":"usermanual", "kw":"Managing Backups,O&M and Scheduling,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Managing Backups", "githuburl":"" }, { "uri":"dataartsstudio_01_0517.html", + "node_id":"dataartsstudio_01_0517.xml", "product_code":"dgc", "code":"191", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD 
services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Configuration and Management", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Configuration and Management", "githuburl":"" }, { "uri":"dataartsstudio_01_0510.html", + "node_id":"dataartsstudio_01_0510.xml", "product_code":"dgc", "code":"192", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Configuring Resources", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Configuring Resources", "githuburl":"" }, { "uri":"dataartsstudio_01_0504.html", + "node_id":"dataartsstudio_01_0504.xml", "product_code":"dgc", "code":"193", "des":"This topic describes how to configure and use environment variables.Configure job parameters. If a parameter belongs to multiple jobs, you can extract this parameter as a", "doc_type":"usermanual", "kw":"Configuring Environment Variables,Configuring Resources,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Configuring Environment Variables", "githuburl":"" }, { "uri":"dataartsstudio_01_1106.html", + "node_id":"dataartsstudio_01_1106.xml", "product_code":"", "code":"194", "des":"The execution history of scripts, jobs, and nodes is stored in OBS buckets. If no OBS bucket is available, you cannot view the execution history. This section describes h", "doc_type":"", "kw":"Configuring an OBS Bucket,Configuring Resources,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Configuring an OBS Bucket", "githuburl":"" }, { "uri":"dataartsstudio_01_0532.html", + "node_id":"dataartsstudio_01_0532.xml", "product_code":"dgc", "code":"195", "des":"Job labels are used to label jobs of the same or similar purposes to facilitate job management and query. 
This section describes how to manage job labels, including addin", "doc_type":"usermanual", "kw":"Managing Job Labels,Configuring Resources,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Managing Job Labels", "githuburl":"" }, { "uri":"dataartsstudio_01_0555.html", + "node_id":"dataartsstudio_01_0555.xml", "product_code":"dgc", "code":"196", "des":"The following problems may occur during job execution in DataArts Factory:The job execution mechanism of the DataArts Factory module is to execute the job as the user who", "doc_type":"usermanual", "kw":"Configuring Agencies,Configuring Resources,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Configuring Agencies", "githuburl":"" }, { "uri":"dataartsstudio_01_04501.html", + "node_id":"dataartsstudio_01_04501.xml", "product_code":"", "code":"197", "des":"This section describes how to configure a default item.If a parameter is invoked by multiple jobs, you can use this parameter as the default configuration item. In this w", "doc_type":"", "kw":"Configuring a Default Item,Configuring Resources,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Configuring a Default Item", "githuburl":"" }, { "uri":"dataartsstudio_01_0519.html", + "node_id":"dataartsstudio_01_0519.xml", "product_code":"dgc", "code":"198", "des":"You can upload custom code or text files as resources on Manage Resource and schedule them when running nodes. Nodes that can invoke resources include DLI Spark, MRS Spar", "doc_type":"usermanual", "kw":"Managing Resources,Configuration and Management,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Managing Resources", "githuburl":"" }, { "uri":"dataartsstudio_01_0441.html", + "node_id":"dataartsstudio_01_0441.xml", "product_code":"dgc", "code":"199", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Node Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Node Reference", "githuburl":"" }, { "uri":"dataartsstudio_01_0442.html", + "node_id":"dataartsstudio_01_0442.xml", "product_code":"dgc", "code":"200", "des":"A node defines the operations performed on data. DataArts Factory provides nodes used for data integration, computing and analysis, database operations, and resource mana", "doc_type":"usermanual", "kw":"Node Overview,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Node Overview", "githuburl":"" }, { "uri":"dataartsstudio_01_0443.html", + "node_id":"dataartsstudio_01_0443.xml", "product_code":"dgc", "code":"201", "des":"The CDM Job node is used to run a predefined CDM job for data migration.Table 1, Table 2, and Table 3 describe the parameters of the CDM Job node. 
Configure the lineage t", "doc_type":"usermanual", "kw":"CDM Job,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"CDM Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0447.html", + "node_id":"dataartsstudio_01_0447.xml", "product_code":"dgc", "code":"202", "des":"The Rest Client node is used to respond to RESTful requests in HUAWEI CLOUD. Only the RESTful requests that have been authenticated by using IAM tokens are supported.If some APIs of ", "doc_type":"usermanual", "kw":"Rest Client,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Rest Client", "githuburl":"" }, { "uri":"dataartsstudio_01_0448.html", + "node_id":"dataartsstudio_01_0448.xml", "product_code":"dgc", "code":"203", "des":"The Import GES node is used to import files from an OBS bucket to a GES graph.Table 1 and Table 2 describe the parameters of the Import GES node.", "doc_type":"usermanual", "kw":"Import GES,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Import GES", "githuburl":"" }, { "uri":"dataartsstudio_01_0537.html", + "node_id":"dataartsstudio_01_0537.xml", "product_code":"dgc", "code":"204", "des":"The MRS Kafka node is used to query the number of messages that are not consumed by a topic.Table 1 and Table 2 describe the parameters of the MRS Kafka node.", "doc_type":"usermanual", "kw":"MRS Kafka,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"MRS Kafka", "githuburl":"" }, { "uri":"dataartsstudio_01_0538.html", + "node_id":"dataartsstudio_01_0538.xml", "product_code":"dgc", "code":"205", "des":"The Kafka Client node is used to send data to Kafka topics.Table 1 describes the parameters of the Kafka Client node.", "doc_type":"usermanual", "kw":"Kafka Client,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Kafka Client", "githuburl":"" }, { "uri":"dataartsstudio_01_1098.html", + "node_id":"dataartsstudio_01_1098.xml", "product_code":"dgc", "code":"206", "des":"The ROMA FDI Job node executes a predefined ROMA Connect data integration task to implement data integration and conversion between the source and destination.This node e", "doc_type":"usermanual", "kw":"ROMA FDI Job,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"ROMA FDI Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0536.html", + "node_id":"dataartsstudio_01_0536.xml", "product_code":"dgc", "code":"207", "des":"The DLI Flink Job node is used to execute a predefined DLI job for real-time analysis of streaming data.This node enables you to start a DLI job or query whether a DLI jo", "doc_type":"usermanual", "kw":"DLI Flink Job,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], 
"title":"DLI Flink Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0450.html", + "node_id":"dataartsstudio_01_0450.xml", "product_code":"dgc", "code":"208", "des":"The DLI SQL node is used to transfer SQL statements to DLI for data source analysis and exploration.This node enables you to execute DLI statements during periodical or r", "doc_type":"usermanual", "kw":"DLI SQL,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"DLI SQL", "githuburl":"" }, { "uri":"dataartsstudio_01_0451.html", + "node_id":"dataartsstudio_01_0451.xml", "product_code":"dgc", "code":"209", "des":"The DLI Spark node is used to execute a predefined Spark job.Table 1, Table 2, and Table 3 describe the parameters of the DLI Sparknode node.", "doc_type":"usermanual", "kw":"DLI Spark,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"DLI Spark", "githuburl":"" }, { "uri":"dataartsstudio_01_0452.html", + "node_id":"dataartsstudio_01_0452.xml", "product_code":"dgc", "code":"210", "des":"The DWS SQL node is used to transfer SQL statements to DWS.For details about how to use the DWS SQL operator, see Developing a DWS SQL Job.This node enables you to execut", "doc_type":"usermanual", "kw":"DWS SQL,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"DWS SQL", "githuburl":"" }, { "uri":"dataartsstudio_01_0453.html", + "node_id":"dataartsstudio_01_0453.xml", "product_code":"dgc", "code":"211", "des":"The MRS Spark SQL node is used to execute a predefined SparkSQL statement on MRS.Table 1, Table 2, and Table 3 describe the parameters of the MRS Spark SQLnode node.", "doc_type":"usermanual", "kw":"MRS Spark SQL,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"MRS Spark SQL", "githuburl":"" }, { "uri":"dataartsstudio_01_0454.html", + "node_id":"dataartsstudio_01_0454.xml", "product_code":"dgc", "code":"212", "des":"The MRS Hive SQL node is used to execute a predefined Hive SQL script on DLF.Table 1, Table 2, and Table 3 describe the parameters of the MRS Hive SQLnode node.", "doc_type":"usermanual", "kw":"MRS Hive SQL,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"MRS Hive SQL", "githuburl":"" }, { "uri":"dataartsstudio_01_1099.html", + "node_id":"dataartsstudio_01_1099.xml", "product_code":"dgc", "code":"213", "des":"The MRS Presto SQL node is used to execute the Presto SQL script predefined in DataArts Factory.Table 1, Table 2, and Table 3 describe the parameters of the MRS Presto SQ", "doc_type":"usermanual", "kw":"MRS Presto SQL,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"MRS Presto SQL", "githuburl":"" }, { "uri":"dataartsstudio_01_0455.html", + "node_id":"dataartsstudio_01_0455.xml", "product_code":"dgc", "code":"214", "des":"The MRS Spark node is used to execute a 
predefined Spark job on MRS.Table 1, Table 2, and Table 3 describe the parameters of the MRS Spark node.", "doc_type":"usermanual", "kw":"MRS Spark,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"MRS Spark", "githuburl":"" }, { "uri":"dataartsstudio_01_0456.html", + "node_id":"dataartsstudio_01_0456.xml", + "product_code":"dgc", "code":"215", "des":"The MRS Spark Python node is used to execute a predefined Spark Python job on MRS.For details about how to use the MRS Spark Python operator, see Developing an MRS Spark ", "doc_type":"usermanual", "kw":"MRS Spark Python,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"MRS Spark Python", "githuburl":"" }, { "uri":"dataartsstudio_01_0554.html", + "node_id":"dataartsstudio_01_0554.xml", + "product_code":"dgc", "code":"216", "des":"The MRS Flink node is used to execute predefined Flink jobs in MRS.Table 1 and Table 2 describe the parameters of the MRS Flink node.", "doc_type":"usermanual", "kw":"MRS Flink Job,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"MRS Flink Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0457.html", + "node_id":"dataartsstudio_01_0457.xml", + "product_code":"dgc", "code":"217", "des":"The MRS MapReduce node is used to execute a predefined MapReduce program on MRS.Table 1 and Table 2 describe the parameters of the MRS MapReduce node.", "doc_type":"usermanual", "kw":"MRS MapReduce,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"MRS MapReduce", "githuburl":"" }, { "uri":"dataartsstudio_01_0458.html", + "node_id":"dataartsstudio_01_0458.xml", + "product_code":"dgc", "code":"218", "des":"The CSS node is used to process CSS requests and enable online distributed searching.Table 1 and Table 2 describe the parameters of the CSS node.", "doc_type":"usermanual", "kw":"CSS,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"CSS", "githuburl":"" }, { "uri":"dataartsstudio_01_0459.html", + "node_id":"dataartsstudio_01_0459.xml", + "product_code":"dgc", "code":"219", "des":"The Shell node is used to execute a shell script.With EL expression #{Job.getNodeOutput()}, you can obtain the desired content (4000 characters at most and counted backwa", "doc_type":"usermanual", "kw":"Shell,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Shell", "githuburl":"" }, { "uri":"dataartsstudio_01_0460.html", + "node_id":"dataartsstudio_01_0460.xml", + "product_code":"dgc", "code":"220", "des":"The RDS SQL node is used to transfer SQL statements to RDS.Table 1 and Table 2 describe the parameters of the RDS SQL node.", "doc_type":"usermanual", "kw":"RDS SQL,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + 
"documenttype":"usermanual" + } + ], "title":"RDS SQL", "githuburl":"" }, { "uri":"dataartsstudio_01_0461.html", + "node_id":"dataartsstudio_01_0461.xml", "product_code":"dgc", "code":"221", "des":"The ETL Job node is used to extract data from a specified data source, preprocess the data, and import the data to the target data source.Table 1, Table 2, and Table 3 de", "doc_type":"usermanual", "kw":"ETL Job,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"ETL Job", "githuburl":"" }, { "uri":"dataartsstudio_01_4504.html", + "node_id":"dataartsstudio_01_4504.xml", "product_code":"dgc", "code":"222", "des":"The Python node is used to execute Python statements.Before using a Python node, ensure that the host connected to the node has an environment for executing Python script", "doc_type":"usermanual", "kw":"Python,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Python", "githuburl":"" }, { "uri":"dataartsstudio_01_0462.html", + "node_id":"dataartsstudio_01_0462.xml", "product_code":"dgc", "code":"223", "des":"This function depends on OBS.The Create OBS node is used to create buckets and directories on OBS.Table 1 and Table 2 describe the parameters of the Create OBS node.", "doc_type":"usermanual", "kw":"Create OBS,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Create OBS", "githuburl":"" }, { "uri":"dataartsstudio_01_0463.html", + "node_id":"dataartsstudio_01_0463.xml", "product_code":"dgc", "code":"224", "des":"This function depends on OBS.The Delete OBS node is used to delete a bucket or directory on OBS.Table 1 and Table 2 describe the parameters of the Delete OBS node.", "doc_type":"usermanual", "kw":"Delete OBS,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Delete OBS", "githuburl":"" }, { "uri":"dataartsstudio_01_0464.html", + "node_id":"dataartsstudio_01_0464.xml", "product_code":"dgc", "code":"225", "des":"This function depends on OBS.The OBS Manager node is used to move or copy files from an OBS bucket to a specified directory.Table 1, Table 2, and Table 3 describe the par", "doc_type":"usermanual", "kw":"OBS Manager,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"OBS Manager", "githuburl":"" }, { "uri":"dataartsstudio_01_0465.html", + "node_id":"dataartsstudio_01_0465.xml", "product_code":"dgc", "code":"226", "des":"You can use the Open/Close Resource node to enable or disable services as required.Table 1 and Table 2 describe the parameters of the Open/Close Resource node.", "doc_type":"usermanual", "kw":"Open/Close Resource,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Open/Close Resource", "githuburl":"" }, { "uri":"dataartsstudio_01_0472.html", + "node_id":"dataartsstudio_01_0472.xml", "product_code":"dgc", 
"code":"227", "des":"The Data Quality Monitor node is used to monitor the quality of running data.Table 1 and Table 2 describe the parameters of the Data Quality Monitor node.", "doc_type":"usermanual", "kw":"Data Quality Monitor,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Data Quality Monitor", "githuburl":"" }, { "uri":"dataartsstudio_01_0467.html", + "node_id":"dataartsstudio_01_0467.xml", "product_code":"dgc", "code":"228", "des":"The Subjob node is used to call the batch job that does not contain the subjob node.Table 1 and Table 2 describe the parameters of the Subjob node.", "doc_type":"usermanual", "kw":"Subjob,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Subjob", "githuburl":"" }, { "uri":"dataartsstudio_01_0535.html", + "node_id":"dataartsstudio_01_0535.xml", "product_code":"dgc", "code":"229", "des":"The For Each node specifies a subjob to be executed cyclically and assigns values to variables in a subjob with a dataset.Table 1 describes the parameters of the For Each", "doc_type":"usermanual", "kw":"For Each,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"For Each", "githuburl":"" }, { "uri":"dataartsstudio_01_0468.html", + "node_id":"dataartsstudio_01_0468.xml", "product_code":"dgc", "code":"230", "des":"The SMN node is used to send notifications to users.Table 1 and Table 2 describe the parameters of the SMN node.", "doc_type":"usermanual", "kw":"SMN,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"SMN", "githuburl":"" }, { "uri":"dataartsstudio_01_0469.html", + "node_id":"dataartsstudio_01_0469.xml", "product_code":"dgc", "code":"231", "des":"The Dummy node is empty and does not perform any operations. It is used to simplify the complex connection relationships of nodes. Figure 1 shows an example.Table 1 descr", "doc_type":"usermanual", "kw":"Dummy,Node Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Dummy", "githuburl":"" }, { "uri":"dataartsstudio_01_0493.html", + "node_id":"dataartsstudio_01_0493.xml", "product_code":"dgc", "code":"232", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"EL Expression Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"EL Expression Reference", "githuburl":"" }, { "uri":"dataartsstudio_01_0494.html", + "node_id":"dataartsstudio_01_0494.xml", "product_code":"dgc", "code":"233", "des":"Node parameter values in a DataArts Factory job can be dynamically generated based on the running environment by using Expression Language (EL). 
You can determine whether", "doc_type":"usermanual", "kw":"Expression Overview,EL Expression Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Expression Overview", "githuburl":"" }, { "uri":"dataartsstudio_01_0495.html", + "node_id":"dataartsstudio_01_0495.xml", "product_code":"dgc", "code":"234", "des":"EL supports most of the arithmetic and logic operators provided by Java.If variable a is empty, default is returned. If variable a is not empty, a itself is returned. The", "doc_type":"usermanual", "kw":"Basic Operators,EL Expression Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Basic Operators", "githuburl":"" }, { "uri":"dataartsstudio_01_0496.html", + "node_id":"dataartsstudio_01_0496.xml", "product_code":"dgc", "code":"235", "des":"The date and time in the EL expression can be displayed in a user-specified format. The date and time format is specified by the date and time mode character string. The ", "doc_type":"usermanual", "kw":"Date and Time Mode,EL Expression Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Date and Time Mode", "githuburl":"" }, { "uri":"dataartsstudio_01_0497.html", + "node_id":"dataartsstudio_01_0497.xml", "product_code":"dgc", "code":"236", "des":"An Env embedded object provides a method of obtaining an environment variable value.The EL expression used to obtain the value of environment variable test is as follows:", "doc_type":"usermanual", "kw":"Env Embedded Objects,EL Expression Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Env Embedded Objects", "githuburl":"" }, { "uri":"dataartsstudio_01_0498.html", + "node_id":"dataartsstudio_01_0498.xml", "product_code":"dgc", "code":"237", "des":"A job object provides properties and methods of obtaining the output message, job scheduling plan time, and job execution time of the previous node in a job.The expressio", "doc_type":"usermanual", "kw":"Job Embedded Objects,EL Expression Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Job Embedded Objects", "githuburl":"" }, { "uri":"dataartsstudio_01_0499.html", + "node_id":"dataartsstudio_01_0499.xml", "product_code":"dgc", "code":"238", "des":"A StringUtil embedded object provides methods of operating character strings, for example, truncating a substring from a character string.StringUtil is implemented throug", "doc_type":"usermanual", "kw":"StringUtil Embedded Objects,EL Expression Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"StringUtil Embedded Objects", "githuburl":"" }, { "uri":"dataartsstudio_01_0500.html", + "node_id":"dataartsstudio_01_0500.xml", "product_code":"dgc", "code":"239", "des":"A DateUtil embedded object provides methods of formatting time and calculating time.The previous day of the job scheduling plan time is used as 
the subdirectory name to g", "doc_type":"usermanual", "kw":"DateUtil Embedded Objects,EL Expression Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"DateUtil Embedded Objects", "githuburl":"" }, { "uri":"dataartsstudio_01_0501.html", + "node_id":"dataartsstudio_01_0501.xml", "product_code":"dgc", "code":"240", "des":"A JSONUtil embedded object provides JSON object methods.The content of variable str is as follows:The expression for obtaining the area code of city1 is as follows:", "doc_type":"usermanual", "kw":"JSONUtil Embedded Objects,EL Expression Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"JSONUtil Embedded Objects", "githuburl":"" }, { "uri":"dataartsstudio_01_0534.html", + "node_id":"dataartsstudio_01_0534.xml", "product_code":"dgc", "code":"241", "des":"You can use Loop embedded objects to obtain data from the For Each dataset.The EL expression for the Foreach operator to cyclically obtain the first column of the output ", "doc_type":"usermanual", "kw":"Loop Embedded Objects,EL Expression Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Loop Embedded Objects", "githuburl":"" }, { "uri":"dataartsstudio_01_0553.html", + "node_id":"dataartsstudio_01_0553.xml", "product_code":"dgc", "code":"242", "des":"The OBSUtil embedded objects provide a series of OBS operation methods, for example, checking whether an OBS file or directory exists.The following is the EL expression f", "doc_type":"usermanual", "kw":"OBSUtil Embedded Objects,EL Expression Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"OBSUtil Embedded Objects", "githuburl":"" }, { "uri":"dataartsstudio_01_0502.html", + "node_id":"dataartsstudio_01_0502.xml", "product_code":"dgc", "code":"243", "des":"With this example, you can understand how to use EL expressions in the following applications:Using variables in the SQL script of DataArts FactoryTransferring parameters", "doc_type":"usermanual", "kw":"Expression Use Example,EL Expression Reference,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Expression Use Example", "githuburl":"" }, { "uri":"dataartsstudio_01_0520.html", + "node_id":"dataartsstudio_01_0520.xml", "product_code":"dgc", "code":"244", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Usage Guidance", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Usage Guidance", "githuburl":"" }, { "uri":"dataartsstudio_01_0580.html", + "node_id":"dataartsstudio_01_0580.xml", "product_code":"", "code":"245", "des":"You can set a job that meets the scheduling period conditions as the dependency jobs for a job that is scheduled periodically. For details about how to set a dependency j", "doc_type":"", "kw":"Job Dependency,Usage Guidance,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Job Dependency", "githuburl":"" }, { "uri":"dataartsstudio_01_0583.html", + "node_id":"dataartsstudio_01_0583.xml", "product_code":"dgc", "code":"246", "des":"When developing and orchestrating jobs in DataArts Factory, you can use IF statements to determine the branch to execute.This section describes how to use IF statements i", "doc_type":"usermanual", "kw":"IF Statements,Usage Guidance,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"IF Statements", "githuburl":"" }, { "uri":"dataartsstudio_01_0581.html", + "node_id":"dataartsstudio_01_0581.xml", "product_code":"", "code":"247", "des":"The Rest Client node can execute RESTful requests.This tutorial describes how to obtain the return value of the Rest Client node, covering the following two application s", "doc_type":"", "kw":"Obtaining the Return Value of a Rest Client Node,Usage Guidance,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Obtaining the Return Value of a Rest Client Node", "githuburl":"" }, { "uri":"dataartsstudio_01_0582.html", + "node_id":"dataartsstudio_01_0582.xml", "product_code":"dgc", "code":"248", "des":"During job development, if some jobs have different parameters but the same processing logic, you can use For Each nodes to avoid repeated job development.You can use a F", "doc_type":"usermanual", "kw":"Using For Each Nodes,Usage Guidance,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Using For Each Nodes", "githuburl":"" }, { "uri":"dataartsstudio_01_0529.html", + "node_id":"dataartsstudio_01_0529.xml", "product_code":"", "code":"249", "des":"This section describes how to develop and execute a Python script using DataArts Factory.An ECS named ecs-dgc has been created.In this example, the ECS uses the CentOS 8.", "doc_type":"", "kw":"Developing a Python Script,Usage Guidance,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Developing a Python Script", "githuburl":"" }, { "uri":"dataartsstudio_01_0524.html", + "node_id":"dataartsstudio_01_0524.xml", "product_code":"", "code":"250", "des":"This section describes how to use the DWS SQL operator to develop a job on DataArts Factory.This tutorial describes how to develop a DWS job to collect the sales volume o", "doc_type":"", "kw":"Developing a DWS SQL Job,Usage Guidance,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Developing a DWS SQL Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0522.html", + "node_id":"dataartsstudio_01_0522.xml", 
"product_code":"dgc", "code":"251", "des":"This section introduces how to develop Hive SQL scripts on DataArts Factory.As a one-stop big data development platform, DataArts Factory supports development of multiple", "doc_type":"usermanual", "kw":"Developing a Hive SQL Job,Usage Guidance,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Developing a Hive SQL Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0521.html", + "node_id":"dataartsstudio_01_0521.xml", "product_code":"dgc", "code":"252", "des":"This section introduces how to develop a DLI Spark job on DataArts Factory.In most cases, SQL is used to analyze and process data when using Data Lake Insight (DLI). Howe", "doc_type":"usermanual", "kw":"Developing a DLI Spark Job,Usage Guidance,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "opensource":"true", + "IsBot":"yes", + "IsMulti":"No", + "documenttype":"usermanual" + } + ], "title":"Developing a DLI Spark Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0526.html", + "node_id":"dataartsstudio_01_0526.xml", "product_code":"", "code":"253", "des":"This section describes how to develop an MRS Flink job on DataArts Factory. Use an MRS Flink job to count the number of words.You have the permission to access OBS paths.", "doc_type":"", "kw":"Developing an MRS Flink Job,Usage Guidance,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Developing an MRS Flink Job", "githuburl":"" }, { "uri":"dataartsstudio_01_0525.html", + "node_id":"dataartsstudio_01_0525.xml", "product_code":"", "code":"254", "des":"This section describes how to develop an MRS Spark Python on DataArts Factory.PrerequisitesYou have the permission to access OBS paths.Data preparationPrepare the script ", "doc_type":"", "kw":"Developing an MRS Spark Python Job,Usage Guidance,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Developing an MRS Spark Python Job", "githuburl":"" }, { "uri":"dataartsstudio_12_0005.html", + "node_id":"dataartsstudio_12_0005.xml", "product_code":"", "code":"255", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"", "kw":"FAQs", + "search_title":"", + "metedata":[ + { + + } + ], "title":"FAQs", "githuburl":"" }, { "uri":"dataartsstudio_03_0002.html", + "node_id":"dataartsstudio_03_0002.xml", "product_code":"dgc", "code":"256", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Consultation and Billing", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Consultation and Billing", "githuburl":"" }, { "uri":"dataartsstudio_03_0052.html", + "node_id":"dataartsstudio_03_0052.xml", "product_code":"dgc", "code":"257", "des":"We use a region to identify the location of a data center. You can create resources in a specific region.A region is a physical data center. 
Each region is completely ind", "doc_type":"usermanual", "kw":"Regions,Consultation and Billing,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Regions", "githuburl":"" }, { "uri":"dataartsstudio_03_0061.html", + "node_id":"dataartsstudio_03_0061.xml", + "product_code":"dgc", "code":"258", "des":"Check whether the user has been added to the workspace. If not, perform the following steps to add the user:Log in to the DataArts Studio console and access the Workspace", "doc_type":"usermanual", "kw":"What Should I Do If a User Cannot View Existing Workspaces After I Have Assigned the Required Policy", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"What Should I Do If a User Cannot View Existing Workspaces After I Have Assigned the Required Policy to the User?", "githuburl":"" }, { "uri":"dataartsstudio_03_0222.html", + "node_id":"dataartsstudio_03_0222.xml", + "product_code":"", "code":"259", "des":"After workspaces are created, they cannot be deleted. You can disable workspaces when they are no longer needed. You can enable them again when you need these workspaces.", "doc_type":"", "kw":"Can I Delete DataArts Studio Workspaces?,Consultation and Billing,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Can I Delete DataArts Studio Workspaces?", "githuburl":"" }, { "uri":"dataartsstudio_03_0131.html", + "node_id":"dataartsstudio_03_0131.xml", + "product_code":"", "code":"260", "des":"No. The purchased or trial instance cannot be transferred to another account.", "doc_type":"", "kw":"Can I Transfer a Purchased or Trial Instance to Another Account?,Consultation and Billing,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Can I Transfer a Purchased or Trial Instance to Another Account?", "githuburl":"" }, { "uri":"dataartsstudio_03_0087.html", + "node_id":"dataartsstudio_03_0087.xml", + "product_code":"dgc", "code":"261", "des":"No. You cannot downgrade a purchased DataArts Studio instance.", "doc_type":"usermanual", "kw":"Does DataArts Studio Support Version Downgrade?,Consultation and Billing,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Does DataArts Studio Support Version Downgrade?", "githuburl":"" }, { "uri":"dataartsstudio_03_0022.html", + "node_id":"dataartsstudio_03_0022.xml", + "product_code":"dgc", "code":"262", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Management Center", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Management Center", "githuburl":"" }, { "uri":"dataartsstudio_03_0008.html", + "node_id":"dataartsstudio_03_0008.xml", "product_code":"dgc", "code":"263", "des":"For details about the data connections supported by DataArts Studio, see Data Sources Supported by DataArts Studio.", "doc_type":"usermanual", "kw":"Which Data Connections Are Supported by DataArts Studio?,Management Center,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Which Data Connections Are Supported by DataArts Studio?", "githuburl":"" }, { "uri":"dataartsstudio_03_0009.html", + "node_id":"dataartsstudio_03_0009.xml", "product_code":"dgc", "code":"264", "des":"When creating a DWS, MRS Hive, RDS, and SparkSQL data connection, you must bind an agent provided by the CDM cluster. Currently, a version of the CDM cluster earlier than", "doc_type":"usermanual", "kw":"What Are the Precautions for Creating Data Connections?,Management Center,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"What Are the Precautions for Creating Data Connections?", "githuburl":"" }, { "uri":"dataartsstudio_03_0016.html", + "node_id":"dataartsstudio_03_0016.xml", "product_code":"dgc", "code":"265", "des":"The possible cause is that the CDM cluster is stopped or a concurrency conflict occurs. You can switch to another agent to temporarily avoid this issue.To resolve this is", "doc_type":"usermanual", "kw":"Why Do DWS/Hive/HBase Data Connections Fail to Obtain the Information About Database or Tables?,Mana", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Why Do DWS/Hive/HBase Data Connections Fail to Obtain the Information About Database or Tables?", "githuburl":"" }, { "uri":"dataartsstudio_03_0017.html", + "node_id":"dataartsstudio_03_0017.xml", "product_code":"dgc", "code":"266", "des":"Possible causes are as follows:Hive/HBase components were not selected during MRS cluster creation.The network between the CDM cluster and MRS cluster was disconnected wh", "doc_type":"usermanual", "kw":"Why Are MRS Hive/HBase Clusters Not Displayed on the Page for Creating Data Connections?,Management ", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Why Are MRS Hive/HBase Clusters Not Displayed on the Page for Creating Data Connections?", "githuburl":"" }, { "uri":"dataartsstudio_03_0054.html", + "node_id":"dataartsstudio_03_0054.xml", "product_code":"dgc", "code":"267", "des":"The failure may be caused by the rights separation function of the DWS cluster. 
On the DWS console, click the corresponding cluster, choose Security Settings, and disable", "doc_type":"usermanual", "kw":"What Should I Do If the Connection Test Fails When I Enable the SSL Connection During the Creation o", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"What Should I Do If the Connection Test Fails When I Enable the SSL Connection During the Creation of a DWS Data Connection?", "githuburl":"" }, { "uri":"dataartsstudio_03_0089.html", + "node_id":"dataartsstudio_03_0089.xml", "product_code":"dgc", "code":"268", "des":"Multiple data connections of the same type or different types can be created in the same workspace, but their names must be unique.", "doc_type":"usermanual", "kw":"Can I Create Multiple Data Connections in a Workspace in Proxy Mode?,Management Center,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Can I Create Multiple Data Connections in a Workspace in Proxy Mode?", "githuburl":"" }, { "uri":"dataartsstudio_03_0137.html", + "node_id":"dataartsstudio_03_0137.xml", "product_code":"", "code":"269", "des":"You are advised to choose a proxy connection.", "doc_type":"", "kw":"Should I Choose a Direct or a Proxy Connection When Creating a DWS Connection?,Management Center,Use", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Should I Choose a Direct or a Proxy Connection When Creating a DWS Connection?", "githuburl":"" }, { "uri":"dataartsstudio_03_0153.html", + "node_id":"dataartsstudio_03_0153.xml", "product_code":"", "code":"270", "des":"You can export the jobs in DataArts Factory and then import them to DataArts Factory in another workspace.You can export data connections on the Migrate Resources page of", "doc_type":"", "kw":"How Do I Migrate the Data Development Jobs and Data Connections from One Workspace to Another?,Manag", + "search_title":"", + "metedata":[ + { + + } + ], "title":"How Do I Migrate the Data Development Jobs and Data Connections from One Workspace to Another?", "githuburl":"" }, { "uri":"dataartsstudio_03_0154.html", + "node_id":"dataartsstudio_03_0154.xml", "product_code":"", "code":"271", "des":"No, but you can change the names of workspaces.", "doc_type":"", "kw":"Can I Delete Workspaces?,Management Center,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Can I Delete Workspaces?", "githuburl":"" }, { "uri":"dataartsstudio_03_0027.html", + "node_id":"dataartsstudio_03_0027.xml", "product_code":"dgc", "code":"272", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"DataArts Migration", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"DataArts Migration", "githuburl":"" }, { "uri":"dataartsstudio_03_0138.html", + "node_id":"dataartsstudio_03_0138.xml", "product_code":"cdm", "code":"273", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"General", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"General", "githuburl":"" }, { "uri":"dataartsstudio_03_0139.html", + "node_id":"dataartsstudio_03_0139.xml", "product_code":"cdm", "code":"274", "des":"CDM is developed based on a distributed computing framework and leverages the parallel data processing technology. Table 1 details the advantages of CDM.", "doc_type":"usermanual", "kw":"What Are the Advantages of CDM?,General,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"What Are the Advantages of CDM?", "githuburl":"" }, { "uri":"dataartsstudio_03_0140.html", + "node_id":"dataartsstudio_03_0140.xml", "product_code":"cdm", "code":"275", "des":"CDM is a fully hosted service that provides the following capabilities to protect user data security:Instance isolation: CDM users can use only their own instances. Insta", "doc_type":"usermanual", "kw":"What Are the Security Protection Mechanisms of CDM?,General,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"What Are the Security Protection Mechanisms of CDM?", "githuburl":"" }, { "uri":"dataartsstudio_03_0099.html", + "node_id":"dataartsstudio_03_0099.xml", "product_code":"cdm", "code":"276", "des":"When migrating the data on the public network, use NAT Gateway to share the EIPs with other ECSs in the subnet. In this way, data on the on-premises data center or third-", "doc_type":"usermanual", "kw":"How Do I Reduce the Cost of Using CDM?,General,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"How Do I Reduce the Cost of Using CDM?", "githuburl":"" }, { "uri":"dataartsstudio_03_0302.html", + "node_id":"dataartsstudio_03_0302.xml", "product_code":"", "code":"277", "des":"No. To use a later version cluster, you can create one.", "doc_type":"", "kw":"Can I Upgrade a CDM Cluster?,General,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Can I Upgrade a CDM Cluster?", "githuburl":"" }, { "uri":"dataartsstudio_03_0141.html", + "node_id":"dataartsstudio_03_0141.xml", "product_code":"cdm", "code":"278", "des":"Theoretically, a cdm.large CDM instance can migrate 1 TB to 8 TB data per day. 
The actual transmission rate is affected by factors such as the Internet bandwidth, cluster", "doc_type":"usermanual", "kw":"How Is the Migration Performance of CDM?,General,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"How Is the Migration Performance of CDM?", "githuburl":"" }, { "uri":"dataartsstudio_03_0124.html", + "node_id":"dataartsstudio_03_0124.xml", "product_code":"", "code":"279", "des":"Table 1 lists the number of concurrent jobs for different CDM cluster versions.You are advised to use multiple CDM clusters in the following and other scenarios as needed", "doc_type":"", "kw":"What Is the Number of Concurrent Jobs for Different CDM Cluster Versions?,General,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Is the Number of Concurrent Jobs for Different CDM Cluster Versions?", "githuburl":"" }, { "uri":"dataartsstudio_03_0142.html", + "node_id":"dataartsstudio_03_0142.xml", "product_code":"cdm", "code":"280", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Functions", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Functions", "githuburl":"" }, { "uri":"dataartsstudio_03_0069.html", + "node_id":"dataartsstudio_03_0069.xml", "product_code":"cdm", "code":"281", "des":"CDM supports incremental data migration. With scheduled jobs and macro variables of date and time, CDM provides incremental data migration in the following scenarios:Incr", "doc_type":"usermanual", "kw":"Does CDM Support Incremental Data Migration?,Functions,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Does CDM Support Incremental Data Migration?", "githuburl":"" }, { "uri":"dataartsstudio_03_0028.html", + "node_id":"dataartsstudio_03_0028.xml", "product_code":"cdm", "code":"282", "des":"Yes. 
CDM supports the following field converters:AnonymizationTrimReverse StringReplace StringExpression ConversionYou can create a field converter on the Map Field page ", "doc_type":"usermanual", "kw":"Does CDM Support Field Conversion?,Functions,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Does CDM Support Field Conversion?", "githuburl":"" }, { "uri":"dataartsstudio_03_0107.html", + "node_id":"dataartsstudio_03_0107.xml", "product_code":"", "code":"283", "des":"The recommended component versions can be used as both the source and destination.", "doc_type":"", "kw":"What Component Versions Are Recommended for Migrating Hadoop Data Sources?,Functions,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Component Versions Are Recommended for Migrating Hadoop Data Sources?", "githuburl":"" }, { "uri":"dataartsstudio_03_0029.html", + "node_id":"dataartsstudio_03_0029.xml", "product_code":"cdm", "code":"284", "des":"CDM can read and write data in SequenceFile, TextFile, ORC, or Parquet format from the Hive data source.", "doc_type":"usermanual", "kw":"What Data Formats Are Supported When the Data Source Is Hive?,Functions,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"What Data Formats Are Supported When the Data Source Is Hive?", "githuburl":"" }, { "uri":"dataartsstudio_03_0030.html", + "node_id":"dataartsstudio_03_0030.xml", "product_code":"cdm", "code":"285", "des":"CDM does not support direct job migration across clusters. However, you can use the batch job import and export function to indirectly implement cross-cluster migration a", "doc_type":"usermanual", "kw":"Can I Synchronize Jobs to Other Clusters?,Functions,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Can I Synchronize Jobs to Other Clusters?", "githuburl":"" }, { "uri":"dataartsstudio_03_0031.html", + "node_id":"dataartsstudio_03_0031.xml", "product_code":"cdm", "code":"286", "des":"CDM supports batch job creation with the help of the batch import function. You can create jobs in batches as follows:Create a job manually.Export the job and save the jo", "doc_type":"usermanual", "kw":"Can I Create Jobs in Batches?,Functions,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Can I Create Jobs in Batches?", "githuburl":"" }, { "uri":"dataartsstudio_03_0100.html", + "node_id":"dataartsstudio_03_0100.xml", "product_code":"", "code":"287", "des":"Yes.Access the DataArts Factory module of the DataArts Studio service.In the navigation pane of the DataArts Factory homepage, choose Data Development > Develop Job to cr", "doc_type":"", "kw":"Can I Schedule Jobs in Batches?,Functions,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Can I Schedule Jobs in Batches?", "githuburl":"" }, { "uri":"dataartsstudio_03_0032.html", + "node_id":"dataartsstudio_03_0032.xml", "product_code":"cdm", "code":"288", "des":"Yes. 
If you do not need to use the CDM cluster for a long time, you can stop or delete it to reduce costs.Before the deletion, you can use the batch export function of CD", "doc_type":"usermanual", "kw":"How Do I Back Up CDM Jobs?,Functions,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"How Do I Back Up CDM Jobs?", "githuburl":"" }, { "uri":"dataartsstudio_03_0119.html", + "node_id":"dataartsstudio_03_0119.xml", "product_code":"", "code":"289", "des":"To ensure that CDM can communicate with the HANA cluster, perform the following operations:Disable Statement Routing of the HANA cluster. Note that this will increase the", "doc_type":"", "kw":"How Do I Configure the Connection If Only Some Nodes in the HANA Cluster Can Communicate with the CD", + "search_title":"", + "metedata":[ + { + + } + ], "title":"How Do I Configure the Connection If Only Some Nodes in the HANA Cluster Can Communicate with the CDM Cluster?", "githuburl":"" }, { "uri":"dataartsstudio_03_0101.html", + "node_id":"dataartsstudio_03_0101.xml", "product_code":"cdm", "code":"290", "des":"CDM provides RESTful APIs to implement automatic job creation or execution control by program invocation.The following describes how to use CDM to migrate data from table", "doc_type":"usermanual", "kw":"How Do I Use Java to Invoke CDM RESTful APIs to Create Data Migration Jobs?,Functions,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"How Do I Use Java to Invoke CDM RESTful APIs to Create Data Migration Jobs?", "githuburl":"" }, { "uri":"dataartsstudio_03_0033.html", + "node_id":"dataartsstudio_03_0033.xml", "product_code":"cdm", "code":"291", "des":"Many enterprises deploy key data sources on the intranet, such as databases and file servers. CDM runs on the cloud. To migrate the intranet data to the cloud using CDM, ", "doc_type":"usermanual", "kw":"How Do I Connect the On-Premises Intranet or Third-Party Private Network to CDM?,Functions,User Guid", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"How Do I Connect the On-Premises Intranet or Third-Party Private Network to CDM?", "githuburl":"" }, { "uri":"dataartsstudio_03_0336.html", + "node_id":"dataartsstudio_03_0336.xml", "product_code":"", "code":"292", "des":"The number of concurrent extractors in a CDM migration job is related to the cluster specifications and table size. The value range is 1 to 300. If the value is too large", "doc_type":"", "kw":"How Do I Set the Number of Concurrent Extractors for a CDM Migration Job?,Functions,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"How Do I Set the Number of Concurrent Extractors for a CDM Migration Job?", "githuburl":"" }, { "uri":"dataartsstudio_03_0337.html", + "node_id":"dataartsstudio_03_0337.xml", "product_code":"", "code":"293", "des":"No. If data is written to the source during the migration, an error may occur.", "doc_type":"", "kw":"Does CDM Support Real-Time Migration of Dynamic Data?,Functions,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Does CDM Support Real-Time Migration of Dynamic Data?", "githuburl":"" }, { "uri":"dataartsstudio_03_0143.html", + "node_id":"dataartsstudio_03_0143.xml", "product_code":"cdm", "code":"294", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Troubleshooting", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"Troubleshooting", "githuburl":"" }, { "uri":"dataartsstudio_03_0106.html", + "node_id":"dataartsstudio_03_0106.xml", "product_code":"cdm", "code":"295", "des":"When CDM is used to import data from OBS to SQL Server, the job fails to be executed and error message \"Unable to execute the SQL statement. Cause: \"String or binary data", "doc_type":"usermanual", "kw":"What Can I Do If Error Message \"Unable to execute the SQL statement\" Is Displayed When I Import Data", + "search_title":"", + "metedata":[ + { + "prodname":"cdm", + "documenttype":"usermanual" + } + ], "title":"What Can I Do If Error Message \"Unable to execute the SQL statement\" Is Displayed When I Import Data from OBS to SQL Server?", "githuburl":"" }, { "uri":"dataartsstudio_03_0071.html", + "node_id":"dataartsstudio_03_0071.xml", "product_code":"dgc", "code":"296", "des":"When CDM is used to migrate Oracle data to DWS, an error is reported, as shown in Figure 1.During data migration, if the entire table is queried and the table contains a ", "doc_type":"usermanual", "kw":"Why Is Error ORA-01555 Reported During Migration from Oracle to DWS?,Troubleshooting,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Why Is Error ORA-01555 Reported During Migration from Oracle to DWS?", "githuburl":"" }, { "uri":"dataartsstudio_03_0072.html", + "node_id":"dataartsstudio_03_0072.xml", "product_code":"dgc", "code":"297", "des":"By default, the userAdmin role has only the permissions to manage roles and users and does not have the read and write permissions on a database.If the MongoDB connection", "doc_type":"usermanual", "kw":"What Should I Do If the MongoDB Connection Migration Fails?,Troubleshooting,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"What Should I Do If the MongoDB Connection Migration Fails?", "githuburl":"" }, { "uri":"dataartsstudio_03_0093.html", + "node_id":"dataartsstudio_03_0093.xml", "product_code":"dgc", "code":"298", "des":"Manually stop the Hive migration job and add the following attribute settings to the Hive data connection:Attribute Name: hive.server2.idle.operation.timeoutValue: 10mIn ", "doc_type":"usermanual", "kw":"What Should I Do If a Hive Migration Job Is Suspended for a Long Period of Time?,Troubleshooting,Use", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"What Should I Do If a Hive Migration Job Is Suspended for a Long Period of Time?", "githuburl":"" }, { "uri":"dataartsstudio_03_0109.html", + "node_id":"dataartsstudio_03_0109.xml", "product_code":"", "code":"299", "des":"When you use CDM to migrate data to DWS, the migration job fails and the error message \"value too long for type character varying\" is displayed in the execution log.The p", "doc_type":"", "kw":"What Should I Do If an Error Is Reported Because the Field Type Mapping Does Not Match During Data M", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Should I Do If an Error Is Reported Because the Field Type Mapping 
Does Not Match During Data Migration Using CDM?", "githuburl":"" }, { "uri":"dataartsstudio_03_0110.html", + "node_id":"dataartsstudio_03_0110.xml", "product_code":"", "code":"300", "des":"The following error message is displayed during MySQL migration: \"Unable to connect to the database server. Cause: connect timed out.\"The table has a large data volume, a", "doc_type":"", "kw":"What Should I Do If a JDBC Connection Timeout Error Is Reported During MySQL Migration?,Troubleshoot", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Should I Do If a JDBC Connection Timeout Error Is Reported During MySQL Migration?", "githuburl":"" }, { "uri":"dataartsstudio_03_0121.html", + "node_id":"dataartsstudio_03_0121.xml", "product_code":"", "code":"301", "des":"You are advised to clear historical data and try again. In addition, when creating a migration job, you are advised to enable the system to clear historical data. This gr", "doc_type":"", "kw":"What Should I Do If a CDM Migration Job Fails After a Link from Hive to DWS Is Created?,Troubleshoot", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Should I Do If a CDM Migration Job Fails After a Link from Hive to DWS Is Created?", "githuburl":"" }, { "uri":"dataartsstudio_03_0122.html", + "node_id":"dataartsstudio_03_0122.xml", "product_code":"", "code":"302", "des":"CDM does not support this operation. You are advised to manually export a MySQL data file, enable the SFTP service on the server, and create a CDM job with SFTP as the so", "doc_type":"", "kw":"How Do I Use CDM to Export MySQL Data to an SQL File and Upload the File to an OBS Bucket?,Troublesh", + "search_title":"", + "metedata":[ + { + + } + ], "title":"How Do I Use CDM to Export MySQL Data to an SQL File and Upload the File to an OBS Bucket?", "githuburl":"" }, { "uri":"dataartsstudio_03_0123.html", + "node_id":"dataartsstudio_03_0123.xml", "product_code":"", "code":"303", "des":"Dirty data writing is configured, but no dirty data exists. You need to decrease the number of concurrent tasks to avoid this issue.", "doc_type":"", "kw":"What Should I Do If CDM Fails to Migrate Data from OBS to DLI?,Troubleshooting,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Should I Do If CDM Fails to Migrate Data from OBS to DLI?", "githuburl":"" }, { "uri":"dataartsstudio_03_0132.html", + "node_id":"dataartsstudio_03_0132.xml", "product_code":"", "code":"304", "des":"This error is reported because the customer's certificate has expired. 
Update the certificate and reconfigure the connector.", "doc_type":"", "kw":"What Should I Do If a CDM Connector Reports the Error \"Configuration Item [linkConfig.iamAuth] Does ", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Should I Do If a CDM Connector Reports the Error \"Configuration Item [linkConfig.iamAuth] Does Not Exist\"?", "githuburl":"" }, { "uri":"dataartsstudio_03_0333.html", + "node_id":"dataartsstudio_03_0333.xml", + "product_code":"", "code":"305", "des":"If you create a link or save a job in a CDM cluster of an earlier version, and then access a CDM cluster of a later version, this error occurs occasionally.Manually clear", "doc_type":"", "kw":"What Should I Do If Error Message \"Configuration Item [throttlingConfig.concurrentSubJobs] Does Not E", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Should I Do If Error Message \"Configuration Item [throttlingConfig.concurrentSubJobs] Does Not Exist\" Is Displayed During Job Creation?", "githuburl":"" }, { "uri":"dataartsstudio_03_0166.html", + "node_id":"dataartsstudio_03_0166.xml", + "product_code":"", "code":"306", "des":"This failure occurs because you do not have the required permissions. Create another service user, grant the required permissions to it, and try again.To create a data co", "doc_type":"", "kw":"What Should I Do If Message \"CORE_0031:Connect time out. (Cdm.0523)\" Is Displayed During the Creatio", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Should I Do If Message \"CORE_0031:Connect time out. (Cdm.0523)\" Is Displayed During the Creation of an MRS Hive Link?", "githuburl":"" }, { "uri":"dataartsstudio_03_0167.html", + "node_id":"dataartsstudio_03_0167.xml", + "product_code":"", "code":"307", "des":"The cause is that the database table name contains special characters, resulting in incorrect syntax. You can resolve this issue by renaming the database table according ", "doc_type":"", "kw":"What Should I Do If Message \"CDM Does Not Support Auto Creation of an Empty Table with No Column\" Is", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Should I Do If Message \"CDM Does Not Support Auto Creation of an Empty Table with No Column\" Is Displayed When I Enable Auto Table Creation?", "githuburl":"" }, { "uri":"dataartsstudio_03_0334.html", + "node_id":"dataartsstudio_03_0334.xml", + "product_code":"", "code":"308", "des":"This may be because you have uploaded the latest ORACLE_8 driver (for example, Oracle Database 21c (21.3) driver), which is not supported yet. You are advised to use the ", "doc_type":"", "kw":"What Should I Do If I Cannot Obtain the Schema Name When Creating an Oracle Relational Database Migr", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Should I Do If I Cannot Obtain the Schema Name When Creating an Oracle Relational Database Migration Job?", "githuburl":"" }, { "uri":"dataartsstudio_03_0035.html", + "node_id":"dataartsstudio_03_0035.xml", + "product_code":"dgc", "code":"309", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"DataArts Factory", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"DataArts Factory", "githuburl":"" }, { "uri":"dataartsstudio_03_0036.html", + "node_id":"dataartsstudio_03_0036.xml", "product_code":"dgc", "code":"310", "des":"By default, each user can create a maximum of 10,000 jobs, and each job can contain a maximum of 200 nodes.In addition, the system allows you to adjust the maximum quota ", "doc_type":"usermanual", "kw":"How Many Jobs Can Be Created in DataArts Factory? Is There a Limit on the Number of Nodes in a Job?,", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"How Many Jobs Can Be Created in DataArts Factory? Is There a Limit on the Number of Nodes in a Job?", "githuburl":"" }, { "uri":"dataartsstudio_03_0041.html", + "node_id":"dataartsstudio_03_0041.xml", "product_code":"dgc", "code":"311", "des":"On the Running History page, there is a large difference between Job Execution Time and Start Time, as shown in the figure below. Job Execution Time is the time when the ", "doc_type":"usermanual", "kw":"Why Is There a Large Difference Between Job Execution Time and Start Time of a Job?,DataArts Factory", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Why Is There a Large Difference Between Job Execution Time and Start Time of a Job?", "githuburl":"" }, { "uri":"dataartsstudio_03_0042.html", + "node_id":"dataartsstudio_03_0042.xml", "product_code":"dgc", "code":"312", "des":"The subsequent jobs may be suspended, continued, or terminated, depending on the configuration.In this case, do not stop the job. You can rerun the failed job instance or", "doc_type":"usermanual", "kw":"Will Subsequent Jobs Be Affected If a Job Fails to Be Executed During Scheduling of Dependent Jobs? ", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Will Subsequent Jobs Be Affected If a Job Fails to Be Executed During Scheduling of Dependent Jobs? What Should I Do?", "githuburl":"" }, { "uri":"dataartsstudio_03_0149.html", + "node_id":"dataartsstudio_03_0149.xml", "product_code":"", "code":"313", "des":"Lock management is unavailable for DLI and MRS. Therefore, if you perform read and write operations on the tables simultaneously, data conflict will occur and the operati", "doc_type":"", "kw":"What Should I Pay Attention to When Using DataArts Studio to Schedule Big Data Services?,DataArts Fa", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Should I Pay Attention to When Using DataArts Studio to Schedule Big Data Services?", "githuburl":"" }, { "uri":"dataartsstudio_03_0150.html", + "node_id":"dataartsstudio_03_0150.xml", "product_code":"", "code":"314", "des":"Parameters can be set in environment variables, job parameters, and script parameters, but their application scopes are different. 
If there is a conflict when parameters ", "doc_type":"", "kw":"What Are the Differences and Connections Among Environment Variables, Job Parameters, and Script Par", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Are the Differences and Connections Among Environment Variables, Job Parameters, and Script Parameters?", "githuburl":"" }, { "uri":"dataartsstudio_03_0050.html", + "node_id":"dataartsstudio_03_0050.xml", "product_code":"dgc", "code":"315", "des":"Error logs are stored in OBS. The current account must have the OBS read permissions to view logs. You can check the OBS permissions and OBS bucket policies in IAM.When y", "doc_type":"usermanual", "kw":"What Do I Do If Node Error Logs Cannot Be Viewed When a Job Fails?,DataArts Factory,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"What Do I Do If Node Error Logs Cannot Be Viewed When a Job Fails?", "githuburl":"" }, { "uri":"dataartsstudio_03_0051.html", + "node_id":"dataartsstudio_03_0051.xml", "product_code":"dgc", "code":"316", "des":"When a workspace- or job-level agency is configured, the following error is reported when the agency list is viewed:Policy doesn't allow iam:agencies:listAgencies to be p", "doc_type":"usermanual", "kw":"What Should I Do If the Agency List Fails to Be Obtained During Agency Configuration?,DataArts Facto", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"What Should I Do If the Agency List Fails to Be Obtained During Agency Configuration?", "githuburl":"" }, { "uri":"dataartsstudio_03_0055.html", + "node_id":"dataartsstudio_03_0055.xml", "product_code":"dgc", "code":"317", "des":"If the number of daily executed nodes exceeds the upper limit, it may be caused by frequent job scheduling. Perform the following operations:In the left navigation tree o", "doc_type":"usermanual", "kw":"How Do I Locate Job Scheduling Nodes with a Large Number?,DataArts Factory,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"How Do I Locate Job Scheduling Nodes with a Large Number?", "githuburl":"" }, { "uri":"dataartsstudio_03_0056.html", + "node_id":"dataartsstudio_03_0056.xml", "product_code":"dgc", "code":"318", "des":"Ensure that the current instance and peripheral resources are in the same region and IAM project. If the enterprise project function is enabled for your account, the curr", "doc_type":"usermanual", "kw":"Why Cannot Specified Peripheral Resources Be Selected When a Data Connection Is Created in Data Deve", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Why Cannot Specified Peripheral Resources Be Selected When a Data Connection Is Created in Data Development?", "githuburl":"" }, { "uri":"dataartsstudio_03_0058.html", + "node_id":"dataartsstudio_03_0058.xml", "product_code":"dgc", "code":"319", "des":"On the Data Development page, choose MonitoringMonitor Job to check whether the target job is being scheduled. 
A job can be scheduled only within the scheduling period.Vi", "doc_type":"usermanual", "kw":"Why Is There No Job Running Scheduling Log on the Monitor Instance Page After Periodic Scheduling Is", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Why Is There No Job Running Scheduling Log on the Monitor Instance Page After Periodic Scheduling Is Configured for a Job?", "githuburl":"" }, { "uri":"dataartsstudio_03_0059.html", + "node_id":"dataartsstudio_03_0059.xml", "product_code":"dgc", "code":"320", "des":"Check whether the data connection used by the Hive SQL and Spark SQL scripts is direct connection or proxy connection.In direct connection mode, DataArts Studio users sub", "doc_type":"usermanual", "kw":"Why Does the GUI Display Only the Failure Result but Not the Specific Error Cause After Hive SQL and", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Why Does the GUI Display Only the Failure Result but Not the Specific Error Cause After Hive SQL and Spark SQL Scripts Fail to Be Executed?", "githuburl":"" }, { "uri":"dataartsstudio_03_0060.html", + "node_id":"dataartsstudio_03_0060.xml", "product_code":"dgc", "code":"321", "des":"Check whether the permissions of the current user in IAM are changed, whether the user is removed from the user group, or whether the permission policy of the user group ", "doc_type":"usermanual", "kw":"What Do I Do If the Token Is Invalid During the Running of a Data Development Node?,DataArts Factory", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"What Do I Do If the Token Is Invalid During the Running of a Data Development Node?", "githuburl":"" }, { "uri":"dataartsstudio_03_0062.html", + "node_id":"dataartsstudio_03_0062.xml", "product_code":"dgc", "code":"322", "des":"Method 1: After the node test is complete, right-click the current node and choose View Log from the shortcut menu.Method 2: Click Monitorin the upper part of the canvas,", "doc_type":"usermanual", "kw":"How Do I View Run Logs After a Job Is Tested?,DataArts Factory,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"How Do I View Run Logs After a Job Is Tested?", "githuburl":"" }, { "uri":"dataartsstudio_03_0063.html", + "node_id":"dataartsstudio_03_0063.xml", "product_code":"dgc", "code":"323", "des":"Jobs scheduled by month depend on jobs scheduled by day. 
Why does a job scheduled by month start running before the job scheduled by day is complete?Although jobs schedul", "doc_type":"usermanual", "kw":"Why Does a Job Scheduled by Month Start Running Before the Job Scheduled by Day Is Complete?,DataArt", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Why Does a Job Scheduled by Month Start Running Before the Job Scheduled by Day Is Complete?", "githuburl":"" }, { "uri":"dataartsstudio_03_0065.html", + "node_id":"dataartsstudio_03_0065.xml", "product_code":"dgc", "code":"324", "des":"Check whether the current user has the DLI Service User or DLI Service Admin permissions in IAM.", "doc_type":"usermanual", "kw":"What Should I Do If Invalid Authentication Is Reported When I Run a DLI Script?,DataArts Factory,Use", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"What Should I Do If Invalid Authentication Is Reported When I Run a DLI Script?", "githuburl":"" }, { "uri":"dataartsstudio_03_0066.html", + "node_id":"dataartsstudio_03_0066.xml", "product_code":"dgc", "code":"325", "des":"Check whether the CDM cluster is stopped. If it is stopped, restart it.", "doc_type":"usermanual", "kw":"Why Cannot I Select the Desired CDM Cluster in Proxy Mode When Creating a Data Connection?,DataArts ", + "search_title":"", + "metedata":[ + { + "prodname":"dgc", + "IsMulti":"no", + "documenttype":"usermanual", + "IsBot":"yes" + } + ], "title":"Why Cannot I Select the Desired CDM Cluster in Proxy Mode When Creating a Data Connection?", "githuburl":"" }, { "uri":"dataartsstudio_03_0111.html", + "node_id":"dataartsstudio_03_0111.xml", "product_code":"", "code":"326", "des":"Daily scheduling is configured for the job, but there is no job scheduling record in the instance.Cause 1: Check whether the job scheduling is started. If not, the job wi", "doc_type":"", "kw":"Why Is There No Job Running Scheduling Record After Daily Scheduling Is Configured for the Job?,Data", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Why Is There No Job Running Scheduling Record After Daily Scheduling Is Configured for the Job?", "githuburl":"" }, { "uri":"dataartsstudio_03_0112.html", + "node_id":"dataartsstudio_03_0112.xml", "product_code":"", "code":"327", "des":"There is no content contained in the job log.Check whether the user has the global permission of the object storage service (OBS) in IAM to ensure that the user can creat", "doc_type":"", "kw":"What Do I Do If No Content Is Displayed in Job Logs?,DataArts Factory,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Do I Do If No Content Is Displayed in Job Logs?", "githuburl":"" }, { "uri":"dataartsstudio_03_0113.html", + "node_id":"dataartsstudio_03_0113.xml", "product_code":"", "code":"328", "des":"Two jobs are created, but the dependency relationship cannot be established.Check whether the two jobs' recurrence are both every week or every month. 
Currently, if the t", "doc_type":"", "kw":"Why Do I Fail to Establish a Dependency Between Two Jobs?,DataArts Factory,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Why Do I Fail to Establish a Dependency Between Two Jobs?", "githuburl":"" }, { "uri":"dataartsstudio_03_0114.html", + "node_id":"dataartsstudio_03_0114.xml", "product_code":"", "code":"329", "des":"An error is reported when DataArts Studio executes scheduling: The job does not have a submitted version. Submit the job version first.Job scheduling process begins befor", "doc_type":"", "kw":"What Should I Do If an Error Is Displayed During DataArts Studio Scheduling: The Job Does Not Have a", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Should I Do If an Error Is Displayed During DataArts Studio Scheduling: The Job Does Not Have a Submitted Version?", "githuburl":"" }, { "uri":"dataartsstudio_03_0115.html", + "node_id":"dataartsstudio_03_0115.xml", "product_code":"", "code":"330", "des":"An error is reported when DataArts Studio executes scheduling: The script associated with node XXX in the job is not submitted.Job scheduling process begins before the sc", "doc_type":"", "kw":"What Do I Do If an Error Is Displayed During DataArts Studio Scheduling: The Script Associated with ", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Do I Do If an Error Is Displayed During DataArts Studio Scheduling: The Script Associated with Node XXX in the Job Is Not Submitted?", "githuburl":"" }, { "uri":"dataartsstudio_03_0116.html", + "node_id":"dataartsstudio_03_0116.xml", "product_code":"", "code":"331", "des":"After a job is submitted for scheduling, the job fails to be executed and the following error is displayed \"depend job [XXX] is not running or pause\".The upstream depende", "doc_type":"", "kw":"What Should I Do If a Job Fails to Be Executed After Being Submitted for Scheduling and an Error Dis", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Should I Do If a Job Fails to Be Executed After Being Submitted for Scheduling and an Error Displayed: Depend Job [XXX] Is Not Running Or Pause?", "githuburl":"" }, { "uri":"dataartsstudio_03_0127.html", + "node_id":"dataartsstudio_03_0127.xml", "product_code":"", "code":"332", "des":"Databases and data tables can be created in DLI.A database does not correspond to a data connection. A data connection is a connection channel for creating DataArts Studi", "doc_type":"", "kw":"How Do I Create a Database And Data Table? Is the database a data connection?,DataArts Factory,User ", + "search_title":"", + "metedata":[ + { + + } + ], "title":"How Do I Create a Database And Data Table? Is the database a data connection?", "githuburl":"" }, { "uri":"dataartsstudio_03_0129.html", + "node_id":"dataartsstudio_03_0129.xml", "product_code":"", "code":"333", "des":"Solution: Clear the cache data and use the direct connection to display the data.", "doc_type":"", "kw":"Why Is No Result Displayed After an HIVE Task Is Executed?,DataArts Factory,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Why Is No Result Displayed After an HIVE Task Is Executed?", "githuburl":"" }, { "uri":"dataartsstudio_03_0135.html", + "node_id":"dataartsstudio_03_0135.xml", "product_code":"", "code":"334", "des":"The last instance status indicates a job has been executed, and the status can only be successful or failed. 
The Monitor Instance page displays all statuses of the job, i", "doc_type":"", "kw":"Why Does the Last Instance Status On the Monitor Instance page Only Display Succeeded or Failed?,Dat", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Why Does the Last Instance Status On the Monitor Instance page Only Display Succeeded or Failed?", "githuburl":"" }, { "uri":"dataartsstudio_03_0148.html", + "node_id":"dataartsstudio_03_0148.xml", "product_code":"", "code":"335", "des":"Choose Monitoring > Monitor Job and click the Batch Job Monitoring tab.Select the jobs to be configured and click Configure Notification.Creating a notificationSet notifi", "doc_type":"", "kw":"How Do I Create a Notification for All Jobs?,DataArts Factory,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"How Do I Create a Notification for All Jobs?", "githuburl":"" }, { "uri":"dataartsstudio_03_0200.html", + "node_id":"dataartsstudio_03_0200.xml", "product_code":"", "code":"336", "des":"The following table lists the number of nodes that can be executed concurrently in each DataArts Studio version.", "doc_type":"", "kw":"How Many Nodes Can Be Executed Concurrently in Each DataArts Studio Version?,DataArts Factory,User G", + "search_title":"", + "metedata":[ + { + + } + ], "title":"How Many Nodes Can Be Executed Concurrently in Each DataArts Studio Version?", "githuburl":"" }, { "uri":"dataartsstudio_03_0201.html", + "node_id":"dataartsstudio_03_0201.xml", "product_code":"", "code":"337", "des":"The system obtains permissions for the job agency, workspace agency, and execution user in sequence, and then executes jobs with the permissions.By default, a job is exec", "doc_type":"", "kw":"What Is the Priority of the Startup User, Execution User, Workspace Agency, and Job Agency?,DataArts", + "search_title":"", + "metedata":[ + { + + } + ], "title":"What Is the Priority of the Startup User, Execution User, Workspace Agency, and Job Agency?", "githuburl":"" }, { "uri":"dataartsstudio_12_0006.html", + "node_id":"dataartsstudio_12_0006.xml", "product_code":"", "code":"338", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"", "kw":"Change History,User Guide", + "search_title":"", + "metedata":[ + { + + } + ], "title":"Change History", "githuburl":"" } diff --git a/docs/dataartsstudio/umn/CLASS.TXT.json b/docs/dataartsstudio/umn/CLASS.TXT.json index 5723b981..833ad426 100644 --- a/docs/dataartsstudio/umn/CLASS.TXT.json +++ b/docs/dataartsstudio/umn/CLASS.TXT.json @@ -99,7 +99,7 @@ "code":"11" }, { - "desc":"To use DataArts Studio, create a cloud platform account, create a DataArts Studio instance, and authorize a user to use DataArts Studio.For details about the preparations", + "desc":"To use DataArts Studio, create a a cloud platform account, create a DataArts Studio instance, and authorize a user to use DataArts Studio.For details about the preparatio", "product_code":"dataartsstudio", "title":"Preparations", "uri":"dataartsstudio_01_0003.html", @@ -117,7 +117,7 @@ "code":"13" }, { - "desc":"Only cloud platform account users with the DAYU Administrator or Tenant Administrator permissions can create DataArts Studio instances or DataArts Studio incremental pack", + "desc":"Only a cloud platform account users with the DAYU Administrator or Tenant Administrator permissions can create DataArts Studio instances or DataArts Studio incremental pa", "product_code":"dataartsstudio", "title":"Creating a DataArts Studio Basic Package", "uri":"dataartsstudio_01_0115_0.html", diff --git a/docs/dataartsstudio/umn/dataartsstudio_01_0003.html b/docs/dataartsstudio/umn/dataartsstudio_01_0003.html index e8abc9ef..157f3b05 100644 --- a/docs/dataartsstudio/umn/dataartsstudio_01_0003.html +++ b/docs/dataartsstudio/umn/dataartsstudio_01_0003.html @@ -1,7 +1,7 @@
To use DataArts Studio, create a cloud platform account, create a DataArts Studio instance, and authorize a user to use DataArts Studio.
+To use DataArts Studio, create a cloud platform account, create a DataArts Studio instance, and authorize a user to use DataArts Studio.
For details about the preparations and operations, see later sections.
If you do not require individual IAM users for permissions management, skip this section.
For details, see "User Groups and Authorization" > "Creating a User Group and Assigning Permissions" in Identity and Access Management User Guide.
+For details, see "User Groups and Authorization" > "Creating a User Group and Assigning Permissions" in Identity and Access Management User Guide.
For details, see "IAM Users" > "Creating an IAM User" in Identity and Access Management User Guide.
diff --git a/docs/dataartsstudio/umn/dataartsstudio_01_0021.html b/docs/dataartsstudio/umn/dataartsstudio_01_0021.html index 6f3c0820..27d1924d 100644 --- a/docs/dataartsstudio/umn/dataartsstudio_01_0021.html +++ b/docs/dataartsstudio/umn/dataartsstudio_01_0021.html @@ -3,7 +3,7 @@After creating a CDM cluster, you can view its basic information and modify its configurations.
After disabling User Isolation, restart the cluster VM for the settings to take effect.
+After disabling User Isolation, restart the cluster VM for the settings to take effect.
Redis Server List
List of MongoDB server addresses. Enter each address in the format of IP address or domain name of the database server:port number, and separate the entered addresses with semicolons (;).
+List of Redis server addresses. Enter each address in the format of IP address or domain name of the database server:port number, and separate the entered addresses with semicolons (;).
192.168.0.1:7300;192.168.0.2:7301
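Each entry in such a server list follows the same host:port convention shown above, so a value can be sanity-checked before the link is saved. The sketch below is not part of CDM; it is a hypothetical helper (ours, including the function name) that validates a list of this shape:

```python
def parse_server_list(value: str) -> list[tuple[str, int]]:
    """Split a 'host:port;host:port' server list into (host, port) pairs."""
    servers = []
    for entry in value.split(";"):
        entry = entry.strip()
        if not entry:
            continue  # tolerate a trailing semicolon
        host, _, port = entry.rpartition(":")
        if not host or not port.isdigit():
            raise ValueError(f"malformed server entry: {entry!r}")
        servers.append((host, int(port)))
    return servers

print(parse_server_list("192.168.0.1:7300;192.168.0.2:7301"))
# [('192.168.0.1', 7300), ('192.168.0.2', 7301)]
```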
Parameter |
Description @@ -14,7 +14,7 @@ | Database to which data is to be imported |
-mongodb + | DB_dds |
---|---|---|---|---|
Collection Name
diff --git a/docs/dataartsstudio/umn/dataartsstudio_01_0115_0.html b/docs/dataartsstudio/umn/dataartsstudio_01_0115_0.html
index 560cf616..98ca7d11 100644
--- a/docs/dataartsstudio/umn/dataartsstudio_01_0115_0.html
+++ b/docs/dataartsstudio/umn/dataartsstudio_01_0115_0.html
@@ -1,7 +1,7 @@
Creating a DataArts Studio Basic Package
-BackgroundOnly cloud platform account users with the DAYU Administrator or Tenant Administrator permissions can create DataArts Studio instances or DataArts Studio incremental packages.![]() Users with the Tenant Administrator permissions can perform all operations except IAM user management. For security purposes, you are not advised to grant the Tenant Administrator permissions to IAM users.
+BackgroundOnly cloud platform account users with the DAYU Administrator or Tenant Administrator permissions can create DataArts Studio instances or DataArts Studio incremental packages.
![]() Users with the Tenant Administrator permissions can perform all operations except IAM user management. For security purposes, you are not advised to grant the Tenant Administrator permissions to IAM users. Developing a Python Script
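As a minimal illustration of what such a topic covers, the following self-contained sketch is the kind of Python program a scheduling node could run. It is not an official template; how job parameters actually reach the script depends on the node configuration, and treating them as command-line arguments here is an assumption:

```python
# A stand-alone sketch, not an official template: it only assumes that the
# scheduler invokes the script like a normal Python program.
import sys

def main(argv: list[str]) -> int:
    # Hypothetical job parameter: a date partition passed as the first argument.
    target_date = argv[1] if len(argv) > 1 else "unknown"
    print(f"processing partition for {target_date}")
    return 0

if __name__ == "__main__":
    sys.exit(main(sys.argv))
```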
|
Figure 1 describes the field mapping between DWS tables created by CDM and source tables. For example, if you use CDM to migrate the Oracle database to DWS, CDM automatically creates a table on DWS and maps the NUMBER(3,0) field of the Oracle database to the SMALLINT field of DWS.
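To make the auto table creation rule concrete, here is an illustrative Python sketch of a precision-based type mapping. Only the NUMBER(3,0) to SMALLINT case comes from the text above; every other branch is a hypothetical stand-in, not CDM's actual mapping table:

```python
# Illustrative only: the NUMBER(3,0) -> SMALLINT rule is stated in the text
# above; the other branches are hypothetical stand-ins, not CDM's real table.
def map_oracle_number(precision: int, scale: int) -> str:
    """Map an Oracle NUMBER(precision, scale) column to a plausible DWS type."""
    if scale > 0:
        return f"NUMERIC({precision},{scale})"  # assumption
    if precision <= 4:
        return "SMALLINT"                       # covers NUMBER(3,0) from Figure 1
    if precision <= 9:
        return "INTEGER"                        # assumption
    return "BIGINT"                             # assumption

print(map_oracle_number(3, 0))  # SMALLINT
```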
A workspace member can be assigned the role of admin, developer, operator, or viewer. This topic describes the permissions of each role.
Accounts and users with the DAYU Administrator or Tenant Administrator role have all the permissions on cloud platform account, including permissions to createDataArts Studio instances and DataArts Studio incremental packages. By default, other users do not have the permissions to create DataArts Studio instances. If they want to create DataArts Studio instances, they must obtain the required permissions.
+Accounts and users with the DAYU Administrator or Tenant Administrator role have all the permissions on a cloud platform account, including permissions to create DataArts Studio instances and DataArts Studio incremental packages. By default, other users do not have the permissions to create DataArts Studio instances. If they want to create DataArts Studio instances, they must obtain the required permissions.
Accounts and users with the Tenant Administrator role can perform all operations except IAM user management. For security purposes, you are not advised to assign this role to IAM users. Exercise caution when performing this operation.
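The role model above can be pictured as a simple capability lookup. This sketch is hypothetical and simplified; the capability sets are illustrative only, not the service's full permission matrix:

```python
# Hypothetical sketch of the four workspace roles described above; the
# capability sets are simplified for illustration.
WORKSPACE_ROLES = {
    "admin":     {"manage_workspace", "develop_jobs", "operate_jobs", "view"},
    "developer": {"develop_jobs", "view"},
    "operator":  {"operate_jobs", "view"},
    "viewer":    {"view"},
}

def can(role: str, action: str) -> bool:
    """Return True if the given workspace role may perform the action."""
    return action in WORKSPACE_ROLES.get(role, set())

assert can("developer", "develop_jobs")
assert not can("viewer", "operate_jobs")
```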
2023-04-30
+2024-03-01
+Update the following sections:
+ +2023-04-30
Update the following sections:
diff --git a/docs/dataartsstudio/umn/en-us_image_0000001322408176.png b/docs/dataartsstudio/umn/en-us_image_0000001322408176.png deleted file mode 100644 index 59dcc7f8..00000000 Binary files a/docs/dataartsstudio/umn/en-us_image_0000001322408176.png and /dev/null differ diff --git a/docs/dataartsstudio/umn/en-us_image_0000001694876605.png b/docs/dataartsstudio/umn/en-us_image_0000001694876605.png new file mode 100644 index 00000000..b935943e Binary files /dev/null and b/docs/dataartsstudio/umn/en-us_image_0000001694876605.png differ