diff --git a/docs/dli/umn/ALL_META.TXT.json b/docs/dli/umn/ALL_META.TXT.json index 2b5b0b17..8f4d0d36 100644 --- a/docs/dli/umn/ALL_META.TXT.json +++ b/docs/dli/umn/ALL_META.TXT.json @@ -72,9 +72,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -347,7 +346,7 @@ "node_id":"dli_01_0017.xml", "product_code":"dli", "code":"18", - "des":"SQL jobs allow you to execute SQL statements entered in the SQL job editing window, import data, and export data.SQL job management provides the following functions:Searc", + "des":"SQL jobs allow you to execute SQL statements in the SQL job editing window, import data, and export data.SQL job management provides the following functions:Searching for", "doc_type":"usermanual", "kw":"SQL Job Management,Job Management,User Guide", "search_title":"", @@ -530,7 +529,7 @@ "node_id":"dli_01_0462.xml", "product_code":"dli", "code":"27", - "des":"After creating a job, you can view the job details to learn about the following information:Viewing Job DetailsChecking the Job Monitoring InformationViewing the Task Lis", + "des":"After creating a job, you can view the job details to learn about the following information:Viewing Job DetailsChecking Job Monitoring InformationViewing the Task List of", "doc_type":"usermanual", "kw":"Flink Job Details,Flink Job Management,User Guide", "search_title":"", @@ -769,11 +768,29 @@ "title":"Deleting a Queue", "githuburl":"" }, + { + "uri":"dli_01_0565.html", + "node_id":"dli_01_0565.xml", + "product_code":"dli", + "code":"39", + "des":"You can create enterprise projects matching the organizational structure of your enterprises to centrally manage cloud resources across regions by project. Then you can c", + "doc_type":"usermanual", + "kw":"Allocating a Queue to an Enterprise Project,Queue Management,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"usermanual" + } + ], + "title":"Allocating a Queue to an Enterprise Project", + "githuburl":"" + }, { "uri":"dli_01_0443.html", "node_id":"dli_01_0443.xml", "product_code":"dli", - "code":"39", + "code":"40", "des":"If the CIDR block of the DLI queue conflicts with that of the user data source, you can change the CIDR block of the queue.If the queue whose CIDR block is to be modified", "doc_type":"usermanual", "kw":"Modifying the CIDR Block,Queue Management,User Guide", @@ -794,7 +811,7 @@ "uri":"dli_01_0487.html", "node_id":"dli_01_0487.xml", "product_code":"dli", - "code":"40", + "code":"41", "des":"Elastic scaling can be performed for a newly created queue only when there were jobs running in this queue.Queues with 16 CUs do not support scale-out or scale-in.Queues ", "doc_type":"usermanual", "kw":"Elastic Queue Scaling,Queue Management,User Guide", @@ -814,7 +831,7 @@ "uri":"dli_01_0488.html", "node_id":"dli_01_0488.xml", "product_code":"dli", - "code":"41", + "code":"42", "des":"When services are busy, you might need to use more compute resources to process services in a period. After this period, you do not require the same amount of resources. 
", "doc_type":"usermanual", "kw":"Scheduling CU Changes,Queue Management,User Guide", @@ -834,7 +851,7 @@ "uri":"dli_01_0489.html", "node_id":"dli_01_0489.xml", "product_code":"dli", - "code":"42", + "code":"43", "des":"It can be used to test the connectivity between the DLI queue and the peer IP address specified by the user in common scenarios, or the connectivity between the DLI queue", "doc_type":"usermanual", "kw":"Testing Address Connectivity,Queue Management,User Guide", @@ -842,9 +859,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -855,7 +871,7 @@ "uri":"dli_01_0421.html", "node_id":"dli_01_0421.xml", "product_code":"dli", - "code":"43", + "code":"44", "des":"Once you have created an SMN topic, you can easily subscribe to it by going to the Topic Management > Topics page of the SMN console. You can choose to receive notificati", "doc_type":"usermanual", "kw":"Creating an SMN Topic,Queue Management,User Guide", @@ -875,7 +891,7 @@ "uri":"dli_01_0022.html", "node_id":"dli_01_0022.xml", "product_code":"dli", - "code":"44", + "code":"45", "des":"A tag is a key-value pair that you can customize to identify cloud resources. It helps you to classify and search for cloud resources. A tag consists of a tag key and a t", "doc_type":"usermanual", "kw":"Managing Queue Tags,Queue Management,User Guide", @@ -889,11 +905,350 @@ "title":"Managing Queue Tags", "githuburl":"" }, + { + "uri":"dli_01_0563.html", + "node_id":"dli_01_0563.xml", + "product_code":"dli", + "code":"46", + "des":"DLI allows you to set properties for queues.You can set Spark driver parameters to improve the scheduling efficiency of queues.This section describes how to set queue pro", + "doc_type":"usermanual", + "kw":"Setting Queue Properties,Queue Management,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"usermanual" + } + ], + "title":"Setting Queue Properties", + "githuburl":"" + }, + { + "uri":"dli_01_0508.html", + "node_id":"dli_01_0508.xml", + "product_code":"dli", + "code":"47", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Elastic Resource Pool", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Elastic Resource Pool", + "githuburl":"" + }, + { + "uri":"dli_01_0528.html", + "node_id":"dli_01_0528.xml", + "product_code":"dli", + "code":"48", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Before You Start", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Before You Start", + "githuburl":"" + }, + { + "uri":"dli_01_0504.html", + "node_id":"dli_01_0504.xml", + "product_code":"dli", + "code":"49", + "des":"An elastic resource pool provides compute resources (CPU and memory) for running DLI jobs. The unit is CU. 
One CU contains one CPU and 4 GB memory.You can create multiple", + "doc_type":"usermanual", + "kw":"Overview,Before You Start,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"Yes" + } + ], + "title":"Overview", + "githuburl":"" + }, + { + "uri":"dli_01_0515.html", + "node_id":"dli_01_0515.xml", + "product_code":"dli", + "code":"50", + "des":"This section walks you through the procedure of adding a queue to an elastic resource pool and binding an enhanced datasource connection to the elastic resource pool.Proc", + "doc_type":"usermanual", + "kw":"Creating an Elastic Resource Pool and Running a Job,Before You Start,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"Yes" + } + ], + "title":"Creating an Elastic Resource Pool and Running a Job", + "githuburl":"" + }, + { + "uri":"dli_01_0516.html", + "node_id":"dli_01_0516.xml", + "product_code":"dli", + "code":"51", + "des":"A company has multiple departments that perform data analysis in different periods during a day.Department A requires a large number of compute resources from 00:00 a.m. ", + "doc_type":"usermanual", + "kw":"Configuring Scaling Policies for Queues,Before You Start,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"Yes" + } + ], + "title":"Configuring Scaling Policies for Queues", + "githuburl":"" + }, + { + "uri":"dli_01_0529.html", + "node_id":"dli_01_0529.xml", + "product_code":"dli", + "code":"52", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Regular Operations", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Regular Operations", + "githuburl":"" + }, + { + "uri":"dli_01_0505.html", + "node_id":"dli_01_0505.xml", + "product_code":"dli", + "code":"53", + "des":"For details about the application scenarios of elastic resource pools, see the Overview. This section describes how to create an elastic resource pool.If you use an enhan", + "doc_type":"usermanual", + "kw":"Creating an Elastic Resource Pool,Regular Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"Yes" + } + ], + "title":"Creating an Elastic Resource Pool", + "githuburl":"" + }, + { + "uri":"dli_01_0526.html", + "node_id":"dli_01_0526.xml", + "product_code":"dli", + "code":"54", + "des":"Administrators can assign permissions of different operation scopes to users for each elastic resource pool.The administrator and elastic resource pool owner have all per", + "doc_type":"usermanual", + "kw":"Managing Permissions,Regular Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"Yes" + } + ], + "title":"Managing Permissions", + "githuburl":"" + }, + { + "uri":"dli_01_0509.html", + "node_id":"dli_01_0509.xml", + "product_code":"dli", + "code":"55", + "des":"You can add one or more queues to an elastic resource pool to run jobs. 
This section describes how to add a queue to an elastic resource pool.Automatic scaling of an elas", + "doc_type":"usermanual", + "kw":"Adding a Queue,Regular Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"Yes" + } + ], + "title":"Adding a Queue", + "githuburl":"" + }, + { + "uri":"dli_01_0530.html", + "node_id":"dli_01_0530.xml", + "product_code":"dli", + "code":"56", + "des":"If you want a queue to use resources in an elastic resource pool, bind the queue to the pool.You can click Associate Queue on the Resource Pool page to bind a queue to an", + "doc_type":"usermanual", + "kw":"Binding a Queue,Regular Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"Yes" + } + ], + "title":"Binding a Queue", + "githuburl":"" + }, + { + "uri":"dli_01_0506.html", + "node_id":"dli_01_0506.xml", + "product_code":"dli", + "code":"57", + "des":"Multiple queues can be added to an elastic resource pool. For details about how to add a queue, see Adding a Queue. You can configure the number of CUs you want based on ", + "doc_type":"usermanual", + "kw":"Managing Queues,Regular Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"Yes" + } + ], + "title":"Managing Queues", + "githuburl":"" + }, + { + "uri":"dli_01_0507.html", + "node_id":"dli_01_0507.xml", + "product_code":"dli", + "code":"58", + "des":"CU settings are used to control the maximum and minimum CU ranges for elastic resource pools to avoid unlimited resource scaling.For example, an elastic resource pool has", + "doc_type":"usermanual", + "kw":"Setting CUs,Regular Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"Yes" + } + ], + "title":"Setting CUs", + "githuburl":"" + }, + { + "uri":"dli_01_0524.html", + "node_id":"dli_01_0524.xml", + "product_code":"dli", + "code":"59", + "des":"If CUs of a yearly/monthly elastic resource pool cannot meet your service requirements, you can modify the CUs. In this case, you will be charged based on the number of C", + "doc_type":"usermanual", + "kw":"Modifying Specifications,Regular Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"Yes" + } + ], + "title":"Modifying Specifications", + "githuburl":"" + }, + { + "uri":"dli_01_0525.html", + "node_id":"dli_01_0525.xml", + "product_code":"dli", + "code":"60", + "des":"A tag is a key-value pair that you can customize to identify cloud resources. It helps you to classify and search for cloud resources. A tag consists of a tag key and a t", + "doc_type":"usermanual", + "kw":"Managing Tags,Regular Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"Yes" + } + ], + "title":"Managing Tags", + "githuburl":"" + }, + { + "uri":"dli_01_0532.html", + "node_id":"dli_01_0532.xml", + "product_code":"dli", + "code":"61", + "des":"If you added a queue to or deleted one from an elastic resource pool, or you scaled an added queue, the CU quantity of the elastic resource pool may be changed. 
You can v", + "doc_type":"usermanual", + "kw":"Viewing Scaling History,Regular Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "IsMulti":"No", + "documenttype":"usermanual", + "IsBot":"Yes" + } + ], + "title":"Viewing Scaling History", + "githuburl":"" + }, + { + "uri":"dli_01_0566.html", + "node_id":"dli_01_0566.xml", + "product_code":"dli", + "code":"62", + "des":"You can create enterprise projects matching the organizational structure of your enterprises to centrally manage cloud resources across regions by project. Then you can c", + "doc_type":"usermanual", + "kw":"Allocating to an Enterprise Project,Regular Operations,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"usermanual" + } + ], + "title":"Allocating to an Enterprise Project", + "githuburl":"" + }, { "uri":"dli_01_0004.html", "node_id":"dli_01_0004.xml", "product_code":"dli", - "code":"45", + "code":"63", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Data Management", @@ -914,7 +1269,7 @@ "uri":"dli_01_0390.html", "node_id":"dli_01_0390.xml", "product_code":"dli", - "code":"46", + "code":"64", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Databases and Tables", @@ -935,7 +1290,7 @@ "uri":"dli_01_0228.html", "node_id":"dli_01_0228.xml", "product_code":"dli", - "code":"47", + "code":"65", "des":"DLI database and table management provide the following functions:Database Permission ManagementTable Permission ManagementCreating a Database or a TableDeleting a Databa", "doc_type":"usermanual", "kw":"Overview,Databases and Tables,User Guide", @@ -955,7 +1310,7 @@ "uri":"dli_01_0447.html", "node_id":"dli_01_0447.xml", "product_code":"dli", - "code":"48", + "code":"66", "des":"By setting permissions, you can assign varying database permissions to different users.The administrator and database owner have all permissions, which cannot be set or m", "doc_type":"usermanual", "kw":"Managing Database Permissions,Databases and Tables,User Guide", @@ -975,7 +1330,7 @@ "uri":"dli_01_0448.html", "node_id":"dli_01_0448.xml", "product_code":"dli", - "code":"49", + "code":"67", "des":"By setting permissions, you can assign varying table permissions to different users.The administrator and table owner have all permissions, which cannot be set or modifie", "doc_type":"usermanual", "kw":"Managing Table Permissions,Databases and Tables,User Guide", @@ -995,7 +1350,7 @@ "uri":"dli_01_0005.html", "node_id":"dli_01_0005.xml", "product_code":"dli", - "code":"50", + "code":"68", "des":"A database, built on the computer storage device, is a data warehouse where data is organized, stored, and managed based on its structure.The table is an important part o", "doc_type":"usermanual", "kw":"Creating a Database or a Table,Databases and Tables,User Guide", @@ -1015,7 +1370,7 @@ "uri":"dli_01_0011.html", "node_id":"dli_01_0011.xml", "product_code":"dli", - "code":"51", + "code":"69", "des":"You can delete unnecessary databases and tables based on actual conditions.You are not allowed to delete 
databases or tables that are being used for running jobs.The admi", "doc_type":"usermanual", "kw":"Deleting a Database or a Table,Databases and Tables,User Guide", @@ -1023,9 +1378,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1036,7 +1390,7 @@ "uri":"dli_01_0376.html", "node_id":"dli_01_0376.xml", "product_code":"dli", - "code":"52", + "code":"70", "des":"During actual use, developers create databases and tables and submit them to test personnel for testing. After the test is complete, the databases and tables are transfer", "doc_type":"usermanual", "kw":"Modifying the Owners of Databases and Tables,Databases and Tables,User Guide", @@ -1044,9 +1398,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1057,7 +1410,7 @@ "uri":"dli_01_0253.html", "node_id":"dli_01_0253.xml", "product_code":"dli", - "code":"53", + "code":"71", "des":"You can import data from OBS to a table created in DLI.Only one path can be specified during data import. The path cannot contain commas (,).To import data in CSV format ", "doc_type":"usermanual", "kw":"Importing Data to the Table,Databases and Tables,User Guide", @@ -1065,9 +1418,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1078,7 +1430,7 @@ "uri":"dli_01_0010.html", "node_id":"dli_01_0010.xml", "product_code":"dli", - "code":"54", + "code":"72", "des":"You can export data from a DLI table to OBS. During the export, a folder is created in OBS or the content in the existing folder is overwritten.The exported file can be i", "doc_type":"usermanual", "kw":"Exporting Data from DLI to OBS,Databases and Tables,User Guide", @@ -1086,9 +1438,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1099,7 +1450,7 @@ "uri":"dli_01_0008.html", "node_id":"dli_01_0008.xml", "product_code":"dli", - "code":"55", + "code":"73", "des":"Metadata is used to define data types. It describes information about the data, including the source, size, format, and other data features. In database fields, metadata ", "doc_type":"usermanual", "kw":"Viewing Metadata,Databases and Tables,User Guide", @@ -1107,9 +1458,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1120,7 +1470,7 @@ "uri":"dli_01_0007.html", "node_id":"dli_01_0007.xml", "product_code":"dli", - "code":"56", + "code":"74", "des":"The Preview page displays the first 10 records in the table.You can preview data on either the Data Management page or the SQL Editor page.To preview data on the Data Man", "doc_type":"usermanual", "kw":"Previewing Data,Databases and Tables,User Guide", @@ -1128,9 +1478,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1141,7 +1490,7 @@ "uri":"dli_01_0552.html", "node_id":"dli_01_0552.xml", "product_code":"dli", - "code":"57", + "code":"75", "des":"A tag is a key-value pair that you can customize to identify cloud resources. It helps you to classify and search for cloud resources. 
A tag consists of a tag key and a t", "doc_type":"usermanual", "kw":"Managing Tags,Databases and Tables,User Guide", @@ -1161,7 +1510,7 @@ "uri":"dli_01_0366.html", "node_id":"dli_01_0366.xml", "product_code":"dli", - "code":"58", + "code":"76", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Package Management", @@ -1182,7 +1531,7 @@ "uri":"dli_01_0407.html", "node_id":"dli_01_0407.xml", "product_code":"dli", - "code":"59", + "code":"77", "des":"Package management provides the following functions:Managing Package PermissionsCreating a PackageDeleting a PackageYou can delete program packages in batches.You can del", "doc_type":"usermanual", "kw":"Overview,Package Management,User Guide", @@ -1202,7 +1551,7 @@ "uri":"dli_01_0477.html", "node_id":"dli_01_0477.xml", "product_code":"dli", - "code":"60", + "code":"78", "des":"You can isolate package groups or packages allocated to different users by setting permissions to ensure data query performance.The administrator and the owner of a packa", "doc_type":"usermanual", "kw":"Managing Permissions on Packages and Package Groups,Package Management,User Guide", @@ -1222,7 +1571,7 @@ "uri":"dli_01_0367.html", "node_id":"dli_01_0367.xml", "product_code":"dli", - "code":"61", + "code":"79", "des":"DLI allows you to submit program packages in batches to the general-use queue for running.If you need to update a package, you can use the same package or file to upload ", "doc_type":"usermanual", "kw":"Creating a Package,Package Management,User Guide", @@ -1242,7 +1591,7 @@ "uri":"dli_01_0369.html", "node_id":"dli_01_0369.xml", "product_code":"dli", - "code":"62", + "code":"80", "des":"You can delete a package based on actual conditions.On the left of the management console, choose Data Management > Package Management.Click Delete in the Operation colum", "doc_type":"usermanual", "kw":"Deleting a Package,Package Management,User Guide", @@ -1263,7 +1612,7 @@ "uri":"dli_01_0478.html", "node_id":"dli_01_0478.xml", "product_code":"dli", - "code":"63", + "code":"81", "des":"To change the owner of a package, click More > Modify Owner in the Operation column of a package on the Package Management page.If the package has been grouped, you can m", "doc_type":"usermanual", "kw":"Modifying the Owner,Package Management,User Guide", @@ -1284,7 +1633,7 @@ "uri":"dli_01_0397.html", "node_id":"dli_01_0397.xml", "product_code":"dli", - "code":"64", + "code":"82", "des":"DLI built-in dependencies are provided by the platform by default. In case of conflicts, you do not need to upload them when packaging JAR packages of Spark or Flink Jar ", "doc_type":"usermanual", "kw":"Built-in Dependencies,Package Management,User Guide", @@ -1304,7 +1653,7 @@ "uri":"dli_01_0379.html", "node_id":"dli_01_0379.xml", "product_code":"dli", - "code":"65", + "code":"83", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Job Templates", @@ -1325,7 +1674,7 @@ "uri":"dli_01_0021.html", "node_id":"dli_01_0021.xml", "product_code":"dli", - "code":"66", + "code":"84", "des":"To facilitate SQL operation execution, DLI allows you to customize query templates or save the SQL statements in use as templates. After templates are saved, you do not n", "doc_type":"usermanual", "kw":"Managing SQL Templates,Job Templates,User Guide", @@ -1346,7 +1695,7 @@ "uri":"dli_01_0464.html", "node_id":"dli_01_0464.xml", "product_code":"dli", - "code":"67", + "code":"85", "des":"Flink templates include sample templates and custom templates. You can modify an existing sample template to meet the actual job logic requirements and save time for edit", "doc_type":"usermanual", "kw":"Managing Flink Templates,Job Templates,User Guide", @@ -1366,7 +1715,7 @@ "uri":"dli_01_0551.html", "node_id":"dli_01_0551.xml", "product_code":"dli", - "code":"68", + "code":"86", "des":"You can modify a sample template to meet the Spark job requirements, saving time for editing SQL statements.Currently, the cloud platform does not provide preset Spark te", "doc_type":"usermanual", "kw":"Managing Spark SQL Templates,Job Templates,User Guide", @@ -1387,7 +1736,7 @@ "uri":"dli_01_05110.html", "node_id":"dli_01_05110.xml", "product_code":"dli", - "code":"69", + "code":"87", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Appendix", @@ -1408,7 +1757,7 @@ "uri":"dli_01_05111.html", "node_id":"dli_01_05111.xml", "product_code":"dli", - "code":"70", + "code":"88", "des":"TPC-H is a test set developed by the Transaction Processing Performance Council (TPC) to simulate decision-making support applications. It is widely used in academia and ", "doc_type":"usermanual", "kw":"TPC-H Sample Data in the SQL Template,Appendix,User Guide", @@ -1428,7 +1777,7 @@ "uri":"dli_01_0426.html", "node_id":"dli_01_0426.xml", "product_code":"dli", - "code":"71", + "code":"89", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Enhanced Datasource Connections", @@ -1449,7 +1798,7 @@ "uri":"dli_01_0003.html", "node_id":"dli_01_0003.xml", "product_code":"dli", - "code":"72", + "code":"90", "des":"In cross-source data analysis scenarios, DLI needs to connect to external data sources. 
However, due to the different VPCs between the data source and DLI, the network ca", "doc_type":"usermanual", "kw":"Overview,Enhanced Datasource Connections,User Guide", @@ -1470,7 +1819,7 @@ "uri":"dli_01_0410.html", "node_id":"dli_01_0410.xml", "product_code":"dli", - "code":"73", + "code":"91", "des":"If DLI needs to access external data sources, you need to establish enhanced datasource connections to enable the network between DLI and the data sources, and then devel", "doc_type":"usermanual", "kw":"Cross-Source Analysis Development Methods,Enhanced Datasource Connections,User Guide", @@ -1491,7 +1840,7 @@ "uri":"dli_01_0006.html", "node_id":"dli_01_0006.xml", "product_code":"dli", - "code":"74", + "code":"92", "des":"Create an enhanced datasource connection for DLI to access, import, query, and analyze data of other data sources.For example, to connect DLI to the MRS, RDS, CSS, Kafka,", "doc_type":"usermanual", "kw":"Creating an Enhanced Datasource Connection,Enhanced Datasource Connections,User Guide", @@ -1512,7 +1861,7 @@ "uri":"dli_01_0553.html", "node_id":"dli_01_0553.xml", "product_code":"dli", - "code":"75", + "code":"93", "des":"Delete an enhanced datasource connection that is no longer used on the console.Log in to the DLI management console.In the left navigation pane, choose Datasource Connect", "doc_type":"usermanual", "kw":"Deleting an Enhanced Datasource Connection,Enhanced Datasource Connections,User Guide", @@ -1533,7 +1882,7 @@ "uri":"dli_01_0013.html", "node_id":"dli_01_0013.xml", "product_code":"dli", - "code":"76", + "code":"94", "des":"Host information is the mapping between host IP addresses and domain names. After you configure host information, jobs can only use the configured domain names to access ", "doc_type":"usermanual", "kw":"Modifying Host Information,Enhanced Datasource Connections,User Guide", @@ -1554,7 +1903,7 @@ "uri":"dli_01_0514.html", "node_id":"dli_01_0514.xml", "product_code":"dli", - "code":"77", + "code":"95", "des":"The CIDR block of the DLI queue that is bound with a datasource connection cannot overlap with that of the data source.The default queue cannot be bound with a connection", "doc_type":"usermanual", "kw":"Binding and Unbinding a Queue,Enhanced Datasource Connections,User Guide", @@ -1562,9 +1911,9 @@ "metedata":[ { "prodname":"dli", + "IsMulti":"Yes", "opensource":"true", "documenttype":"usermanual", - "IsMulti":"No", "IsBot":"Yes" } ], @@ -1575,7 +1924,7 @@ "uri":"dli_01_0014.html", "node_id":"dli_01_0014.xml", "product_code":"dli", - "code":"78", + "code":"96", "des":"A route is configured with the destination, next hop type, and next hop to determine where the network traffic is directed. Routes are classified into system routes and c", "doc_type":"usermanual", "kw":"Adding a Route,Enhanced Datasource Connections,User Guide", @@ -1596,7 +1945,7 @@ "uri":"dli_01_0556.html", "node_id":"dli_01_0556.xml", "product_code":"dli", - "code":"79", + "code":"97", "des":"Delete a route that is no longer used.A custom route table cannot be deleted if it is associated with a subnet.Log in to the DLI management console.In the left navigation", "doc_type":"usermanual", "kw":"Deleting a Route,Enhanced Datasource Connections,User Guide", @@ -1617,7 +1966,7 @@ "uri":"dli_01_0018.html", "node_id":"dli_01_0018.xml", "product_code":"dli", - "code":"80", + "code":"98", "des":"Enhanced connections support user authorization by project. 
After authorization, users in the project have the permission to perform operations on the enhanced connection", "doc_type":"usermanual", "kw":"Enhanced Connection Permission Management,Enhanced Datasource Connections,User Guide", @@ -1625,9 +1974,9 @@ "metedata":[ { "prodname":"dli", + "IsMulti":"Yes", "opensource":"true", "documenttype":"usermanual", - "IsMulti":"No", "IsBot":"Yes" } ], @@ -1638,7 +1987,7 @@ "uri":"dli_01_0019.html", "node_id":"dli_01_0019.xml", "product_code":"dli", - "code":"81", + "code":"99", "des":"A tag is a key-value pair customized by users and used to identify cloud resources. It helps users to classify and search for cloud resources. A tag consists of a tag key", "doc_type":"usermanual", "kw":"Enhanced Datasource Connection Tag Management,Enhanced Datasource Connections,User Guide", @@ -1659,7 +2008,7 @@ "uri":"dli_01_0422.html", "node_id":"dli_01_0422.xml", "product_code":"dli", - "code":"82", + "code":"100", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Datasource Authentication", @@ -1680,7 +2029,7 @@ "uri":"dli_01_0561.html", "node_id":"dli_01_0561.xml", "product_code":"dli", - "code":"83", + "code":"101", "des":"When analyzing across multiple sources, it is not recommended to configure authentication information directly in a job as it can lead to password leakage. Instead, you a", "doc_type":"usermanual", "kw":"Overview,Datasource Authentication,User Guide", @@ -1701,7 +2050,7 @@ "uri":"dli_01_0427.html", "node_id":"dli_01_0427.xml", "product_code":"dli", - "code":"84", + "code":"102", "des":"Create a CSS datasource authentication on the DLI console to store the authentication information of the CSS security cluster to DLI. This will allow you to access to the", "doc_type":"usermanual", "kw":"Creating a CSS Datasource Authentication,Datasource Authentication,User Guide", @@ -1722,7 +2071,7 @@ "uri":"dli_01_0558.html", "node_id":"dli_01_0558.xml", "product_code":"dli", - "code":"85", + "code":"103", "des":"Create a Kerberos datasource authentication on the DLI console to store the authentication information of the data source to DLI. This will allow you to access to the dat", "doc_type":"usermanual", "kw":"Creating a Kerberos Datasource Authentication,Datasource Authentication,User Guide", @@ -1743,7 +2092,7 @@ "uri":"dli_01_0560.html", "node_id":"dli_01_0560.xml", "product_code":"dli", - "code":"86", + "code":"104", "des":"Create a Kafka_SSL datasource authentication on the DLI console to store the Kafka authentication information to DLI. This will allow you to access to Kafka instances wit", "doc_type":"usermanual", "kw":"Creating a Kafka_SSL Datasource Authentication,Datasource Authentication,User Guide", @@ -1764,7 +2113,7 @@ "uri":"dli_01_0559.html", "node_id":"dli_01_0559.xml", "product_code":"dli", - "code":"87", + "code":"105", "des":"Create a password datasource authentication on the DLI console to store passwords of the GaussDB(DWS), RDS, DCS, and DDS data sources to DLI. 
This will allow you to acces", "doc_type":"usermanual", "kw":"Creating a Password Datasource Authentication,Datasource Authentication,User Guide", @@ -1785,7 +2134,7 @@ "uri":"dli_01_0480.html", "node_id":"dli_01_0480.xml", "product_code":"dli", - "code":"88", + "code":"106", "des":"Grant permissions on a datasource authentication to users so multiple user jobs can use the datasource authentication without affecting each other.The administrator and t", "doc_type":"usermanual", "kw":"Datasource Authentication Permission Management,Datasource Authentication,User Guide", @@ -1806,7 +2155,7 @@ "uri":"dli_01_0485.html", "node_id":"dli_01_0485.xml", "product_code":"dli", - "code":"89", + "code":"107", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Global Configuration", @@ -1827,7 +2176,7 @@ "uri":"dli_01_0476.html", "node_id":"dli_01_0476.xml", "product_code":"dli", - "code":"90", + "code":"108", "des":"DLI allows you to set variables that are frequently used during job development as global variables on the DLI management console. This avoids repeated definitions during", "doc_type":"usermanual", "kw":"Global Variables,Global Configuration,User Guide", @@ -1847,7 +2196,7 @@ "uri":"dli_01_0533.html", "node_id":"dli_01_0533.xml", "product_code":"dli", - "code":"91", + "code":"109", "des":"You can grant permissions on a global variable to users.The administrator and the global variable owner have all permissions. You do not need to set permissions for them,", "doc_type":"usermanual", "kw":"Permission Management for Global Variables,Global Configuration,User Guide", @@ -1863,32 +2212,11 @@ "title":"Permission Management for Global Variables", "githuburl":"" }, - { - "uri":"dli_01_0486.html", - "node_id":"dli_01_0486.xml", - "product_code":"dli", - "code":"92", - "des":"Only the tenant account or a subaccount of user group admin can authorize access.After entering the DLI management console, you are advised to set agency permissions to e", - "doc_type":"usermanual", - "kw":"Service Authorization,Global Configuration,User Guide", - "search_title":"", - "metedata":[ - { - "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" - } - ], - "title":"Service Authorization", - "githuburl":"" - }, { "uri":"dli_01_0408.html", "node_id":"dli_01_0408.xml", "product_code":"dli", - "code":"93", + "code":"110", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Permissions Management", @@ -1906,7 +2234,7 @@ "uri":"dli_01_0440.html", "node_id":"dli_01_0440.xml", "product_code":"dli", - "code":"94", + "code":"111", "des":"DLI has a comprehensive permission control mechanism and supports fine-grained authentication through Identity and Access Management (IAM). 
You can create policies in IAM", "doc_type":"usermanual", "kw":"Overview,Permissions Management,User Guide", @@ -1926,7 +2254,7 @@ "uri":"dli_01_0418.html", "node_id":"dli_01_0418.xml", "product_code":"dli", - "code":"95", + "code":"112", "des":"You can use Identity and Access Management (IAM) to implement fine-grained permissions control on DLI resources. For details, see Overview.If your cloud account does not ", "doc_type":"usermanual", "kw":"Creating an IAM User and Granting Permissions,Permissions Management,User Guide", @@ -1934,9 +2262,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1947,7 +2274,7 @@ "uri":"dli_01_0451.html", "node_id":"dli_01_0451.xml", "product_code":"dli", - "code":"96", + "code":"113", "des":"Custom policies can be created as a supplement to the system policies of DLI. You can add actions to custom policies. For the actions supported for custom policies, see \"", "doc_type":"usermanual", "kw":"Creating a Custom Policy,Permissions Management,User Guide", @@ -1967,7 +2294,7 @@ "uri":"dli_01_0417.html", "node_id":"dli_01_0417.xml", "product_code":"dli", - "code":"97", + "code":"114", "des":"A resource is an object that exists within a service. You can select DLI resources by specifying their paths.", "doc_type":"usermanual", "kw":"DLI Resources,Permissions Management,User Guide", @@ -1987,7 +2314,7 @@ "uri":"dli_01_0475.html", "node_id":"dli_01_0475.xml", "product_code":"dli", - "code":"98", + "code":"115", "des":"Request conditions are useful in determining when a custom policy takes effect. A request condition consists of a condition key and operator. Condition keys are either gl", "doc_type":"usermanual", "kw":"DLI Request Conditions,Permissions Management,User Guide", @@ -2007,7 +2334,7 @@ "uri":"dli_01_0441.html", "node_id":"dli_01_0441.xml", "product_code":"dli", - "code":"99", + "code":"116", "des":"Table 1 lists the common operations supported by each system policy of DLI. Choose proper system policies according to this table. For details about the SQL statement per", "doc_type":"usermanual", "kw":"Common Operations Supported by DLI System Policy,Permissions Management,User Guide", @@ -2015,9 +2342,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -2028,7 +2354,7 @@ "uri":"dli_01_0513.html", "node_id":"dli_01_0513.xml", "product_code":"dli", - "code":"100", + "code":"117", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Other Common Operations", @@ -2049,7 +2375,7 @@ "uri":"dli_01_0420.html", "node_id":"dli_01_0420.xml", "product_code":"dli", - "code":"101", + "code":"118", "des":"On the DLI management console, you can import data stored on OBS to DLI tables from Data Management > Databases and Tables > Table Management and SQL Editor pages. For de", "doc_type":"usermanual", "kw":"Importing Data to a DLI Table,Other Common Operations,User Guide", @@ -2070,7 +2396,7 @@ "uri":"dli_01_0445.html", "node_id":"dli_01_0445.xml", "product_code":"dli", - "code":"102", + "code":"119", "des":"This section describes metrics reported by DLI to Cloud Eye as well as their namespaces and dimensions. 
You can use the management console or APIs provided by Cloud Eye t", "doc_type":"usermanual", "kw":"Viewing Monitoring Metrics,Other Common Operations,User Guide", @@ -2090,7 +2416,7 @@ "uri":"dli_01_0318.html", "node_id":"dli_01_0318.xml", "product_code":"dli", - "code":"103", + "code":"120", "des":"With CTS, you can record operations associated with DLI for later query, audit, and backtrack operations.", "doc_type":"usermanual", "kw":"DLI Operations That Can Be Recorded by CTS,Other Common Operations,User Guide", @@ -2110,28 +2436,27 @@ "uri":"dli_01_0550.html", "node_id":"dli_01_0550.xml", "product_code":"dli", - "code":"104", + "code":"121", "des":"A quota limits the quantity of a resource available to users, thereby preventing spikes in the usage of the resource.You can also request for an increased quota if your e", "doc_type":"usermanual", - "kw":"Quotas,Other Common Operations,User Guide", + "kw":"Quota Management,Other Common Operations,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], - "title":"Quotas", + "title":"Quota Management", "githuburl":"" }, { "uri":"dli_01_0539.html", "node_id":"dli_01_0539.xml", "product_code":"dli", - "code":"105", + "code":"122", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"FAQ", @@ -2152,7 +2477,7 @@ "uri":"dli_03_0037.html", "node_id":"dli_03_0037.xml", "product_code":"dli", - "code":"106", + "code":"123", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Flink Jobs", @@ -2169,14 +2494,35 @@ "title":"Flink Jobs", "githuburl":"" }, + { + "uri":"dli_03_0137.html", + "node_id":"dli_03_0137.xml", + "product_code":"dli", + "code":"124", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Usage", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Usage", + "githuburl":"" + }, { "uri":"dli_03_0083.html", "node_id":"dli_03_0083.xml", "product_code":"dli", - "code":"107", + "code":"125", "des":"DLI Flink jobs support the following data formats:Avro, Avro_merge, BLOB, CSV, EMAIL, JSON, ORC, Parquet, and XML.DLI Flink jobs support data from the following data sour", "doc_type":"usermanual", - "kw":"What Data Formats and Data Sources Are Supported by DLI Flink Jobs?,Flink Jobs,User Guide", + "kw":"What Data Formats and Data Sources Are Supported by DLI Flink Jobs?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -2194,10 +2540,10 @@ "uri":"dli_03_0139.html", "node_id":"dli_03_0139.xml", "product_code":"dli", - "code":"108", + "code":"126", "des":"A sub-user can view queues but cannot view Flink jobs. 
You can authorize the sub-user using DLI or IAM.Authorization on DLILog in to the DLI console using a tenant accoun", "doc_type":"usermanual", - "kw":"How Do I Authorize a Subuser to View Flink Jobs?,Flink Jobs,User Guide", + "kw":"How Do I Authorize a Subuser to View Flink Jobs?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -2215,10 +2561,10 @@ "uri":"dli_03_0090.html", "node_id":"dli_03_0090.xml", "product_code":"dli", - "code":"109", + "code":"127", "des":"DLI Flink jobs are highly available. You can enable the automatic restart function to automatically restart your jobs after short-time faults of peripheral services are r", "doc_type":"usermanual", - "kw":"How Do I Set Auto Restart upon Exception for a Flink Job?,Flink Jobs,User Guide", + "kw":"How Do I Set Auto Restart upon Exception for a Flink Job?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -2236,10 +2582,10 @@ "uri":"dli_03_0099.html", "node_id":"dli_03_0099.xml", "product_code":"dli", - "code":"110", + "code":"128", "des":"When you create a Flink SQL job or Flink Jar job, you can select Save Job Log on the job editing page to save job running logs to OBS.To set the OBS bucket for storing th", "doc_type":"usermanual", - "kw":"How Do I Save Flink Job Logs?,Flink Jobs,User Guide", + "kw":"How Do I Save Flink Job Logs?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -2257,10 +2603,10 @@ "uri":"dli_03_0043.html", "node_id":"dli_03_0043.xml", "product_code":"dli", - "code":"111", + "code":"129", "des":"DLI can output Flink job results to DIS. You can view the results in DIS. For details, see \"Obtaining Data from DIS\" in Data Ingestion Service User Guide.DLI can output F", "doc_type":"usermanual", - "kw":"How Can I Check Flink Job Results?,Flink Jobs,User Guide", + "kw":"How Can I Check Flink Job Results?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -2278,7 +2624,7 @@ "uri":"dli_03_0160.html", "node_id":"dli_03_0160.xml", "product_code":"dli", - "code":"112", + "code":"130", "des":"Choose Job Management > Flink Jobs. In the Operation column of the target job, choose More > Permissions. When a new user is authorized, No such user. userName:xxxx. is d", "doc_type":"usermanual", "kw":"Why Is Error \"No such user. userName:xxxx.\" Reported on the Flink Job Management Page When I Grant P", @@ -2299,7 +2645,7 @@ "uri":"dli_03_0180.html", "node_id":"dli_03_0180.xml", "product_code":"dli", - "code":"113", + "code":"131", "des":"Checkpoint was enabled when a Flink job is created, and the OBS bucket for storing checkpoints was specified. 
After a Flink job is manually stopped, no message is display", "doc_type":"usermanual", "kw":"How Do I Know Which Checkpoint the Flink Job I Stopped Will Be Restored to When I Start the Job Agai", @@ -2320,7 +2666,7 @@ "uri":"dli_03_0036.html", "node_id":"dli_03_0036.xml", "product_code":"dli", - "code":"114", + "code":"132", "des":"When you set running parameters of a DLI Flink job, you can enable Alarm Generation upon Job Exception to receive alarms when the job runs abnormally or is in arrears.If ", "doc_type":"usermanual", "kw":"Why Is a Message Displayed Indicating That the SMN Topic Does Not Exist When I Use the SMN Topic in ", @@ -2337,14 +2683,35 @@ "title":"Why Is a Message Displayed Indicating That the SMN Topic Does Not Exist When I Use the SMN Topic in DLI?", "githuburl":"" }, + { + "uri":"dli_03_0131.html", + "node_id":"dli_03_0131.xml", + "product_code":"dli", + "code":"133", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Flink SQL", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Flink SQL", + "githuburl":"" + }, { "uri":"dli_03_0130.html", "node_id":"dli_03_0130.xml", "product_code":"dli", - "code":"115", + "code":"134", "des":"The consumption capability of a Flink SQL job depends on the data source transmission, queue size, and job parameter settings. The peak consumption is 10 Mbit/s.", "doc_type":"usermanual", - "kw":"How Much Data Can Be Processed in a Day by a Flink SQL Job?,Flink Jobs,User Guide", + "kw":"How Much Data Can Be Processed in a Day by a Flink SQL Job?,Flink SQL,User Guide", "search_title":"", "metedata":[ { @@ -2362,7 +2729,7 @@ "uri":"dli_03_0061.html", "node_id":"dli_03_0061.xml", "product_code":"dli", - "code":"116", + "code":"135", "des":"The temp stream in Flink SQL is similar to a subquery. It is a logical stream used to simplify the SQL logic and does not generate data storage. Therefore, there is no ne", "doc_type":"usermanual", "kw":"Does Data in the Temporary Stream of Flink SQL Need to Be Cleared Periodically? How Do I Clear the D", @@ -2383,7 +2750,7 @@ "uri":"dli_03_0138.html", "node_id":"dli_03_0138.xml", "product_code":"dli", - "code":"117", + "code":"136", "des":"SymptomWhen you create a Flink SQL job and configure the parameters, you select an OBS bucket you have created. 
The system displays a message indicating that the OBS buck", "doc_type":"usermanual", "kw":"Why Is a Message Displayed Indicating That the OBS Bucket Is Not Authorized When I Select an OBS Buc", @@ -2404,10 +2771,10 @@ "uri":"dli_03_0089.html", "node_id":"dli_03_0089.xml", "product_code":"dli", - "code":"118", + "code":"137", "des":"When using a Flink SQL job, you need to create an OBS partition table for subsequent batch processing.In the following example, the day field is used as the partition fie", "doc_type":"usermanual", - "kw":"How Do I Create an OBS Partitioned Table for a Flink SQL Job?,Flink Jobs,User Guide", + "kw":"How Do I Create an OBS Partitioned Table for a Flink SQL Job?,Flink SQL,User Guide", "search_title":"", "metedata":[ { @@ -2425,10 +2792,10 @@ "uri":"dli_03_0075.html", "node_id":"dli_03_0075.xml", "product_code":"dli", - "code":"119", + "code":"138", "des":"In this example, the day field is used as the partition field with the parquet encoding format (only the parquet format is supported currently) to dump car_info data to O", "doc_type":"usermanual", - "kw":"How Do I Dump Data to OBS and Create an OBS Partitioned Table?,Flink Jobs,User Guide", + "kw":"How Do I Dump Data to OBS and Create an OBS Partitioned Table?,Flink SQL,User Guide", "search_title":"", "metedata":[ { @@ -2446,7 +2813,7 @@ "uri":"dli_03_0167.html", "node_id":"dli_03_0167.xml", "product_code":"dli", - "code":"120", + "code":"139", "des":"When I run the creation statement with an EL expression in the table name in a Flink SQL job, the following error message is displayed:DLI.0005: AnalysisException: t_user", "doc_type":"usermanual", "kw":"Why Is Error Message \"DLI.0005\" Displayed When I Use an EL Expression to Create a Table in a Flink S", @@ -2467,7 +2834,7 @@ "uri":"dli_03_0168.html", "node_id":"dli_03_0168.xml", "product_code":"dli", - "code":"121", + "code":"140", "des":"After data is written to OBS through the Flink job output stream, data cannot be queried from the DLI table created in the OBS file path.For example, use the following Fl", "doc_type":"usermanual", "kw":"Why Is No Data Queried in the DLI Table Created Using the OBS File Path When Data Is Written to OBS ", @@ -2488,7 +2855,7 @@ "uri":"dli_03_0174.html", "node_id":"dli_03_0174.xml", "product_code":"dli", - "code":"122", + "code":"141", "des":"After a Flink SQL job is submitted on DLI, the job fails to be executed. The following error information is displayed in the job log:connect to DIS failed java.lang.Illeg", "doc_type":"usermanual", "kw":"Why Does a Flink SQL Job Fails to Be Executed, and Is \"connect to DIS failed java.lang.IllegalArgume", @@ -2509,10 +2876,10 @@ "uri":"dli_03_0176.html", "node_id":"dli_03_0176.xml", "product_code":"dli", - "code":"123", + "code":"142", "des":"Semantic verification for a Flink SQL job (reading DIS data) fails. The following information is displayed when the job fails:Get dis channel xxxinfo failed. 
error info: ", "doc_type":"usermanual", - "kw":"Why Is Error \"Not authorized\" Reported When a Flink SQL Job Reads DIS Data?,Flink Jobs,User Guide", + "kw":"Why Is Error \"Not authorized\" Reported When a Flink SQL Job Reads DIS Data?,Flink SQL,User Guide", "search_title":"", "metedata":[ { @@ -2530,7 +2897,7 @@ "uri":"dli_03_0232.html", "node_id":"dli_03_0232.xml", "product_code":"dli", - "code":"124", + "code":"143", "des":"After a Flink SQL job consumed Kafka and sent data to the Elasticsearch cluster, the job was successfully executed, but no data is available.Possible causes are as follow", "doc_type":"usermanual", "kw":"Data Writing Fails After a Flink SQL Job Consumed Kafka and Sank Data to the Elasticsearch Cluster,F", @@ -2547,14 +2914,35 @@ "title":"Data Writing Fails After a Flink SQL Job Consumed Kafka and Sank Data to the Elasticsearch Cluster", "githuburl":"" }, + { + "uri":"dli_03_0132.html", + "node_id":"dli_03_0132.xml", + "product_code":"dli", + "code":"144", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Flink Jar Jobs", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Flink Jar Jobs", + "githuburl":"" + }, { "uri":"dli_03_0038.html", "node_id":"dli_03_0038.xml", "product_code":"dli", - "code":"125", + "code":"145", "des":"The procedure is as follows:Add the following code to the JAR file code of the Flink Jar job:// Configure the pom file on which the StreamExecutionEnvironment depends.\nSt", "doc_type":"usermanual", - "kw":"How Do I Configure Checkpoints for Flink Jar Jobs and Save the Checkpoints to OBS?,Flink Jobs,User G", + "kw":"How Do I Configure Checkpoints for Flink Jar Jobs and Save the Checkpoints to OBS?,Flink Jar Jobs,Us", "search_title":"", "metedata":[ { @@ -2572,7 +2960,7 @@ "uri":"dli_03_0044.html", "node_id":"dli_03_0044.xml", "product_code":"dli", - "code":"126", + "code":"146", "des":"Configuration files can be uploaded for user-defined jobs (JAR).Upload the configuration file to DLI through Package Management.In the Other Dependencies area of the Flin", "doc_type":"usermanual", "kw":"Does a Flink JAR Job Support Configuration File Upload? How Do I Upload a Configuration File?,Flink ", @@ -2593,10 +2981,10 @@ "uri":"dli_03_0119.html", "node_id":"dli_03_0119.xml", "product_code":"dli", - "code":"127", + "code":"147", "des":"The dependency of your Flink job conflicts with a built-in dependency of the DLI Flink platform. As a result, the job submission fails.Delete your JAR file that is the sa", "doc_type":"usermanual", - "kw":"Why Does the Submission Fail Due to Flink JAR File Conflict?,Flink Jobs,User Guide", + "kw":"Why Does the Submission Fail Due to Flink JAR File Conflict?,Flink Jar Jobs,User Guide", "search_title":"", "metedata":[ { @@ -2614,7 +3002,7 @@ "uri":"dli_03_0161.html", "node_id":"dli_03_0161.xml", "product_code":"dli", - "code":"128", + "code":"148", "des":"When a Flink Jar job is submitted to access GaussDB(DWS), an error message is displayed indicating that the job fails to be started. 
The job log contains the following er", "doc_type":"usermanual", "kw":"Why Does a Flink Jar Job Fail to Access GaussDB(DWS) and a Message Is Displayed Indicating Too Many ", @@ -2635,10 +3023,10 @@ "uri":"dli_03_0165.html", "node_id":"dli_03_0165.xml", "product_code":"dli", - "code":"129", + "code":"149", "des":"An exception occurred when a Flink Jar job is running. The following error information is displayed in the job log:org.apache.flink.shaded.curator.org.apache.curator.Conn", "doc_type":"usermanual", - "kw":"Why Is Error Message \"Authentication failed\" Displayed During Flink Jar Job Running?,Flink Jobs,User", + "kw":"Why Is Error Message \"Authentication failed\" Displayed During Flink Jar Job Running?,Flink Jar Jobs,", "search_title":"", "metedata":[ { @@ -2656,10 +3044,10 @@ "uri":"dli_03_0233.html", "node_id":"dli_03_0233.xml", "product_code":"dli", - "code":"130", + "code":"150", "des":"The storage path of the Flink Jar job checkpoints was set to an OBS bucket. The job failed to be submitted, and an error message indicating an invalid OBS bucket name was", "doc_type":"usermanual", - "kw":"Why Is Error Invalid OBS Bucket Name Reported After a Flink Job Submission Failed?,Flink Jobs,User G", + "kw":"Why Is Error Invalid OBS Bucket Name Reported After a Flink Job Submission Failed?,Flink Jar Jobs,Us", "search_title":"", "metedata":[ { @@ -2677,10 +3065,10 @@ "uri":"dli_03_0234.html", "node_id":"dli_03_0234.xml", "product_code":"dli", - "code":"131", + "code":"151", "des":"Flink Job submission failed. The exception information is as follows:Flink JAR files conflicted. The submitted Flink JAR file conflicted with the HDFS JAR file of the DLI", "doc_type":"usermanual", - "kw":"Why Does the Flink Submission Fail Due to Hadoop JAR File Conflict?,Flink Jobs,User Guide", + "kw":"Why Does the Flink Submission Fail Due to Hadoop JAR File Conflict?,Flink Jar Jobs,User Guide", "search_title":"", "metedata":[ { @@ -2698,10 +3086,10 @@ "uri":"dli_03_0266.html", "node_id":"dli_03_0266.xml", "product_code":"dli", - "code":"132", + "code":"152", "des":"You can use Flink Jar to connect to Kafka with SASL SSL authentication enabled.", "doc_type":"usermanual", - "kw":"How Do I Connect a Flink jar Job to SASL_SSL?,Flink Jobs,User Guide", + "kw":"How Do I Connect a Flink jar Job to SASL_SSL?,Flink Jar Jobs,User Guide", "search_title":"", "metedata":[ { @@ -2715,14 +3103,35 @@ "title":"How Do I Connect a Flink jar Job to SASL_SSL?", "githuburl":"" }, + { + "uri":"dli_03_0133.html", + "node_id":"dli_03_0133.xml", + "product_code":"dli", + "code":"153", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Performance Tuning", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Performance Tuning", + "githuburl":"" + }, { "uri":"dli_03_0106.html", "node_id":"dli_03_0106.xml", "product_code":"dli", - "code":"133", + "code":"154", "des":"Data Stacking in a Consumer GroupThe accumulated data of a consumer group can be calculated by the following formula: Total amount of data to be consumed by the consumer ", "doc_type":"usermanual", - "kw":"How Do I Optimize Performance of a Flink Job?,Flink Jobs,User Guide", + "kw":"How Do I Optimize Performance of a Flink Job?,Performance Tuning,User Guide", "search_title":"", "metedata":[ { @@ -2740,10 +3149,10 @@ "uri":"dli_03_0048.html", "node_id":"dli_03_0048.xml", "product_code":"dli", - "code":"134", + "code":"155", "des":"Add the following SQL statements to the Flink job:", "doc_type":"usermanual", - "kw":"How Do I Write Data to Different Elasticsearch Clusters in a Flink Job?,Flink Jobs,User Guide", + "kw":"How Do I Write Data to Different Elasticsearch Clusters in a Flink Job?,Performance Tuning,User Guid", "search_title":"", "metedata":[ { @@ -2761,10 +3170,10 @@ "uri":"dli_03_0096.html", "node_id":"dli_03_0096.xml", "product_code":"dli", - "code":"135", + "code":"156", "des":"The DLI Flink checkpoint/savepoint mechanism is complete and reliable. You can use this mechanism to prevent data loss when a job is manually restarted or restarted due t", "doc_type":"usermanual", - "kw":"How Do I Prevent Data Loss After Flink Job Restart?,Flink Jobs,User Guide", + "kw":"How Do I Prevent Data Loss After Flink Job Restart?,Performance Tuning,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", "opensource":"true", "documenttype":"usermanual", "IsMulti":"No", "IsBot":"Yes" } ], "title":"How Do I Prevent Data Loss After Flink Job Restart?", "githuburl":"" }, + { + "uri":"dli_03_0135.html", + "node_id":"dli_03_0135.xml", + "product_code":"dli", + "code":"157", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"O&M Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"O&M Guide", + "githuburl":"" + }, { "uri":"dli_03_0103.html", "node_id":"dli_03_0103.xml", "product_code":"dli", - "code":"136", + "code":"158", "des":"On the Flink job management page, hover the cursor on the status of the job that fails to be submitted to view the brief information about the failure.The possible causes", "doc_type":"usermanual", - "kw":"How Do I Locate a Flink Job Submission Error?,Flink Jobs,User Guide", + "kw":"How Do I Locate a Flink Job Submission Error?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -2803,10 +3233,10 @@ "uri":"dli_03_0105.html", "node_id":"dli_03_0105.xml", "product_code":"dli", - "code":"137", + "code":"159", "des":"On the Flink job management page, click Edit in the Operation column of the target job. 
On the displayed page, check whether Save Job Log in the Running Parameters tab is enab", "doc_type":"usermanual", - "kw":"How Do I Locate a Flink Job Running Error?,Flink Jobs,User Guide", + "kw":"How Do I Locate a Flink Job Running Error?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -2824,10 +3254,10 @@ "uri":"dli_03_0136.html", "node_id":"dli_03_0136.xml", "product_code":"dli", - "code":"138", + "code":"160", "des":"Flink's checkpointing is a fault tolerance and recovery mechanism. This mechanism ensures that real-time programs can self-recover in case of exceptions or machine issues", "doc_type":"usermanual", - "kw":"How Can I Check if a Flink Job Can Be Restored From a Checkpoint After Restarting It?,Flink Jobs,Use", + "kw":"How Can I Check if a Flink Job Can Be Restored From a Checkpoint After Restarting It?,O&M Guide,User", "search_title":"", "metedata":[ { @@ -2845,10 +3275,10 @@ "uri":"dli_03_0040.html", "node_id":"dli_03_0040.xml", "product_code":"dli", - "code":"139", + "code":"161", "des":"To rectify this fault, perform the following steps:Log in to the DIS management console. In the navigation pane, choose Stream Management. View the Flink job SQL statemen", "doc_type":"usermanual", - "kw":"Why Does DIS Stream Not Exist During Job Semantic Check?,Flink Jobs,User Guide", + "kw":"Why Does DIS Stream Not Exist During Job Semantic Check?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -2866,10 +3296,10 @@ "uri":"dli_03_0045.html", "node_id":"dli_03_0045.xml", "product_code":"dli", - "code":"140", + "code":"162", "des":"If the OBS bucket selected for a job is not authorized, perform the following steps:Select Enable Checkpointing or Save Job Log.Specify OBS Bucket.Select Authorize OBS.", "doc_type":"usermanual", - "kw":"Why Is the OBS Bucket Selected for Job Not Authorized?,Flink Jobs,User Guide", + "kw":"Why Is the OBS Bucket Selected for Job Not Authorized?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -2887,7 +3317,7 @@ "uri":"dli_03_0064.html", "node_id":"dli_03_0064.xml", "product_code":"dli", - "code":"141", + "code":"163", "des":"Mode for storing generated job logs when a DLI Flink job fails to be submitted or executed. The options are as follows:If the submission fails, a submission log is genera", "doc_type":"usermanual", "kw":"Why Are Logs Not Written to the OBS Bucket After a DLI Flink Job Fails to Be Submitted for Running?,", @@ -2908,10 +3338,10 @@ "uri":"dli_03_0235.html", "node_id":"dli_03_0235.xml", "product_code":"dli", - "code":"142", + "code":"164", "des":"The Flink/Spark UI was displayed with incomplete information.When the queue is used to run a job, the system releases the cluster and takes about 10 minutes to create a n", "doc_type":"usermanual", - "kw":"Why Is Information Displayed on the FlinkUI/Spark UI Page Incomplete?,Flink Jobs,User Guide", + "kw":"Why Is Information Displayed on the FlinkUI/Spark UI Page Incomplete?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -2929,10 +3359,10 @@ "uri":"dli_03_0236.html", "node_id":"dli_03_0236.xml", "product_code":"dli", - "code":"143", + "code":"165", "des":"JobManager and TaskManager heartbeats timed out. 
As a result, the Flink job is abnormal.Check whether the network is intermittently disconnected and whether the cluster l", "doc_type":"usermanual", - "kw":"Why Is the Flink Job Abnormal Due to Heartbeat Timeout Between JobManager and TaskManager?,Flink Job", + "kw":"Why Is the Flink Job Abnormal Due to Heartbeat Timeout Between JobManager and TaskManager?,O&M Guide", "search_title":"", "metedata":[ { @@ -2950,7 +3380,7 @@ "uri":"dli_03_0265.html", "node_id":"dli_03_0265.xml", "product_code":"dli", - "code":"144", + "code":"166", "des":"Test address connectivity.If the network is unreachable, rectify the network connection first. Ensure that the network connection between the DLI queue and the external d", "doc_type":"usermanual", "kw":"Why Is Error \"Timeout expired while fetching topic metadata\" Repeatedly Reported in Flink JobManager", @@ -2971,7 +3401,7 @@ "uri":"dli_03_0020.html", "node_id":"dli_03_0020.xml", "product_code":"dli", - "code":"145", + "code":"167", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Problems Related to SQL Jobs", @@ -2988,14 +3418,35 @@ "title":"Problems Related to SQL Jobs", "githuburl":"" }, + { + "uri":"dli_03_0216.html", + "node_id":"dli_03_0216.xml", + "product_code":"dli", + "code":"168", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Usage", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Usage", + "githuburl":"" + }, { "uri":"dli_03_0200.html", "node_id":"dli_03_0200.xml", "product_code":"dli", - "code":"146", + "code":"169", "des":"A temporary table is used to store intermediate results. When a transaction or session ends, the data in the temporary table can be automatically deleted. For example, in", "doc_type":"usermanual", - "kw":"SQL Jobs,Problems Related to SQL Jobs,User Guide", + "kw":"SQL Jobs,Usage,User Guide", "search_title":"", "metedata":[ { @@ -3009,14 +3460,35 @@ "title":"SQL Jobs", "githuburl":"" }, + { + "uri":"dli_03_0204.html", + "node_id":"dli_03_0204.xml", + "product_code":"dli", + "code":"170", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Job Development", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Job Development", + "githuburl":"" + }, { "uri":"dli_03_0086.html", "node_id":"dli_03_0086.xml", "product_code":"dli", - "code":"147", + "code":"171", "des":"If a large number of small files are generated during SQL execution, job execution and table query will take a long time. 
In this case, you should merge small files.Set t", "doc_type":"usermanual", - "kw":"How Do I Merge Small Files?,Problems Related to SQL Jobs,User Guide", + "kw":"How Do I Merge Small Files?,Job Development,User Guide", "search_title":"", "metedata":[ { @@ -3034,10 +3506,10 @@ "uri":"dli_03_0092.html", "node_id":"dli_03_0092.xml", "product_code":"dli", - "code":"148", + "code":"172", "des":"When creating an OBS table, you must specify a table path in the database. The path format is as follows: obs://xxx/database name/table name.If the specified path is akdc", "doc_type":"usermanual", - "kw":"How Do I Specify an OBS Path When Creating an OBS Table?,Problems Related to SQL Jobs,User Guide", + "kw":"How Do I Specify an OBS Path When Creating an OBS Table?,Job Development,User Guide", "search_title":"", "metedata":[ { @@ -3055,10 +3527,10 @@ "uri":"dli_03_0108.html", "node_id":"dli_03_0108.xml", "product_code":"dli", - "code":"149", + "code":"173", "des":"DLI allows you to associate JSON data in an OBS bucket to create tables in asynchronous mode.The statement for creating the table is as follows:", "doc_type":"usermanual", - "kw":"How Do I Create a Table Using JSON Data in an OBS Bucket?,Problems Related to SQL Jobs,User Guide", + "kw":"How Do I Create a Table Using JSON Data in an OBS Bucket?,Job Development,User Guide", "search_title":"", "metedata":[ { @@ -3076,10 +3548,10 @@ "uri":"dli_03_0087.html", "node_id":"dli_03_0087.xml", "product_code":"dli", - "code":"150", + "code":"174", "des":"You can use the where condition statement in the select statement to filter data. For example:", "doc_type":"usermanual", - "kw":"How Do I Set Local Variables in SQL Statements?,Problems Related to SQL Jobs,User Guide", + "kw":"How Do I Set Local Variables in SQL Statements?,Job Development,User Guide", "search_title":"", "metedata":[ { @@ -3097,10 +3569,10 @@ "uri":"dli_03_0069.html", "node_id":"dli_03_0069.xml", "product_code":"dli", - "code":"151", + "code":"175", "des":"The correct method for using the count function to perform aggregation is as follows:OrIf an incorrect method is used, an error will be reported.", "doc_type":"usermanual", - "kw":"How Can I Use the count Function to Perform Aggregation?,Problems Related to SQL Jobs,User Guide", + "kw":"How Can I Use the count Function to Perform Aggregation?,Job Development,User Guide", "search_title":"", "metedata":[ { @@ -3118,10 +3590,10 @@ "uri":"dli_03_0072.html", "node_id":"dli_03_0072.xml", "product_code":"dli", - "code":"152", + "code":"176", "des":"You can use the cross-region replication function of OBS. The procedure is as follows:Export the DLI table data in region 1 to the user-defined OBS bucket.Use the OBS cro", "doc_type":"usermanual", - "kw":"How Do I Synchronize DLI Table Data from One Region to Another?,Problems Related to SQL Jobs,User Gu", + "kw":"How Do I Synchronize DLI Table Data from One Region to Another?,Job Development,User Guide", "search_title":"", "metedata":[ { @@ -3139,10 +3611,10 @@ "uri":"dli_03_0191.html", "node_id":"dli_03_0191.xml", "product_code":"dli", - "code":"153", + "code":"177", "des":"Currently, DLI does not allow you to insert table data into specific fields. 
To insert table data, you must insert data of all table fields at a time.", "doc_type":"usermanual", - "kw":"How Do I Insert Table Data into Specific Fields of a Table Using a SQL Job?,Problems Related to SQL ", + "kw":"How Do I Insert Table Data into Specific Fields of a Table Using a SQL Job?,Job Development,User Gui", "search_title":"", "metedata":[ { @@ -3156,14 +3628,35 @@ "title":"How Do I Insert Table Data into Specific Fields of a Table Using a SQL Job?", "githuburl":"" }, + { + "uri":"dli_03_0206.html", + "node_id":"dli_03_0206.xml", + "product_code":"dli", + "code":"178", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Job O&M Errors", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Job O&M Errors", + "githuburl":"" + }, { "uri":"dli_03_0014.html", "node_id":"dli_03_0014.xml", "product_code":"dli", - "code":"154", + "code":"179", "des":"Create an OBS directory with a unique name. Alternatively, you can manually delete the existing OBS directory and submit the job again. However, exercise caution when del", "doc_type":"usermanual", - "kw":"Why Is Error \"path obs://xxx already exists\" Reported When Data Is Exported to OBS?,Problems Related", + "kw":"Why Is Error \"path obs://xxx already exists\" Reported When Data Is Exported to OBS?,Job O&M Errors,U", "search_title":"", "metedata":[ { @@ -3181,7 +3674,7 @@ "uri":"dli_03_0066.html", "node_id":"dli_03_0066.xml", "product_code":"dli", - "code":"155", + "code":"180", "des":"This message indicates that the two tables to be joined contain the same column, but the owner of the column is not specified when the command is executed.For example, ta", "doc_type":"usermanual", "kw":"Why Is Error \"SQL_ANALYSIS_ERROR: Reference 't.id' is ambiguous, could be: t.id, t.id.;\" Displayed W", "search_title":"", "metedata":[ { @@ -3202,7 +3695,7 @@ "uri":"dli_03_0071.html", "node_id":"dli_03_0071.xml", "product_code":"dli", - "code":"156", + "code":"181", "des":"Check if your account is in arrears and top it up if necessary.If the same error message persists after the top-up, log out of your account and log back in.", "doc_type":"usermanual", "kw":"Why Is Error \"The current account does not have permission to perform this operation,the current acc", "search_title":"", "metedata":[ { @@ -3223,7 +3716,7 @@ "uri":"dli_03_0145.html", "node_id":"dli_03_0145.xml", "product_code":"dli", - "code":"157", + "code":"182", "des":"Cause AnalysisWhen you query the partitioned table XX.YYY, the partition column is not specified in the search criteria.A partitioned table can be queried only when the q", "doc_type":"usermanual", "kw":"Why Is Error \"There should be at least one partition pruning predicate on partitioned table XX.YYY\" ", "search_title":"", "metedata":[ { @@ -3244,7 +3737,7 @@ "uri":"dli_03_0169.html", "node_id":"dli_03_0169.xml", "product_code":"dli", - "code":"158", + "code":"183", "des":"The following error message is displayed when the LOAD DATA command is executed by a Spark SQL job to import data to a DLI table:In some cases, the following error messag", "doc_type":"usermanual", "kw":"Why Is Error \"IllegalArgumentException: Buffer size too small. 
size\" Reported When Data Is Loaded to", @@ -3265,10 +3758,10 @@ "uri":"dli_03_0189.html", "node_id":"dli_03_0189.xml", "product_code":"dli", - "code":"159", + "code":"184", "des":"An error is reported during SQL job execution:Please contact DLI service. DLI.0002: FileNotFoundException: getFileStatus on obs://xxx: status [404]Check whether there is ", "doc_type":"usermanual", - "kw":"Why Is Error \"DLI.0002 FileNotFoundException\" Reported During SQL Job Running?,Problems Related to S", + "kw":"Why Is Error \"DLI.0002 FileNotFoundException\" Reported During SQL Job Running?,Job O&M Errors,User G", "search_title":"", "metedata":[ { @@ -3286,10 +3779,10 @@ "uri":"dli_03_0046.html", "node_id":"dli_03_0046.xml", "product_code":"dli", - "code":"160", + "code":"185", "des":"Currently, DLI supports the Hive syntax for creating tables of the TEXTFILE, SEQUENCEFILE, RCFILE, ORC, AVRO, and PARQUET file types. If the file format specified for cre", "doc_type":"usermanual", - "kw":"Why Is a Schema Parsing Error Reported When I Create a Hive Table Using CTAS?,Problems Related to SQ", + "kw":"Why Is a Schema Parsing Error Reported When I Create a Hive Table Using CTAS?,Job O&M Errors,User Gu", "search_title":"", "metedata":[ { @@ -3307,7 +3800,7 @@ "uri":"dli_03_0173.html", "node_id":"dli_03_0173.xml", "product_code":"dli", - "code":"161", + "code":"186", "des":"When you run a DLI SQL script on DataArts Studio, the log shows that the statements fail to be executed. The error information is as follows:DLI.0999: RuntimeException: o", "doc_type":"usermanual", "kw":"Why Is Error \"org.apache.hadoop.fs.obs.OBSIOException\" Reported When I Run DLI SQL Scripts on DataAr", @@ -3328,7 +3821,7 @@ "uri":"dli_03_0172.html", "node_id":"dli_03_0172.xml", "product_code":"dli", - "code":"162", + "code":"187", "des":"After the migration job is submitted, the following error information is displayed in the log:org.apache.sqoop.common.SqoopException:UQUERY_CONNECTOR_0001:Invoke DLI serv", "doc_type":"usermanual", "kw":"Why Is Error \"UQUERY_CONNECTOR_0001:Invoke DLI service api failed\" Reported in the Job Log When I Us", @@ -3349,10 +3842,10 @@ "uri":"dli_03_0207.html", "node_id":"dli_03_0207.xml", "product_code":"dli", - "code":"163", + "code":"188", "des":"Error message \"File not Found\" is displayed when a SQL job is accessed.Generally, the file cannot be found due to a read/write conflict. 
Check whether a job is overwritin", "doc_type":"usermanual", - "kw":"Why Is Error \"File not Found\" Reported When I Access a SQL Job?,Problems Related to SQL Jobs,User Gu", + "kw":"Why Is Error \"File not Found\" Reported When I Access a SQL Job?,Job O&M Errors,User Guide", "search_title":"", "metedata":[ { @@ -3370,10 +3863,10 @@ "uri":"dli_03_0208.html", "node_id":"dli_03_0208.xml", "product_code":"dli", - "code":"164", + "code":"189", "des":"Error message \"DLI.0003: AccessControlException XXX\" is reported when a SQL job is accessed.Check the OBS bucket written in the AccessControlException to confirm if your ", "doc_type":"usermanual", - "kw":"Why Is Error \"DLI.0003: AccessControlException XXX\" Reported When I Access a SQL Job?,Problems Relat", + "kw":"Why Is Error \"DLI.0003: AccessControlException XXX\" Reported When I Access a SQL Job?,Job O&M Errors", "search_title":"", "metedata":[ { @@ -3391,7 +3884,7 @@ "uri":"dli_03_0209.html", "node_id":"dli_03_0209.xml", "product_code":"dli", - "code":"165", + "code":"190", "des":"Error message \"DLI.0001: org.apache.hadoop.security.AccessControlException: verifyBucketExists on {{bucket name}}: status [403]\" is reported when a SQL job is accessed.Yo", "doc_type":"usermanual", "kw":"Why Is Error \"DLI.0001: org.apache.hadoop.security.AccessControlException: verifyBucketExists on {{b", "search_title":"", "metedata":[ { @@ -3412,7 +3905,7 @@ "uri":"dli_03_0210.html", "node_id":"dli_03_0210.xml", "product_code":"dli", - "code":"166", + "code":"191", "des":"Error message \"The current account does not have permission to perform this operation,the current account was restricted.\" is reported during SQL statement execution.Chec", "doc_type":"usermanual", "kw":"Why Is Error \"The current account does not have permission to perform this operation,the current acc", "search_title":"", "metedata":[ { "prodname":"dli", "opensource":"true", "documenttype":"usermanual", "IsMulti":"No", "IsBot":"Yes" } ], "title":"Why Is Error \"The current account does not have permission to perform this operation,the current account was restricted. Restricted for no budget\" Reported During SQL Statement Execution?", "githuburl":"" }, + { + "uri":"dli_03_0211.html", + "node_id":"dli_03_0211.xml", + "product_code":"dli", + "code":"192", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"O&M Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"O&M Guide", + "githuburl":"" + }, { "uri":"dli_03_0196.html", "node_id":"dli_03_0196.xml", "product_code":"dli", - "code":"167", + "code":"193", "des":"If the job runs slowly, perform the following steps to find the causes and rectify the fault:Check whether the problem is caused by FullGC.Log in to the DLI console. 
In t", "doc_type":"usermanual", - "kw":"How Do I Troubleshoot Slow SQL Jobs?,Problems Related to SQL Jobs,User Guide", + "kw":"How Do I Troubleshoot Slow SQL Jobs?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -3454,10 +3968,10 @@ "uri":"dli_03_0091.html", "node_id":"dli_03_0091.xml", "product_code":"dli", - "code":"168", + "code":"194", "des":"You can view SQL job logs for routine O&M.Obtain the ID of the DLI job executed on the DataArts Studio console.Job IDOn the DLI console, choose Job Management > SQL Jobs.", "doc_type":"usermanual", - "kw":"How Do I View DLI SQL Logs?,Problems Related to SQL Jobs,User Guide", + "kw":"How Do I View DLI SQL Logs?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -3475,10 +3989,10 @@ "uri":"dli_03_0116.html", "node_id":"dli_03_0116.xml", "product_code":"dli", - "code":"169", + "code":"195", "des":"You can view the job execution records when a job is running.Log in to the DLI management console.In the navigation pane on the left, choose Job Management > SQL Jobs.Ent", "doc_type":"usermanual", - "kw":"How Do I View SQL Execution Records?,Problems Related to SQL Jobs,User Guide", + "kw":"How Do I View SQL Execution Records?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -3496,10 +4010,10 @@ "uri":"dli_03_0093.html", "node_id":"dli_03_0093.xml", "product_code":"dli", - "code":"170", + "code":"196", "des":"If the execution of an SQL statement takes a long time, you need to access the Spark UI to check the execution status.If data skew occurs, the running time of a stage exc", "doc_type":"usermanual", - "kw":"How Do I Eliminate Data Skew by Configuring AE Parameters?,Problems Related to SQL Jobs,User Guide", + "kw":"How Do I Eliminate Data Skew by Configuring AE Parameters?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -3517,10 +4031,10 @@ "uri":"dli_03_0184.html", "node_id":"dli_03_0184.xml", "product_code":"dli", - "code":"171", + "code":"197", "des":"A DLI table exists but cannot be queried on the DLI console.If a table exists but cannot be queried, there is a high probability that the current user does not have the p", "doc_type":"usermanual", - "kw":"What Can I Do If a Table Cannot Be Queried on the DLI Console?,Problems Related to SQL Jobs,User Gui", + "kw":"What Can I Do If a Table Cannot Be Queried on the DLI Console?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -3538,10 +4052,10 @@ "uri":"dli_03_0013.html", "node_id":"dli_03_0013.xml", "product_code":"dli", - "code":"172", + "code":"198", "des":"A high compression ratio of OBS tables in the Parquet or ORC format (for example, a compression ratio of 5 or higher compared with text compression) will lead to large da", "doc_type":"usermanual", - "kw":"The Compression Ratio of OBS Tables Is Too High,Problems Related to SQL Jobs,User Guide", + "kw":"The Compression Ratio of OBS Tables Is Too High,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -3559,10 +4073,10 @@ "uri":"dli_03_0009.html", "node_id":"dli_03_0009.xml", "product_code":"dli", - "code":"173", + "code":"199", "des":"DLI supports only UTF-8-encoded texts. 
Ensure that data is encoded using UTF-8 during table creation and import.", "doc_type":"usermanual", - "kw":"How Can I Avoid Garbled Characters Caused by Inconsistent Character Codes?,Problems Related to SQL J", + "kw":"How Can I Avoid Garbled Characters Caused by Inconsistent Character Codes?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -3580,7 +4094,7 @@ "uri":"dli_03_0175.html", "node_id":"dli_03_0175.xml", "product_code":"dli", - "code":"174", + "code":"200", "des":"User A created the testTable table in a database through a SQL job and granted user B the permission to insert and delete table data. User A deleted the testTable table a", "doc_type":"usermanual", "kw":"Do I Need to Grant Table Permissions to a User and Project After I Delete a Table and Create One wit", @@ -3601,7 +4115,7 @@ "uri":"dli_03_0177.html", "node_id":"dli_03_0177.xml", "product_code":"dli", - "code":"175", + "code":"201", "des":"A CSV file is imported to a DLI partitioned table, but the imported file data does not contain the data in the partitioning column. The partitioning column needs to be sp", "doc_type":"usermanual", "kw":"Why Can't I Query Table Data After Data Is Imported to a DLI Partitioned Table Because the File to B", @@ -3622,7 +4136,7 @@ "uri":"dli_03_0181.html", "node_id":"dli_03_0181.xml", "product_code":"dli", - "code":"176", + "code":"202", "des":"When an OBS foreign table is created, a field in the specified OBS file contains a carriage return line feed (CRLF) character. As a result, the data is incorrect.The stat", "doc_type":"usermanual", "kw":"How Do I Fix the Data Error Caused by CRLF Characters in a Field of the OBS File Used to Create an E", @@ -3643,10 +4157,10 @@ "uri":"dli_03_0182.html", "node_id":"dli_03_0182.xml", "product_code":"dli", - "code":"177", + "code":"203", "des":"A SQL job contains join operations. After the job is submitted, it is stuck in the Running state and no result is returned.When a Spark SQL job has join operations on sma", "doc_type":"usermanual", - "kw":"Why Does a SQL Job That Has Join Operations Stay in the Running State?,Problems Related to SQL Jobs,", + "kw":"Why Does a SQL Job That Has Join Operations Stay in the Running State?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -3664,7 +4178,7 @@ "uri":"dli_03_0187.html", "node_id":"dli_03_0187.xml", "product_code":"dli", - "code":"178", + "code":"204", "des":"The on clause was not added to the SQL statement for joining tables. As a result, the Cartesian product query occurs due to multi-table association, and the queue resourc", "doc_type":"usermanual", "kw":"The on Clause Is Not Added When Tables Are Joined. Cartesian Product Query Causes High Resource Usag", @@ -3685,10 +4199,10 @@ "uri":"dli_03_0190.html", "node_id":"dli_03_0190.xml", "product_code":"dli", - "code":"179", + "code":"205", "des":"Partition data is manually uploaded to a partition of an OBS table. 
However, the data cannot be queried using DLI SQL editor.After manually adding partition data, you nee", "doc_type":"usermanual", - "kw":"Why Can't I Query Data After I Manually Add Data to the Partition Directory of an OBS Table?,Problem", + "kw":"Why Can't I Query Data After I Manually Add Data to the Partition Directory of an OBS Table?,O&M Gui", "search_title":"", "metedata":[ { @@ -3706,10 +4220,10 @@ "uri":"dli_03_0212.html", "node_id":"dli_03_0212.xml", "product_code":"dli", - "code":"180", + "code":"206", "des":"To dynamically overwrite the specified partitioned data in the DataSource table, set dli.sql.dynamicPartitionOverwrite.enabled to true and then run the insert overwrite s", "doc_type":"usermanual", - "kw":"Why Is All Data Overwritten When insert overwrite Is Used to Overwrite Partitioned Table?,Problems R", + "kw":"Why Is All Data Overwritten When insert overwrite Is Used to Overwrite Partitioned Table?,O&M Guide,", "search_title":"", "metedata":[ { @@ -3727,10 +4241,10 @@ "uri":"dli_03_0213.html", "node_id":"dli_03_0213.xml", "product_code":"dli", - "code":"181", + "code":"207", "des":"The possible causes and solutions are as follows:After you purchase a DLI queue and submit a SQL job for the first time, wait for 5 to 10 minutes. After the cluster is st", "doc_type":"usermanual", - "kw":"Why Is a SQL Job Stuck in the Submitting State?,Problems Related to SQL Jobs,User Guide", + "kw":"Why Is a SQL Job Stuck in the Submitting State?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -3748,10 +4262,10 @@ "uri":"dli_03_0214.html", "node_id":"dli_03_0214.xml", "product_code":"dli", - "code":"182", + "code":"208", "des":"Spark does not have the datetime type and uses the TIMESTAMP type instead.You can use a function to convert data types.The following is an example.select cast(create_date", "doc_type":"usermanual", - "kw":"Why Is the create_date Field in the RDS Table Is a Timestamp in the DLI query result?,Problems Relat", + "kw":"Why Is the create_date Field in the RDS Table a Timestamp in the DLI Query Result?,O&M Guide,User", "search_title":"", "metedata":[ { @@ -3769,10 +4283,10 @@ "uri":"dli_03_0215.html", "node_id":"dli_03_0215.xml", "product_code":"dli", - "code":"183", + "code":"209", "des":"If the table name is changed immediately after SQL statements are executed, the data size of the table may be incorrect.If you need to change the table name, change it 5 ", "doc_type":"usermanual", - "kw":"What Can I Do If datasize Cannot Be Changed After the Table Name Is Changed in a Finished SQL Job?,P", + "kw":"What Can I Do If datasize Cannot Be Changed After the Table Name Is Changed in a Finished SQL Job?,O", "search_title":"", "metedata":[ { @@ -3790,10 +4304,10 @@ "uri":"dli_03_0231.html", "node_id":"dli_03_0231.xml", "product_code":"dli", - "code":"184", + "code":"210", "des":"When DLI is used to insert data into an OBS temporary table, only part of data is imported.Possible causes are as follows:The amount of data read during job execution is ", "doc_type":"usermanual", - "kw":"Why Is the Data Volume Changes When Data Is Imported from DLI to OBS?,Problems Related to SQL Jobs,U", + "kw":"Why Does the Data Volume Change When Data Is Imported from DLI to OBS?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -3811,7 +4325,7 @@ "uri":"dli_03_0021.html", "node_id":"dli_03_0021.xml", "product_code":"dli", - "code":"185", + "code":"211", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Problems Related to Spark Jobs", "search_title":"", "metedata":[ { "prodname":"dli", "opensource":"true", "documenttype":"usermanual", "IsMulti":"No", "IsBot":"Yes" } ], "title":"Problems Related to Spark Jobs", "githuburl":"" }, + { + "uri":"dli_03_0163.html", + "node_id":"dli_03_0163.xml", + "product_code":"dli", + "code":"212", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Usage", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Usage", + "githuburl":"" + }, { "uri":"dli_03_0201.html", "node_id":"dli_03_0201.xml", "product_code":"dli", - "code":"186", + "code":"213", "des":"DLI Spark does not support job scheduling. You can use other services, such as DataArts Studio, or use APIs or SDKs to customize job scheduling.The Spark SQL syntax does no", "doc_type":"usermanual", - "kw":"Spark Jobs,Problems Related to Spark Jobs,User Guide", + "kw":"Spark Jobs,Usage,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", "opensource":"true", "documenttype":"usermanual", "IsMulti":"No", "IsBot":"Yes" } ], "title":"Spark Jobs", "githuburl":"" }, + { + "uri":"dli_03_0217.html", + "node_id":"dli_03_0217.xml", + "product_code":"dli", + "code":"214", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Job Development", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Job Development", + "githuburl":"" + }, { "uri":"dli_03_0107.html", "node_id":"dli_03_0107.xml", "product_code":"dli", - "code":"187", + "code":"215", "des":"To use Spark to write data into a DLI table, configure the following parameters:fs.obs.access.keyfs.obs.secret.keyfs.obs.implfs.obs.endpointThe following is an example:", "doc_type":"usermanual", - "kw":"How Do I Use Spark to Write Data into a DLI Table?,Problems Related to Spark Jobs,User Guide", + "kw":"How Do I Use Spark to Write Data into a DLI Table?,Job Development,User Guide", "search_title":"", "metedata":[ { @@ -3874,10 +4430,10 @@ "uri":"dli_03_0017.html", "node_id":"dli_03_0017.xml", "product_code":"dli", - "code":"188", + "code":"216", "des":"Hard-coded or plaintext AK and SK pose significant security risks. To ensure security, encrypt your AK and SK, store them in configuration files or environment variables,", "doc_type":"usermanual", - "kw":"How Do I Set the AK/SK for a Queue to Operate an OBS Table?,Problems Related to Spark Jobs,User Guid", + "kw":"How Do I Set the AK/SK for a Queue to Operate an OBS Table?,Job Development,User Guide", "search_title":"", "metedata":[ { @@ -3895,10 +4451,10 @@ "uri":"dli_03_0102.html", "node_id":"dli_03_0102.xml", "product_code":"dli", - "code":"189", + "code":"217", "des":"Log in to the DLI console. In the navigation pane, choose Job Management > Spark Jobs. 
In the job list, locate the target job and click next to Job ID to view the parame", "doc_type":"usermanual", - "kw":"How Do I View the Resource Usage of DLI Spark Jobs?,Problems Related to Spark Jobs,User Guide", + "kw":"How Do I View the Resource Usage of DLI Spark Jobs?,Job Development,User Guide", "search_title":"", "metedata":[ { @@ -3916,7 +4472,7 @@ "uri":"dli_03_0076.html", "node_id":"dli_03_0076.xml", "product_code":"dli", - "code":"190", + "code":"218", "des":"If the pymysql module is missing, check whether the corresponding EGG package exists. If the package does not exist, upload the pyFile package on the Package Management p", "doc_type":"usermanual", "kw":"How Do I Use Python Scripts to Access the MySQL Database If the pymysql Module Is Missing from the S", "search_title":"", "metedata":[ { @@ -3937,10 +4493,10 @@ "uri":"dli_03_0082.html", "node_id":"dli_03_0082.xml", "product_code":"dli", - "code":"191", + "code":"219", "des":"DLI natively supports PySpark.For most cases, Python is preferred for data analysis, and PySpark is the best choice for big data analysis. Generally, JVM programs are pac", "doc_type":"usermanual", - "kw":"How Do I Run a Complex PySpark Program in DLI?,Problems Related to Spark Jobs,User Guide", + "kw":"How Do I Run a Complex PySpark Program in DLI?,Job Development,User Guide", "search_title":"", "metedata":[ { @@ -3958,10 +4514,10 @@ "uri":"dli_03_0127.html", "node_id":"dli_03_0127.xml", "product_code":"dli", - "code":"192", + "code":"220", "des":"You can use DLI Spark jobs to access data in the MySQL database using either of the following methods:Solution 1: Buy a queue, create an enhanced datasource connection, a", "doc_type":"usermanual", - "kw":"How Does a Spark Job Access a MySQL Database?,Problems Related to Spark Jobs,User Guide", + "kw":"How Does a Spark Job Access a MySQL Database?,Job Development,User Guide", "search_title":"", "metedata":[ { @@ -3979,7 +4535,7 @@ "uri":"dli_03_0068.html", "node_id":"dli_03_0068.xml", "product_code":"dli", - "code":"193", + "code":"221", "des":"When shuffle statements, such as GROUP BY and JOIN, are executed in Spark jobs, data skew occurs, which slows down the job execution.To solve this problem, you can config", "doc_type":"usermanual", "kw":"How Do I Use JDBC to Set the spark.sql.shuffle.partitions Parameter to Improve the Task Concurrency?", "search_title":"", "metedata":[ { @@ -4000,10 +4556,10 @@ "uri":"dli_03_0118.html", "node_id":"dli_03_0118.xml", "product_code":"dli", - "code":"194", + "code":"222", "des":"You can use SparkFiles to read the file submitted using --file from a local path: SparkFiles.get(\"Name of the uploaded file\").The file path in the Driver is different fro", "doc_type":"usermanual", - "kw":"How Do I Read Uploaded Files for a Spark Jar Job?,Problems Related to Spark Jobs,User Guide", + "kw":"How Do I Read Uploaded Files for a Spark Jar Job?,Job Development,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", "opensource":"true", "documenttype":"usermanual", "IsMulti":"No", "IsBot":"Yes" } ], "title":"How Do I Read Uploaded Files for a Spark Jar Job?", "githuburl":"" }, + { + "uri":"dli_03_0218.html", + "node_id":"dli_03_0218.xml", + "product_code":"dli", + "code":"223", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Job O&M Errors", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Job O&M Errors", + "githuburl":"" + }, { "uri":"dli_03_0156.html", "node_id":"dli_03_0156.xml", "product_code":"dli", - "code":"195", + "code":"224", "des":"The following error is reported when a Spark job accesses OBS data:Set the AK/SK to enable Spark jobs to access OBS data. For details, see How Do I Set the AK/SK for a Qu", "doc_type":"usermanual", "kw":"Why Are Errors \"ResponseCode: 403\" and \"ResponseStatus: Forbidden\" Reported When a Spark Job Accesse", "search_title":"", "metedata":[ { @@ -4042,7 +4619,7 @@ "uri":"dli_03_0164.html", "node_id":"dli_03_0164.xml", "product_code":"dli", - "code":"196", + "code":"225", "des":"Check whether the OBS bucket is used to store DLI logs on the Global Configuration > Job Configurations page. The job log bucket cannot be used for other purposes.", "doc_type":"usermanual", "kw":"Why Is Error \"verifyBucketExists on XXXX: status [403]\" Reported When I Use a Spark Job to Access an", "search_title":"", "metedata":[ { @@ -4063,10 +4640,10 @@ "uri":"dli_03_0157.html", "node_id":"dli_03_0157.xml", "product_code":"dli", - "code":"197", + "code":"226", "des":"When a Spark job accesses a large amount of data, for example, accessing data in a GaussDB(DWS) database, you are advised to set the number of concurrent tasks and enable", "doc_type":"usermanual", - "kw":"Why Is a Job Running Timeout Reported When a Spark Job Runs a Large Amount of Data?,Problems Related", + "kw":"Why Is a Job Running Timeout Reported When a Spark Job Runs a Large Amount of Data?,Job O&M Errors,U", "search_title":"", "metedata":[ { @@ -4084,7 +4661,7 @@ "uri":"dli_03_0188.html", "node_id":"dli_03_0188.xml", "product_code":"dli", - "code":"198", + "code":"227", "des":"Spark jobs cannot access SFTP. Upload the files you want to access to OBS and then you can analyze the data using Spark jobs.", "doc_type":"usermanual", "kw":"Why Does the Job Fail to Be Executed and the Log Shows that the File Directory Is Abnormal When I Us", "search_title":"", "metedata":[ { @@ -4105,10 +4682,10 @@ "uri":"dli_03_0192.html", "node_id":"dli_03_0192.xml", "product_code":"dli", - "code":"199", + "code":"228", "des":"When a Spark job is running, an error message is displayed, indicating that the user does not have the database permission. The error information is as follows:org.apache", "doc_type":"usermanual", - "kw":"Why Does the Job Fail to Be Executed Due to Insufficient Database and Table Permissions?,Problems Re", + "kw":"Why Does the Job Fail to Be Executed Due to Insufficient Database and Table Permissions?,Job O&M Err", "search_title":"", "metedata":[ { "prodname":"dli", "opensource":"true", "documenttype":"usermanual", "IsMulti":"No", "IsBot":"Yes" } ], "title":"Why Does the Job Fail to Be Executed Due to Insufficient Database and Table Permissions?", "githuburl":"" }, + { + "uri":"dli_03_0219.html", + "node_id":"dli_03_0219.xml", + "product_code":"dli", + "code":"229", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"O&M Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"O&M Guide", + "githuburl":"" + }, { "uri":"dli_03_0077.html", "node_id":"dli_03_0077.xml", "product_code":"dli", - "code":"200", + "code":"230", "des":"I cannot find the specified Python environment after adding the Python 3 package.Set spark.yarn.appMasterEnv.PYSPARK_PYTHON to python3 in the conf file to specify the Pyt", "doc_type":"usermanual", - "kw":"Why Can't I Find the Specified Python Environment After Adding the Python Package?,Problems Related ", + "kw":"Why Can't I Find the Specified Python Environment After Adding the Python Package?,O&M Guide,User Gu", "search_title":"", "metedata":[ { @@ -4147,10 +4745,10 @@ "uri":"dli_03_0220.html", "node_id":"dli_03_0220.xml", "product_code":"dli", - "code":"201", + "code":"231", "des":"The remaining CUs in the queue may be insufficient. As a result, the job cannot be submitted.To view the remaining CUs of a queue, perform the following steps:Check the C", "doc_type":"usermanual", - "kw":"Why Is a Spark Jar Job Stuck in the Submitting State?,Problems Related to Spark Jobs,User Guide", + "kw":"Why Is a Spark Jar Job Stuck in the Submitting State?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -4168,7 +4766,7 @@ "uri":"dli_03_0001.html", "node_id":"dli_03_0001.xml", "product_code":"dli", - "code":"202", + "code":"232", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Product Consultation", @@ -4185,14 +4783,35 @@ "title":"Product Consultation", "githuburl":"" }, + { + "uri":"dli_03_0221.html", + "node_id":"dli_03_0221.xml", + "product_code":"dli", + "code":"233", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Usage", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Usage", + "githuburl":"" + }, { "uri":"dli_03_0002.html", "node_id":"dli_03_0002.xml", "product_code":"dli", - "code":"203", + "code":"234", "des":"Data Lake Insight (DLI) is a serverless data processing and analysis service fully compatible with Apache Spark and Apache Flink ecosystems. 
It frees you from managing an", "doc_type":"usermanual", - "kw":"What Is DLI?,Product Consultation,User Guide", + "kw":"What Is DLI?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -4210,10 +4829,10 @@ "uri":"dli_03_0025.html", "node_id":"dli_03_0025.xml", "product_code":"dli", - "code":"204", + "code":"235", "des":"DLI supports the following data formats:ParquetCSVORCJsonAvro", "doc_type":"usermanual", - "kw":"Which Data Formats Does DLI Support?,Product Consultation,User Guide", + "kw":"Which Data Formats Does DLI Support?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -4231,10 +4850,10 @@ "uri":"dli_03_0115.html", "node_id":"dli_03_0115.xml", "product_code":"dli", - "code":"205", + "code":"236", "des":"The Spark component of DLI is a fully managed service. You can only use the DLI Spark through its APIs. The Spark component of MRS is built on the VM in an MRS cluster. ", "doc_type":"usermanual", - "kw":"What Are the Differences Between MRS Spark and DLI Spark?,Product Consultation,User Guide", + "kw":"What Are the Differences Between MRS Spark and DLI Spark?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -4252,10 +4871,10 @@ "uri":"dli_03_0029.html", "node_id":"dli_03_0029.xml", "product_code":"dli", - "code":"206", + "code":"237", "des":"DLI data can be stored in either of the following:OBS: Data used by SQL jobs, Spark jobs, and Flink jobs can be stored in OBS, reducing storage costs.DLI: The column-base", "doc_type":"usermanual", - "kw":"Where Can DLI Data Be Stored?,Product Consultation,User Guide", + "kw":"Where Can DLI Data Be Stored?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -4273,10 +4892,10 @@ "uri":"dli_03_0117.html", "node_id":"dli_03_0117.xml", "product_code":"dli", - "code":"207", + "code":"238", "des":"DLI tables store data within the DLI service, and you do not need to know the data storage path.OBS tables store data in your OBS buckets, and you need to manage the sour", "doc_type":"usermanual", - "kw":"What Are the Differences Between DLI Tables and OBS Tables?,Product Consultation,User Guide", + "kw":"What Are the Differences Between DLI Tables and OBS Tables?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -4294,10 +4913,10 @@ "uri":"dli_03_0010.html", "node_id":"dli_03_0010.xml", "product_code":"dli", - "code":"208", + "code":"239", "des":"Currently, DLI supports analysis only on the data uploaded to the cloud. In scenarios where regular (for example, on a per day basis) one-off analysis on incremental data", "doc_type":"usermanual", - "kw":"How Can I Use DLI If Data Is Not Uploaded to OBS?,Product Consultation,User Guide", + "kw":"How Can I Use DLI If Data Is Not Uploaded to OBS?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -4315,10 +4934,10 @@ "uri":"dli_03_0129.html", "node_id":"dli_03_0129.xml", "product_code":"dli", - "code":"209", + "code":"240", "des":"Data in the OBS bucket shared by IAM users under the same account can be imported. 
You cannot import data in an OBS bucket shared by other IAM accounts.", "doc_type":"usermanual", - "kw":"Can I Import OBS Bucket Data Shared by Other Tenants into DLI?,Product Consultation,User Guide", + "kw":"Can I Import OBS Bucket Data Shared by Other Tenants into DLI?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -4336,7 +4955,7 @@ "uri":"dli_03_0264.html", "node_id":"dli_03_0264.xml", "product_code":"dli", - "code":"210", + "code":"241", "des":"Log in to the management console.Click in the upper left corner and select a region and a project.Click the My Quota icon in the upper right corner of the page.The Serv", "doc_type":"usermanual", "kw":"Why Is Error \"Failed to create the database. {\"error_code\":\"DLI.1028\";\"error_msg\":\"Already reached t", "search_title":"", "metedata":[ { @@ -4357,10 +4976,10 @@ "uri":"dli_03_0263.html", "node_id":"dli_03_0263.xml", "product_code":"dli", - "code":"211", + "code":"242", "des":"No, a global variable can only be used by the user who created it. Global variables can be used to simplify complex parameters. For example, long and difficult variables ", "doc_type":"usermanual", - "kw":"Can a Member Account Use Global Variables Created by Other Member Accounts?,Product Consultation,Use", + "kw":"Can a Member Account Use Global Variables Created by Other Member Accounts?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -4374,14 +4993,35 @@ "title":"Can a Member Account Use Global Variables Created by Other Member Accounts?", "githuburl":"" }, + { + "uri":"dli_03_0222.html", + "node_id":"dli_03_0222.xml", + "product_code":"dli", + "code":"243", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Job Management", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Job Management", + "githuburl":"" + }, { "uri":"dli_03_0126.html", "node_id":"dli_03_0126.xml", "product_code":"dli", - "code":"212", + "code":"244", "des":"You are advised to perform the following operations to run a large number of DLI jobs:Group the DLI jobs by type, and run each group on a queue.Alternatively, create IAM", "doc_type":"usermanual", - "kw":"How Do I Manage Tens of Thousands of Jobs Running on DLI?,Product Consultation,User Guide", + "kw":"How Do I Manage Tens of Thousands of Jobs Running on DLI?,Job Management,User Guide", "search_title":"", "metedata":[ { @@ -4399,10 +5039,10 @@ "uri":"dli_03_0162.html", "node_id":"dli_03_0162.xml", "product_code":"dli", - "code":"213", + "code":"245", "des":"The field names of tables that have been created cannot be changed.You can create a table, define new table fields, and migrate data from the old table to the new one.", "doc_type":"usermanual", - "kw":"How Do I Change the Name of a Field in a Created Table?,Product Consultation,User Guide", + "kw":"How Do I Change the Name of a Field in a Created Table?,Job Management,User Guide", "search_title":"", "metedata":[ { @@ -4416,14 +5056,35 @@ "title":"How Do I Change the Name of a Field in a Created Table?", "githuburl":"" }, + { + "uri":"dli_03_0261.html", + "node_id":"dli_03_0261.xml", + "product_code":"dli", + "code":"246", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD 
services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Privacy and Security", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Privacy and Security", + "githuburl":"" + }, { "uri":"dli_03_0260.html", "node_id":"dli_03_0260.xml", "product_code":"dli", - "code":"214", + "code":"247", "des":"No. The spark.acls.enable configuration item is not used in DLI. The Apache Spark command injection vulnerability (CVE-2022-33891) does not exist in DLI.", "doc_type":"usermanual", - "kw":"Does DLI Have the Apache Spark Command Injection Vulnerability (CVE-2022-33891)?,Product Consultatio", + "kw":"Does DLI Have the Apache Spark Command Injection Vulnerability (CVE-2022-33891)?,Privacy and Securit", "search_title":"", "metedata":[ { @@ -4441,7 +5102,7 @@ "uri":"dli_03_0053.html", "node_id":"dli_03_0053.xml", "product_code":"dli", - "code":"215", + "code":"248", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Quota", @@ -4462,7 +5123,7 @@ "uri":"dli_03_0031.html", "node_id":"dli_03_0031.xml", "product_code":"dli", - "code":"216", + "code":"249", "des":"Log in to the management console.Click in the upper left corner and select Region and Project.Click (the My Quotas icon) in the upper right corner.The Service Quota pag", "doc_type":"usermanual", "kw":"How Do I View My Quotas?,Quota,User Guide", @@ -4483,7 +5144,7 @@ "uri":"dli_03_0032.html", "node_id":"dli_03_0032.xml", "product_code":"dli", - "code":"217", + "code":"250", "des":"The system does not support online quota adjustment. To increase a resource quota, dial the hotline or send an email to the customer service. We will process your applica", "doc_type":"usermanual", "kw":"How Do I Increase a Quota?,Quota,User Guide", @@ -4504,7 +5165,7 @@ "uri":"dli_03_0054.html", "node_id":"dli_03_0054.xml", "product_code":"dli", - "code":"218", + "code":"251", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Permission", @@ -4521,14 +5182,35 @@ "title":"Permission", "githuburl":"" }, + { + "uri":"dli_03_0223.html", + "node_id":"dli_03_0223.xml", + "product_code":"dli", + "code":"252", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Usage", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Usage", + "githuburl":"" + }, { "uri":"dli_03_0100.html", "node_id":"dli_03_0100.xml", "product_code":"dli", - "code":"219", + "code":"253", "des":"DLI has a comprehensive permission control mechanism and supports fine-grained authentication through Identity and Access Management (IAM). You can create policies in IAM", "doc_type":"usermanual", - "kw":"How Do I Manage Fine-Grained DLI Permissions?,Permission,User Guide", + "kw":"How Do I Manage Fine-Grained DLI Permissions?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -4546,10 +5228,10 @@ "uri":"dli_03_0008.html", "node_id":"dli_03_0008.xml", "product_code":"dli", - "code":"220", + "code":"254", "des":"You cannot perform permission-related operations on the partition column of a partitioned table.However, when you grant the permission of any non-partition column in a pa", "doc_type":"usermanual", - "kw":"What Is Column Permission Granting of a DLI Partition Table?,Permission,User Guide", + "kw":"What Is Column Permission Granting of a DLI Partition Table?,Usage,User Guide", "search_title":"", "metedata":[ { @@ -4563,14 +5245,35 @@ "title":"What Is Column Permission Granting of a DLI Partition Table?", "githuburl":"" }, + { + "uri":"dli_03_0226.html", + "node_id":"dli_03_0226.xml", + "product_code":"dli", + "code":"255", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"O&M Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"O&M Guide", + "githuburl":"" + }, { "uri":"dli_03_0140.html", "node_id":"dli_03_0140.xml", "product_code":"dli", - "code":"221", + "code":"256", "des":"When you submit a job, a message is displayed indicating that the job fails to be submitted due to insufficient permission caused by arrears. 
In this case, you need to ch", "doc_type":"usermanual", - "kw":"Why Does My Account Have Insufficient Permissions Due to Arrears?,Permission,User Guide", + "kw":"Why Does My Account Have Insufficient Permissions Due to Arrears?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -4588,7 +5291,7 @@ "uri":"dli_03_0195.html", "node_id":"dli_03_0195.xml", "product_code":"dli", - "code":"222", + "code":"257", "des":"When a user updates an existing program package, the following error information is displayed:\"error_code\"*DLI.0003\",\"error_msg\":\"Permission denied for resource 'resourc", "doc_type":"usermanual", "kw":"Why Does the System Display a Message Indicating Insufficient Permissions When I Update a Program Pa", "search_title":"", "metedata":[ { @@ -4609,10 +5312,10 @@ "uri":"dli_03_0227.html", "node_id":"dli_03_0227.xml", "product_code":"dli", - "code":"223", + "code":"258", "des":"When the SQL query statement is executed, the system displays a message indicating that the user does not have the permission to query resources.Error information: DLI.00", "doc_type":"usermanual", - "kw":"Why Is Error \"DLI.0003: Permission denied for resource...\" Reported When I Run a SQL Statement?,Perm", + "kw":"Why Is Error \"DLI.0003: Permission denied for resource...\" Reported When I Run a SQL Statement?,O&M ", "search_title":"", "metedata":[ { @@ -4630,10 +5333,10 @@ "uri":"dli_03_0228.html", "node_id":"dli_03_0228.xml", "product_code":"dli", - "code":"224", + "code":"259", "des":"The table permission has been granted and verified. However, after a period of time, an error is reported indicating that the table query fails.There are two possible rea", "doc_type":"usermanual", - "kw":"Why Can't I Query Table Data After I've Been Granted Table Permissions?,Permission,User Guide", + "kw":"Why Can't I Query Table Data After I've Been Granted Table Permissions?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -4651,7 +5354,7 @@ "uri":"dli_03_0057.html", "node_id":"dli_03_0057.xml", "product_code":"dli", - "code":"225", + "code":"260", "des":"If a table inherits database permissions, you do not need to regrant the inherited permissions to the table.When you grant permissions on a table on the console:If you se", "doc_type":"usermanual", "kw":"Will an Error Be Reported if the Inherited Permissions Are Regranted to a Table That Inherits Databa", "search_title":"", "metedata":[ { @@ -4672,10 +5375,10 @@ "uri":"dli_03_0067.html", "node_id":"dli_03_0067.xml", "product_code":"dli", - "code":"226", + "code":"261", "des":"User A created Table1.User B created View1 based on Table1.After the Select Table permission on Table1 is granted to user C, user C fails to query View1.User B does not h", "doc_type":"usermanual", - "kw":"Why Can't I Query a View After I'm Granted the Select Table Permission on the View?,Permission,User ", + "kw":"Why Can't I Query a View After I'm Granted the Select Table Permission on the View?,O&M Guide,User G", "search_title":"", "metedata":[ { @@ -4693,7 +5396,7 @@ "uri":"dli_03_0049.html", "node_id":"dli_03_0049.xml", "product_code":"dli", - "code":"227", + "code":"262", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"doc_type":"usermanual",
"kw":"Queue",
"search_title":"",
"metedata":[
{
@@ -4710,14 +5413,35 @@
"title":"Queue",
"githuburl":""
},
+ {
+ "uri":"dli_03_0229.html",
+ "node_id":"dli_03_0229.xml",
+ "product_code":"dli",
+ "code":"263",
+ "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
+ "doc_type":"usermanual",
+ "kw":"Usage",
+ "search_title":"",
+ "metedata":[
+ {
+ "prodname":"dli",
+ "opensource":"true",
+ "documenttype":"usermanual",
+ "IsMulti":"No",
+ "IsBot":"Yes"
+ }
+ ],
+ "title":"Usage",
+ "githuburl":""
+ },
{
"uri":"dli_03_0109.html",
"node_id":"dli_03_0109.xml",
"product_code":"dli",
- "code":"228",
+ "code":"264",
"des":"Currently, you are not allowed to modify the description of a created queue. You can add the description when purchasing the queue.",
"doc_type":"usermanual",
- "kw":"Does the Description of a DLI Queue Can Be Modified?,Queue,User Guide",
+ "kw":"Can the Description of a DLI Queue Be Modified?,Usage,User Guide",
"search_title":"",
"metedata":[
{
@@ -4735,10 +5459,10 @@
"uri":"dli_03_0166.html",
"node_id":"dli_03_0166.xml",
"product_code":"dli",
- "code":"229",
+ "code":"265",
"des":"Deleting a queue does not cause table data loss in your database.",
"doc_type":"usermanual",
- "kw":"Will Table Data in My Database Be Lost If I Delete a Queue?,Queue,User Guide",
+ "kw":"Will Table Data in My Database Be Lost If I Delete a Queue?,Usage,User Guide",
"search_title":"",
"metedata":[
{
@@ -4756,10 +5480,10 @@
"uri":"dli_03_0170.html",
"node_id":"dli_03_0170.xml",
"product_code":"dli",
- "code":"230",
+ "code":"266",
"des":"You need to develop a mechanism to retry failed jobs. When a faulty queue is recovered, your application tries to submit the failed jobs to the queue again.",
"doc_type":"usermanual",
- "kw":"How Does DLI Ensure the Reliability of Spark Jobs When a Queue Is Abnormal?,Queue,User Guide",
+ "kw":"How Does DLI Ensure the Reliability of Spark Jobs When a Queue Is Abnormal?,Usage,User Guide",
"search_title":"",
"metedata":[
{
@@ -4777,10 +5501,10 @@
"uri":"dli_03_0098.html",
"node_id":"dli_03_0098.xml",
"product_code":"dli",
- "code":"231",
+ "code":"267",
"des":"DLI allows you to subscribe to an SMN topic for failed jobs.Log in to the DLI console.In the navigation pane on the left, choose Queue Management.On the Queue Management ",
"doc_type":"usermanual",
- "kw":"How Do I Monitor Queue Exceptions?,Queue,User Guide",
+ "kw":"How Do I Monitor Queue Exceptions?,Usage,User Guide",
"search_title":"",
"metedata":[
{
@@ -4794,14 +5518,35 @@
"title":"How Do I Monitor Queue Exceptions?",
"githuburl":""
},
+ {
+ "uri":"dli_03_0230.html",
+ "node_id":"dli_03_0230.xml",
+ "product_code":"dli",
+ "code":"268",
+ "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"O&M Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"O&M Guide", + "githuburl":"" + }, { "uri":"dli_03_0095.html", "node_id":"dli_03_0095.xml", "product_code":"dli", - "code":"232", + "code":"269", "des":"To check the running status of the DLI queue and determine whether to run more jobs on that queue, you need to check the queue load.Search for Cloud Eye on the console.In", "doc_type":"usermanual", - "kw":"How Do I View DLI Queue Load?,Queue,User Guide", + "kw":"How Do I View DLI Queue Load?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -4819,10 +5564,10 @@ "uri":"dli_03_0183.html", "node_id":"dli_03_0183.xml", "product_code":"dli", - "code":"233", + "code":"270", "des":"You need to check the large number of jobs in the Submitting and Running states on the queue.Use Cloud Eye to view jobs in different states on the queue. The procedure is", "doc_type":"usermanual", - "kw":"How Do I Determine Whether There Are Too Many Jobs in the Current Queue?,Queue,User Guide", + "kw":"How Do I Determine Whether There Are Too Many Jobs in the Current Queue?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -4840,10 +5585,10 @@ "uri":"dli_03_0065.html", "node_id":"dli_03_0065.xml", "product_code":"dli", - "code":"234", + "code":"271", "des":"Currently, DLI provides two types of queues, For SQL and For general use. SQL queues are used to run SQL jobs. General-use queues are compatible with Spark queues of earl", "doc_type":"usermanual", - "kw":"How Do I Switch an Earlier-Version Spark Queue to a General-Purpose Queue?,Queue,User Guide", + "kw":"How Do I Switch an Earlier-Version Spark Queue to a General-Purpose Queue?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -4861,10 +5606,10 @@ "uri":"dli_03_0193.html", "node_id":"dli_03_0193.xml", "product_code":"dli", - "code":"235", + "code":"272", "des":"DLI queues do not use resources or bandwidth when no job is running. In this case, the running status of DLI queues is not displayed on CES.", "doc_type":"usermanual", - "kw":"Why Cannot I View the Resource Running Status of DLI Queues on Cloud Eye?,Queue,User Guide", + "kw":"Why Cannot I View the Resource Running Status of DLI Queues on Cloud Eye?,O&M Guide,User Guide", "search_title":"", "metedata":[ { @@ -4882,10 +5627,10 @@ "uri":"dli_03_0088.html", "node_id":"dli_03_0088.xml", "product_code":"dli", - "code":"236", + "code":"273", "des":"In DLI, 64 CU = 64 cores and 256 GB memory.In a Spark job, if the driver occupies 4 cores and 16 GB memory, the executor can occupy 60 cores and 240 GB memory.", "doc_type":"usermanual", - "kw":"How Do I Allocate Queue Resources for Running Spark Jobs If I Have Purchased 64 CUs?,Queue,User Guid", + "kw":"How Do I Allocate Queue Resources for Running Spark Jobs If I Have Purchased 64 CUs?,O&M Guide,User ", "search_title":"", "metedata":[ { @@ -4903,7 +5648,7 @@ "uri":"dli_03_0159.html", "node_id":"dli_03_0159.xml", "product_code":"dli", - "code":"237", + "code":"274", "des":"Queue plans create failed. The plan xxx target cu is out of quota is displayed when you create a scheduled scaling task.The CU quota of the current account is insufficien", "doc_type":"usermanual", "kw":"Why Is Error \"Queue plans create failed. 
The plan xxx target cu is out of quota\" Reported When I Sch", @@ -4924,7 +5669,7 @@ "uri":"dli_03_0171.html", "node_id":"dli_03_0171.xml", "product_code":"dli", - "code":"238", + "code":"275", "des":"After a SQL job was submitted to the default queue, the job runs abnormally. The job log reported that the execution timed out. The exception logs are as follows:[ERROR] ", "doc_type":"usermanual", "kw":"Why Is a Timeout Exception Reported When a DLI SQL Statement Fails to Be Executed on the Default Que", @@ -4941,11 +5686,50 @@ "title":"Why Is a Timeout Exception Reported When a DLI SQL Statement Fails to Be Executed on the Default Queue?", "githuburl":"" }, + { + "uri":"dli_03_0276.html", + "node_id":"dli_03_0276.xml", + "product_code":"dli", + "code":"276", + "des":"In daily big data analysis work, it is important to allocate and manage compute resources properly to provide a good job execution environment.You can allocate resources ", + "doc_type":"usermanual", + "kw":"How Can I Check the Actual and Used CUs for an Elastic Resource Pool as Well as the Required CUs for", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"usermanual" + } + ], + "title":"How Can I Check the Actual and Used CUs for an Elastic Resource Pool as Well as the Required CUs for a Job?", + "githuburl":"" + }, { "uri":"dli_03_0022.html", "node_id":"dli_03_0022.xml", "product_code":"dli", - "code":"239", + "code":"277", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"usermanual", + "kw":"Datasource Connections", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "opensource":"true", + "documenttype":"usermanual", + "IsMulti":"No", + "IsBot":"Yes" + } + ], + "title":"Datasource Connections", + "githuburl":"" + }, + { + "uri":"dli_03_0110.html", + "node_id":"dli_03_0110.xml", + "product_code":"dli", + "code":"278", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Datasource Connections", @@ -4966,7 +5750,7 @@ "uri":"dli_03_0128.html", "node_id":"dli_03_0128.xml", "product_code":"dli", - "code":"240", + "code":"279", "des":"You need to create a VPC peering connection to enable network connectivity. Take MRS as an example. If DLI and MRS clusters are in the same VPC, and the security group is", "doc_type":"usermanual", "kw":"Why Do I Need to Create a VPC Peering Connection for an Enhanced Datasource Connection?,Datasource C", @@ -4987,7 +5771,7 @@ "uri":"dli_03_0237.html", "node_id":"dli_03_0237.xml", "product_code":"dli", - "code":"241", + "code":"280", "des":"An enhanced datasource connection failed to pass the network connectivity test. Datasource connection cannot be bound to a queue. The following error information is displ", "doc_type":"usermanual", "kw":"Failed to Bind a Queue to an Enhanced Datasource Connection,Datasource Connections,User Guide", @@ -5008,7 +5792,7 @@ "uri":"dli_03_0238.html", "node_id":"dli_03_0238.xml", "product_code":"dli", - "code":"242", + "code":"281", "des":"The outbound rule had been configured for the security group of the queue associated with the enhanced datasource connection. 
The datasource authentication used a passwor",
"doc_type":"usermanual",
"kw":"DLI Failed to Connect to GaussDB(DWS) Through an Enhanced Datasource Connection,Datasource Connectio",
"search_title":"",
"metedata":[
{
@@ -5029,7 +5813,7 @@
"uri":"dli_03_0179.html",
"node_id":"dli_03_0179.xml",
"product_code":"dli",
- "code":"243",
+ "code":"282",
"des":"A datasource connection is created and bound to a queue. The connectivity test fails and the following error information is displayed:failed to connect to specified addre",
"doc_type":"usermanual",
"kw":"What Do I Do If the Datasource Connection Is Created But the Network Connectivity Test Fails?,Datasou",
"search_title":"",
"metedata":[
{
@@ -5050,7 +5834,7 @@
"uri":"dli_03_0186.html",
"node_id":"dli_03_0186.xml",
"product_code":"dli",
- "code":"244",
+ "code":"283",
"des":"Configuring the Connection Between a DLI Queue and a Data Source in a Private NetworkIf your DLI job needs to connect to a data source, for example, MRS, RDS, CSS, Kafka,",
"doc_type":"usermanual",
"kw":"How Do I Configure the Network Between a DLI Queue and a Data Source?,Datasource Connections,User Gu",
"search_title":"",
"metedata":[
{
@@ -5071,7 +5855,7 @@
"uri":"dli_03_0257.html",
"node_id":"dli_03_0257.xml",
"product_code":"dli",
- "code":"245",
+ "code":"284",
"des":"The possible causes and solutions are as follows:If you have created a queue, do not bind it to a datasource connection immediately. Wait for 5 to 10 minutes. After the c",
"doc_type":"usermanual",
"kw":"What Can I Do If a Datasource Connection Is Stuck in Creating State When I Try to Bind a Queue to It",
"search_title":"",
"metedata":[
{
@@ -5092,7 +5876,7 @@
"uri":"dli_03_0259.html",
"node_id":"dli_03_0259.xml",
"product_code":"dli",
- "code":"246",
+ "code":"285",
"des":"DLI enhanced datasource connection uses VPC peering to directly connect the VPC networks of the desired data sources for point-to-point data exchanges.",
"doc_type":"usermanual",
"kw":"How Do I Connect DLI to Data Sources?,Datasource Connections,User Guide",
"search_title":"",
"metedata":[
{
@@ -5109,14 +5893,35 @@
"title":"How Do I Connect DLI to Data Sources?",
"githuburl":""
},
+ {
+ "uri":"dli_03_0112.html",
+ "node_id":"dli_03_0112.xml",
+ "product_code":"dli",
+ "code":"286",
+ "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
+ "doc_type":"usermanual",
+ "kw":"Cross-Source Analysis",
+ "search_title":"",
+ "metedata":[
+ {
+ "prodname":"dli",
+ "opensource":"true",
+ "documenttype":"usermanual",
+ "IsMulti":"No",
+ "IsBot":"Yes"
+ }
+ ],
+ "title":"Cross-Source Analysis",
+ "githuburl":""
+ },
{
"uri":"dli_03_0011.html",
"node_id":"dli_03_0011.xml",
"product_code":"dli",
- "code":"247",
+ "code":"287",
"des":"To perform a query on data stored on services rather than DLI, perform the following steps:Assume that the data to be queried is stored on multiple services (for example, O",
"doc_type":"usermanual",
- "kw":"How Can I Perform Query on Data Stored on Services Rather Than DLI?,Datasource Connections,User Guid",
+ "kw":"How Can I Perform a Query on Data Stored on Services Rather Than DLI?,Cross-Source Analysis,User Guide",
"search_title":"",
"metedata":[
{
@@ -5134,10 +5939,10 @@
"uri":"dli_03_0085.html",
"node_id":"dli_03_0085.xml",
"product_code":"dli",
- "code":"248",
+ "code":"288",
"des":"Connect VPCs in different regions.Create an enhanced datasource connection on DLI and bind it to a queue.Add a DLI route.",
"doc_type":"usermanual",
- "kw":"How Can I Access Data Across Regions?,Datasource Connections,User Guide",
+ "kw":"How Can I Access Data Across Regions?,Cross-Source Analysis,User Guide",
"search_title":"",
"metedata":[
{
@@ -5155,7 +5960,7 @@
"uri":"dli_03_0028.html",
"node_id":"dli_03_0028.xml",
"product_code":"dli",
- "code":"249",
+ "code":"289",
"des":"When data is inserted into DLI, set the ID field to NULL.",
"doc_type":"usermanual",
"kw":"How Do I Set the Auto-increment Primary Key or Other Fields That Are Automatically Filled in the RDS",
"search_title":"",
"metedata":[
{
@@ -5172,11 +5977,32 @@
"title":"How Do I Set the Auto-increment Primary Key or Other Fields That Are Automatically Filled in the RDS Table When Creating a DLI and Associating It with the RDS Table?",
"githuburl":""
},
+ {
+ "uri":"dli_03_0256.html",
+ "node_id":"dli_03_0256.xml",
+ "product_code":"dli",
+ "code":"290",
+ "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
+ "doc_type":"usermanual",
+ "kw":"Datasource Connection O&M",
+ "search_title":"",
+ "metedata":[
+ {
+ "prodname":"dli",
+ "opensource":"true",
+ "documenttype":"usermanual",
+ "IsMulti":"No",
+ "IsBot":"Yes"
+ }
+ ],
+ "title":"Datasource Connection O&M",
+ "githuburl":""
+ },
{
"uri":"dli_03_0047.html",
"node_id":"dli_03_0047.xml",
"product_code":"dli",
- "code":"250",
+ "code":"291",
"des":"Possible CausesThe network connectivity is abnormal. Check whether the security group is correctly selected and whether the VPC is correctly configured.The network connec",
"doc_type":"usermanual",
"kw":"Why Is the Error Message \"communication link failure\" Displayed When I Use a Newly Activated Datasou",
"search_title":"",
"metedata":[
{
@@ -5197,7 +6023,7 @@
"uri":"dli_03_0080.html",
"node_id":"dli_03_0080.xml",
"product_code":"dli",
- "code":"251",
+ "code":"292",
"des":"The cluster host information is not added to the datasource connection. 
As a result, the KRB authentication fails, the connection times out, and no error is recorded in l", "doc_type":"usermanual", "kw":"Connection Times Out During MRS HBase Datasource Connection, and No Error Is Recorded in Logs,Dataso", @@ -5218,10 +6044,10 @@ "uri":"dli_03_0111.html", "node_id":"dli_03_0111.xml", "product_code":"dli", - "code":"252", + "code":"293", "des":"When you create a VPC peering connection for the datasource connection, the following error information is displayed:Before you create a datasource connection, check whet", "doc_type":"usermanual", - "kw":"Why Can't I Find the Subnet When Creating a DLI Datasource Connection?,Datasource Connections,User G", + "kw":"Why Can't I Find the Subnet When Creating a DLI Datasource Connection?,Datasource Connection O&M,Use", "search_title":"", "metedata":[ { @@ -5239,7 +6065,7 @@ "uri":"dli_03_0239.html", "node_id":"dli_03_0239.xml", "product_code":"dli", - "code":"253", + "code":"294", "des":"A datasource RDS table was created in the DataArts Studio, and the insert overwrite statement was executed to write data into RDS. DLI.0999: BatchUpdateException: Incorre", "doc_type":"usermanual", "kw":"Error Message \"Incorrect string value\" Is Displayed When insert overwrite Is Executed on a Datasourc", @@ -5260,7 +6086,7 @@ "uri":"dli_03_0250.html", "node_id":"dli_03_0250.xml", "product_code":"dli", - "code":"254", + "code":"295", "des":"The system failed to create a datasource RDS table, and null pointer error was reported.The following table creation statement was used:The RDS database is in a PostGre c", "doc_type":"usermanual", "kw":"Null Pointer Error Is Displayed When the System Creates a Datasource RDS Table,Datasource Connection", @@ -5281,7 +6107,7 @@ "uri":"dli_03_0251.html", "node_id":"dli_03_0251.xml", "product_code":"dli", - "code":"255", + "code":"296", "des":"The system failed to execute insert overwrite on the datasource GaussDB(DWS) table, and org.postgresql.util.PSQLException: ERROR: tuple concurrently updated was displayed", "doc_type":"usermanual", "kw":"Error Message \"org.postgresql.util.PSQLException: ERROR: tuple concurrently updated\" Is Displayed Wh", @@ -5302,7 +6128,7 @@ "uri":"dli_03_0252.html", "node_id":"dli_03_0252.xml", "product_code":"dli", - "code":"256", + "code":"297", "des":"A datasource table was used to import data to a CloudTable HBase table. This HBase table contains a column family and a rowkey for 100 million simulating data records. Th", "doc_type":"usermanual", "kw":"RegionTooBusyException Is Reported When Data Is Imported to a CloudTable HBase Table Through a Datas", @@ -5323,7 +6149,7 @@ "uri":"dli_03_0253.html", "node_id":"dli_03_0253.xml", "product_code":"dli", - "code":"257", + "code":"298", "des":"A table was created on GaussDB(DWS) and then a datasource connection was created on DLI to read and write data. An error message was displayed during data writing, indica", "doc_type":"usermanual", "kw":"A Null Value Is Written Into a Non-Null Field When a DLI Datasource Connection Is Used to Connect to", @@ -5344,7 +6170,7 @@ "uri":"dli_03_0254.html", "node_id":"dli_03_0254.xml", "product_code":"dli", - "code":"258", + "code":"299", "des":"A datasource GaussDB(DWS) table and the datasource connection were created in DLI, and the schema of the source table in GaussDB(DWS) were updated. 
During the job executi",
"doc_type":"usermanual",
"kw":"An Insert Operation Failed After the Schema of the GaussDB(DWS) Source Table Is Updated,Datasource C",
"search_title":"",
"metedata":[
{
@@ -5365,7 +6191,7 @@
"uri":"dli_03_0056.html",
"node_id":"dli_03_0056.xml",
"product_code":"dli",
- "code":"259",
+ "code":"300",
"des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"doc_type":"usermanual",
"kw":"APIs",
"search_title":"",
"metedata":[
{
@@ -5386,7 +6212,7 @@
"uri":"dli_03_0060.html",
"node_id":"dli_03_0060.xml",
"product_code":"dli",
- "code":"260",
+ "code":"301",
"des":"In the REST API provided by DLI, the request header can be added to the request URI, for example, Content-Type.Content-Type indicates the request body type or format. The",
"doc_type":"usermanual",
"kw":"Why Is Error \"unsupported media Type\" Reported When I Submit a SQL Job?,APIs,User Guide",
"search_title":"",
"metedata":[
{
@@ -5407,7 +6233,7 @@
"uri":"dli_03_0125.html",
"node_id":"dli_03_0125.xml",
"product_code":"dli",
- "code":"261",
+ "code":"302",
"des":"When different IAM users call an API under the same enterprise project in the same region, the project ID is the same.",
"doc_type":"usermanual",
"kw":"Is the Project ID Fixed when Different IAM Users Call an API?,APIs,User Guide",
"search_title":"",
"metedata":[
{
@@ -5428,7 +6254,7 @@
"uri":"dli_03_0178.html",
"node_id":"dli_03_0178.xml",
"product_code":"dli",
- "code":"262",
+ "code":"303",
"des":"When the API call for submitting a SQL job times out, the following error information is displayed:There are currently no resources tracked in the state, so there is ",
"doc_type":"usermanual",
"kw":"What Can I Do If an Error Is Reported When the Execution of the API for Creating a SQL Job Times Out",
"search_title":"",
"metedata":[
{
@@ -5449,7 +6275,7 @@
"uri":"dli_03_0058.html",
"node_id":"dli_03_0058.xml",
"product_code":"dli",
- "code":"263",
+ "code":"304",
"des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"doc_type":"usermanual",
"kw":"SDKs",
"search_title":"",
"metedata":[
{
@@ -5470,7 +6296,7 @@
"uri":"dli_03_0073.html",
"node_id":"dli_03_0073.xml",
"product_code":"dli",
- "code":"264",
+ "code":"305",
"des":"When you query the SQL job results using SDK, the system checks the job status when the job is submitted. The timeout interval set in the system is 300s. If the job is no",
"doc_type":"usermanual",
"kw":"How Do I Set the Timeout Duration for Querying SQL Job Results Using SDK?,SDKs,User Guide",
"search_title":"",
"metedata":[
{
@@ -5491,7 +6317,7 @@
"uri":"dli_03_0255.html",
"node_id":"dli_03_0255.xml",
"product_code":"dli",
- "code":"265",
+ "code":"306",
"des":"Run the ping command to check whether dli.xxx can be accessed.If dli.xxx can be accessed, check whether DNS resolution is correctly configured.If dli.xxx can be accessed,",
"doc_type":"usermanual",
"kw":"How Do I Handle the dli.xxx,unable to resolve host address Error?,SDKs,User Guide",
"search_title":"",
"metedata":[
{
@@ -5512,7 +6338,7 @@
"uri":"dli_01_00006.html",
"node_id":"dli_01_00006.xml",
"product_code":"dli",
- "code":"266",
+ "code":"307",
"des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Change History,User Guide", diff --git a/docs/dli/umn/CLASS.TXT.json b/docs/dli/umn/CLASS.TXT.json index fb44d47c..082ba8e3 100644 --- a/docs/dli/umn/CLASS.TXT.json +++ b/docs/dli/umn/CLASS.TXT.json @@ -153,7 +153,7 @@ "code":"17" }, { - "desc":"SQL jobs allow you to execute SQL statements entered in the SQL job editing window, import data, and export data.SQL job management provides the following functions:Searc", + "desc":"SQL jobs allow you to execute SQL statements in the SQL job editing window, import data, and export data.SQL job management provides the following functions:Searching for", "product_code":"dli", "title":"SQL Job Management", "uri":"dli_01_0017.html", @@ -234,7 +234,7 @@ "code":"26" }, { - "desc":"After creating a job, you can view the job details to learn about the following information:Viewing Job DetailsChecking the Job Monitoring InformationViewing the Task Lis", + "desc":"After creating a job, you can view the job details to learn about the following information:Viewing Job DetailsChecking Job Monitoring InformationViewing the Task List of", "product_code":"dli", "title":"Flink Job Details", "uri":"dli_01_0462.html", @@ -341,6 +341,15 @@ "p_code":"34", "code":"38" }, + { + "desc":"You can create enterprise projects matching the organizational structure of your enterprises to centrally manage cloud resources across regions by project. Then you can c", + "product_code":"dli", + "title":"Allocating a Queue to an Enterprise Project", + "uri":"dli_01_0565.html", + "doc_type":"usermanual", + "p_code":"34", + "code":"39" + }, { "desc":"If the CIDR block of the DLI queue conflicts with that of the user data source, you can change the CIDR block of the queue.If the queue whose CIDR block is to be modified", "product_code":"dli", @@ -348,7 +357,7 @@ "uri":"dli_01_0443.html", "doc_type":"usermanual", "p_code":"34", - "code":"39" + "code":"40" }, { "desc":"Elastic scaling can be performed for a newly created queue only when there were jobs running in this queue.Queues with 16 CUs do not support scale-out or scale-in.Queues ", @@ -357,7 +366,7 @@ "uri":"dli_01_0487.html", "doc_type":"usermanual", "p_code":"34", - "code":"40" + "code":"41" }, { "desc":"When services are busy, you might need to use more compute resources to process services in a period. After this period, you do not require the same amount of resources. ", @@ -366,7 +375,7 @@ "uri":"dli_01_0488.html", "doc_type":"usermanual", "p_code":"34", - "code":"41" + "code":"42" }, { "desc":"It can be used to test the connectivity between the DLI queue and the peer IP address specified by the user in common scenarios, or the connectivity between the DLI queue", @@ -375,7 +384,7 @@ "uri":"dli_01_0489.html", "doc_type":"usermanual", "p_code":"34", - "code":"42" + "code":"43" }, { "desc":"Once you have created an SMN topic, you can easily subscribe to it by going to the Topic Management > Topics page of the SMN console. You can choose to receive notificati", @@ -384,7 +393,7 @@ "uri":"dli_01_0421.html", "doc_type":"usermanual", "p_code":"34", - "code":"43" + "code":"44" }, { "desc":"A tag is a key-value pair that you can customize to identify cloud resources. It helps you to classify and search for cloud resources. 
A tag consists of a tag key and a t", @@ -393,7 +402,160 @@ "uri":"dli_01_0022.html", "doc_type":"usermanual", "p_code":"34", - "code":"44" + "code":"45" + }, + { + "desc":"DLI allows you to set properties for queues.You can set Spark driver parameters to improve the scheduling efficiency of queues.This section describes how to set queue pro", + "product_code":"dli", + "title":"Setting Queue Properties", + "uri":"dli_01_0563.html", + "doc_type":"usermanual", + "p_code":"34", + "code":"46" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Elastic Resource Pool", + "uri":"dli_01_0508.html", + "doc_type":"usermanual", + "p_code":"", + "code":"47" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Before You Start", + "uri":"dli_01_0528.html", + "doc_type":"usermanual", + "p_code":"47", + "code":"48" + }, + { + "desc":"An elastic resource pool provides compute resources (CPU and memory) for running DLI jobs. The unit is CU. One CU contains one CPU and 4 GB memory.You can create multiple", + "product_code":"dli", + "title":"Overview", + "uri":"dli_01_0504.html", + "doc_type":"usermanual", + "p_code":"48", + "code":"49" + }, + { + "desc":"This section walks you through the procedure of adding a queue to an elastic resource pool and binding an enhanced datasource connection to the elastic resource pool.Proc", + "product_code":"dli", + "title":"Creating an Elastic Resource Pool and Running a Job", + "uri":"dli_01_0515.html", + "doc_type":"usermanual", + "p_code":"48", + "code":"50" + }, + { + "desc":"A company has multiple departments that perform data analysis in different periods during a day.Department A requires a large number of compute resources from 00:00 a.m. ", + "product_code":"dli", + "title":"Configuring Scaling Policies for Queues", + "uri":"dli_01_0516.html", + "doc_type":"usermanual", + "p_code":"48", + "code":"51" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Regular Operations", + "uri":"dli_01_0529.html", + "doc_type":"usermanual", + "p_code":"47", + "code":"52" + }, + { + "desc":"For details about the application scenarios of elastic resource pools, see the Overview. 
This section describes how to create an elastic resource pool.If you use an enhan", + "product_code":"dli", + "title":"Creating an Elastic Resource Pool", + "uri":"dli_01_0505.html", + "doc_type":"usermanual", + "p_code":"52", + "code":"53" + }, + { + "desc":"Administrators can assign permissions of different operation scopes to users for each elastic resource pool.The administrator and elastic resource pool owner have all per", + "product_code":"dli", + "title":"Managing Permissions", + "uri":"dli_01_0526.html", + "doc_type":"usermanual", + "p_code":"52", + "code":"54" + }, + { + "desc":"You can add one or more queues to an elastic resource pool to run jobs. This section describes how to add a queue to an elastic resource pool.Automatic scaling of an elas", + "product_code":"dli", + "title":"Adding a Queue", + "uri":"dli_01_0509.html", + "doc_type":"usermanual", + "p_code":"52", + "code":"55" + }, + { + "desc":"If you want a queue to use resources in an elastic resource pool, bind the queue to the pool.You can click Associate Queue on the Resource Pool page to bind a queue to an", + "product_code":"dli", + "title":"Binding a Queue", + "uri":"dli_01_0530.html", + "doc_type":"usermanual", + "p_code":"52", + "code":"56" + }, + { + "desc":"Multiple queues can be added to an elastic resource pool. For details about how to add a queue, see Adding a Queue. You can configure the number of CUs you want based on ", + "product_code":"dli", + "title":"Managing Queues", + "uri":"dli_01_0506.html", + "doc_type":"usermanual", + "p_code":"52", + "code":"57" + }, + { + "desc":"CU settings are used to control the maximum and minimum CU ranges for elastic resource pools to avoid unlimited resource scaling.For example, an elastic resource pool has", + "product_code":"dli", + "title":"Setting CUs", + "uri":"dli_01_0507.html", + "doc_type":"usermanual", + "p_code":"52", + "code":"58" + }, + { + "desc":"If CUs of a yearly/monthly elastic resource pool cannot meet your service requirements, you can modify the CUs. In this case, you will be charged based on the number of C", + "product_code":"dli", + "title":"Modifying Specifications", + "uri":"dli_01_0524.html", + "doc_type":"usermanual", + "p_code":"52", + "code":"59" + }, + { + "desc":"A tag is a key-value pair that you can customize to identify cloud resources. It helps you to classify and search for cloud resources. A tag consists of a tag key and a t", + "product_code":"dli", + "title":"Managing Tags", + "uri":"dli_01_0525.html", + "doc_type":"usermanual", + "p_code":"52", + "code":"60" + }, + { + "desc":"If you added a queue to or deleted one from an elastic resource pool, or you scaled an added queue, the CU quantity of the elastic resource pool may be changed. You can v", + "product_code":"dli", + "title":"Viewing Scaling History", + "uri":"dli_01_0532.html", + "doc_type":"usermanual", + "p_code":"52", + "code":"61" + }, + { + "desc":"You can create enterprise projects matching the organizational structure of your enterprises to centrally manage cloud resources across regions by project. Then you can c", + "product_code":"dli", + "title":"Allocating to an Enterprise Project", + "uri":"dli_01_0566.html", + "doc_type":"usermanual", + "p_code":"52", + "code":"62" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -402,7 +564,7 @@ "uri":"dli_01_0004.html", "doc_type":"usermanual", "p_code":"", - "code":"45" + "code":"63" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -410,8 +572,8 @@ "title":"Databases and Tables", "uri":"dli_01_0390.html", "doc_type":"usermanual", - "p_code":"45", - "code":"46" + "p_code":"63", + "code":"64" }, { "desc":"DLI database and table management provide the following functions:Database Permission ManagementTable Permission ManagementCreating a Database or a TableDeleting a Databa", @@ -419,8 +581,8 @@ "title":"Overview", "uri":"dli_01_0228.html", "doc_type":"usermanual", - "p_code":"46", - "code":"47" + "p_code":"64", + "code":"65" }, { "desc":"By setting permissions, you can assign varying database permissions to different users.The administrator and database owner have all permissions, which cannot be set or m", @@ -428,8 +590,8 @@ "title":"Managing Database Permissions", "uri":"dli_01_0447.html", "doc_type":"usermanual", - "p_code":"46", - "code":"48" + "p_code":"64", + "code":"66" }, { "desc":"By setting permissions, you can assign varying table permissions to different users.The administrator and table owner have all permissions, which cannot be set or modifie", @@ -437,8 +599,8 @@ "title":"Managing Table Permissions", "uri":"dli_01_0448.html", "doc_type":"usermanual", - "p_code":"46", - "code":"49" + "p_code":"64", + "code":"67" }, { "desc":"A database, built on the computer storage device, is a data warehouse where data is organized, stored, and managed based on its structure.The table is an important part o", @@ -446,8 +608,8 @@ "title":"Creating a Database or a Table", "uri":"dli_01_0005.html", "doc_type":"usermanual", - "p_code":"46", - "code":"50" + "p_code":"64", + "code":"68" }, { "desc":"You can delete unnecessary databases and tables based on actual conditions.You are not allowed to delete databases or tables that are being used for running jobs.The admi", @@ -455,8 +617,8 @@ "title":"Deleting a Database or a Table", "uri":"dli_01_0011.html", "doc_type":"usermanual", - "p_code":"46", - "code":"51" + "p_code":"64", + "code":"69" }, { "desc":"During actual use, developers create databases and tables and submit them to test personnel for testing. After the test is complete, the databases and tables are transfer", @@ -464,8 +626,8 @@ "title":"Modifying the Owners of Databases and Tables", "uri":"dli_01_0376.html", "doc_type":"usermanual", - "p_code":"46", - "code":"52" + "p_code":"64", + "code":"70" }, { "desc":"You can import data from OBS to a table created in DLI.Only one path can be specified during data import. The path cannot contain commas (,).To import data in CSV format ", @@ -473,8 +635,8 @@ "title":"Importing Data to the Table", "uri":"dli_01_0253.html", "doc_type":"usermanual", - "p_code":"46", - "code":"53" + "p_code":"64", + "code":"71" }, { "desc":"You can export data from a DLI table to OBS. 
During the export, a folder is created in OBS or the content in the existing folder is overwritten.The exported file can be i", @@ -482,8 +644,8 @@ "title":"Exporting Data from DLI to OBS", "uri":"dli_01_0010.html", "doc_type":"usermanual", - "p_code":"46", - "code":"54" + "p_code":"64", + "code":"72" }, { "desc":"Metadata is used to define data types. It describes information about the data, including the source, size, format, and other data features. In database fields, metadata ", @@ -491,8 +653,8 @@ "title":"Viewing Metadata", "uri":"dli_01_0008.html", "doc_type":"usermanual", - "p_code":"46", - "code":"55" + "p_code":"64", + "code":"73" }, { "desc":"The Preview page displays the first 10 records in the table.You can preview data on either the Data Management page or the SQL Editor page.To preview data on the Data Man", @@ -500,8 +662,8 @@ "title":"Previewing Data", "uri":"dli_01_0007.html", "doc_type":"usermanual", - "p_code":"46", - "code":"56" + "p_code":"64", + "code":"74" }, { "desc":"A tag is a key-value pair that you can customize to identify cloud resources. It helps you to classify and search for cloud resources. A tag consists of a tag key and a t", @@ -509,8 +671,8 @@ "title":"Managing Tags", "uri":"dli_01_0552.html", "doc_type":"usermanual", - "p_code":"46", - "code":"57" + "p_code":"64", + "code":"75" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -518,8 +680,8 @@ "title":"Package Management", "uri":"dli_01_0366.html", "doc_type":"usermanual", - "p_code":"45", - "code":"58" + "p_code":"63", + "code":"76" }, { "desc":"Package management provides the following functions:Managing Package PermissionsCreating a PackageDeleting a PackageYou can delete program packages in batches.You can del", @@ -527,8 +689,8 @@ "title":"Overview", "uri":"dli_01_0407.html", "doc_type":"usermanual", - "p_code":"58", - "code":"59" + "p_code":"76", + "code":"77" }, { "desc":"You can isolate package groups or packages allocated to different users by setting permissions to ensure data query performance.The administrator and the owner of a packa", @@ -536,8 +698,8 @@ "title":"Managing Permissions on Packages and Package Groups", "uri":"dli_01_0477.html", "doc_type":"usermanual", - "p_code":"58", - "code":"60" + "p_code":"76", + "code":"78" }, { "desc":"DLI allows you to submit program packages in batches to the general-use queue for running.If you need to update a package, you can use the same package or file to upload ", @@ -545,8 +707,8 @@ "title":"Creating a Package", "uri":"dli_01_0367.html", "doc_type":"usermanual", - "p_code":"58", - "code":"61" + "p_code":"76", + "code":"79" }, { "desc":"You can delete a package based on actual conditions.On the left of the management console, choose Data Management > Package Management.Click Delete in the Operation colum", @@ -554,8 +716,8 @@ "title":"Deleting a Package", "uri":"dli_01_0369.html", "doc_type":"usermanual", - "p_code":"58", - "code":"62" + "p_code":"76", + "code":"80" }, { "desc":"To change the owner of a package, click More > Modify Owner in the Operation column of a package on the Package Management page.If the package has been grouped, you can m", @@ -563,8 +725,8 @@ "title":"Modifying the Owner", "uri":"dli_01_0478.html", "doc_type":"usermanual", - "p_code":"58", - "code":"63" + "p_code":"76", + "code":"81" }, { 
"desc":"DLI built-in dependencies are provided by the platform by default. In case of conflicts, you do not need to upload them when packaging JAR packages of Spark or Flink Jar ", @@ -572,8 +734,8 @@ "title":"Built-in Dependencies", "uri":"dli_01_0397.html", "doc_type":"usermanual", - "p_code":"58", - "code":"64" + "p_code":"76", + "code":"82" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -582,7 +744,7 @@ "uri":"dli_01_0379.html", "doc_type":"usermanual", "p_code":"", - "code":"65" + "code":"83" }, { "desc":"To facilitate SQL operation execution, DLI allows you to customize query templates or save the SQL statements in use as templates. After templates are saved, you do not n", @@ -590,8 +752,8 @@ "title":"Managing SQL Templates", "uri":"dli_01_0021.html", "doc_type":"usermanual", - "p_code":"65", - "code":"66" + "p_code":"83", + "code":"84" }, { "desc":"Flink templates include sample templates and custom templates. You can modify an existing sample template to meet the actual job logic requirements and save time for edit", @@ -599,8 +761,8 @@ "title":"Managing Flink Templates", "uri":"dli_01_0464.html", "doc_type":"usermanual", - "p_code":"65", - "code":"67" + "p_code":"83", + "code":"85" }, { "desc":"You can modify a sample template to meet the Spark job requirements, saving time for editing SQL statements.Currently, the cloud platform does not provide preset Spark te", @@ -608,8 +770,8 @@ "title":"Managing Spark SQL Templates", "uri":"dli_01_0551.html", "doc_type":"usermanual", - "p_code":"65", - "code":"68" + "p_code":"83", + "code":"86" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -617,8 +779,8 @@ "title":"Appendix", "uri":"dli_01_05110.html", "doc_type":"usermanual", - "p_code":"65", - "code":"69" + "p_code":"83", + "code":"87" }, { "desc":"TPC-H is a test set developed by the Transaction Processing Performance Council (TPC) to simulate decision-making support applications. It is widely used in academia and ", @@ -626,8 +788,8 @@ "title":"TPC-H Sample Data in the SQL Template", "uri":"dli_01_05111.html", "doc_type":"usermanual", - "p_code":"69", - "code":"70" + "p_code":"87", + "code":"88" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -636,7 +798,7 @@ "uri":"dli_01_0426.html", "doc_type":"usermanual", "p_code":"", - "code":"71" + "code":"89" }, { "desc":"In cross-source data analysis scenarios, DLI needs to connect to external data sources. 
However, due to the different VPCs between the data source and DLI, the network ca", @@ -644,8 +806,8 @@ "title":"Overview", "uri":"dli_01_0003.html", "doc_type":"usermanual", - "p_code":"71", - "code":"72" + "p_code":"89", + "code":"90" }, { "desc":"If DLI needs to access external data sources, you need to establish enhanced datasource connections to enable the network between DLI and the data sources, and then devel", @@ -653,8 +815,8 @@ "title":"Cross-Source Analysis Development Methods", "uri":"dli_01_0410.html", "doc_type":"usermanual", - "p_code":"71", - "code":"73" + "p_code":"89", + "code":"91" }, { "desc":"Create an enhanced datasource connection for DLI to access, import, query, and analyze data of other data sources.For example, to connect DLI to the MRS, RDS, CSS, Kafka,", @@ -662,8 +824,8 @@ "title":"Creating an Enhanced Datasource Connection", "uri":"dli_01_0006.html", "doc_type":"usermanual", - "p_code":"71", - "code":"74" + "p_code":"89", + "code":"92" }, { "desc":"Delete an enhanced datasource connection that is no longer used on the console.Log in to the DLI management console.In the left navigation pane, choose Datasource Connect", @@ -671,8 +833,8 @@ "title":"Deleting an Enhanced Datasource Connection", "uri":"dli_01_0553.html", "doc_type":"usermanual", - "p_code":"71", - "code":"75" + "p_code":"89", + "code":"93" }, { "desc":"Host information is the mapping between host IP addresses and domain names. After you configure host information, jobs can only use the configured domain names to access ", @@ -680,8 +842,8 @@ "title":"Modifying Host Information", "uri":"dli_01_0013.html", "doc_type":"usermanual", - "p_code":"71", - "code":"76" + "p_code":"89", + "code":"94" }, { "desc":"The CIDR block of the DLI queue that is bound with a datasource connection cannot overlap with that of the data source.The default queue cannot be bound with a connection", @@ -689,8 +851,8 @@ "title":"Binding and Unbinding a Queue", "uri":"dli_01_0514.html", "doc_type":"usermanual", - "p_code":"71", - "code":"77" + "p_code":"89", + "code":"95" }, { "desc":"A route is configured with the destination, next hop type, and next hop to determine where the network traffic is directed. Routes are classified into system routes and c", @@ -698,8 +860,8 @@ "title":"Adding a Route", "uri":"dli_01_0014.html", "doc_type":"usermanual", - "p_code":"71", - "code":"78" + "p_code":"89", + "code":"96" }, { "desc":"Delete a route that is no longer used.A custom route table cannot be deleted if it is associated with a subnet.Log in to the DLI management console.In the left navigation", @@ -707,8 +869,8 @@ "title":"Deleting a Route", "uri":"dli_01_0556.html", "doc_type":"usermanual", - "p_code":"71", - "code":"79" + "p_code":"89", + "code":"97" }, { "desc":"Enhanced connections support user authorization by project. After authorization, users in the project have the permission to perform operations on the enhanced connection", @@ -716,8 +878,8 @@ "title":"Enhanced Connection Permission Management", "uri":"dli_01_0018.html", "doc_type":"usermanual", - "p_code":"71", - "code":"80" + "p_code":"89", + "code":"98" }, { "desc":"A tag is a key-value pair customized by users and used to identify cloud resources. It helps users to classify and search for cloud resources. 
A tag consists of a tag key",
"product_code":"dli",
"title":"Enhanced Datasource Connection Tag Management",
"uri":"dli_01_0019.html",
"doc_type":"usermanual",
- "p_code":"71",
- "code":"81"
+ "p_code":"89",
+ "code":"99"
},
{
"desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"product_code":"dli",
"title":"Datasource Authentication",
"uri":"dli_01_0422.html",
"doc_type":"usermanual",
"p_code":"",
- "code":"82"
+ "code":"100"
},
{
"desc":"When analyzing across multiple sources, it is not recommended to configure authentication information directly in a job as it can lead to password leakage. Instead, you a",
"product_code":"dli",
"title":"Overview",
"uri":"dli_01_0561.html",
"doc_type":"usermanual",
- "p_code":"82",
- "code":"83"
+ "p_code":"100",
+ "code":"101"
},
{
"desc":"Create a CSS datasource authentication on the DLI console to store the authentication information of the CSS security cluster to DLI. This will allow you to access the",
"product_code":"dli",
"title":"Creating a CSS Datasource Authentication",
"uri":"dli_01_0427.html",
"doc_type":"usermanual",
- "p_code":"82",
- "code":"84"
+ "p_code":"100",
+ "code":"102"
},
{
"desc":"Create a Kerberos datasource authentication on the DLI console to store the authentication information of the data source to DLI. This will allow you to access the dat",
"product_code":"dli",
"title":"Creating a Kerberos Datasource Authentication",
"uri":"dli_01_0558.html",
"doc_type":"usermanual",
- "p_code":"82",
- "code":"85"
+ "p_code":"100",
+ "code":"103"
},
{
"desc":"Create a Kafka_SSL datasource authentication on the DLI console to store the Kafka authentication information to DLI. This will allow you to access Kafka instances wit",
"product_code":"dli",
"title":"Creating a Kafka_SSL Datasource Authentication",
"uri":"dli_01_0560.html",
"doc_type":"usermanual",
- "p_code":"82",
- "code":"86"
+ "p_code":"100",
+ "code":"104"
},
{
"desc":"Create a password datasource authentication on the DLI console to store passwords of the GaussDB(DWS), RDS, DCS, and DDS data sources to DLI. This will allow you to acces",
"product_code":"dli",
"title":"Creating a Password Datasource Authentication",
"uri":"dli_01_0559.html",
"doc_type":"usermanual",
- "p_code":"82",
- "code":"87"
+ "p_code":"100",
+ "code":"105"
},
{
"desc":"Grant permissions on a datasource authentication to users so multiple user jobs can use the datasource authentication without affecting each other.The administrator and t",
"product_code":"dli",
"title":"Datasource Authentication Permission Management",
"uri":"dli_01_0480.html",
"doc_type":"usermanual",
- "p_code":"82",
- "code":"88"
+ "p_code":"100",
+ "code":"106"
},
{
"desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"product_code":"dli",
"title":"Global Configuration",
"uri":"dli_01_0485.html",
"doc_type":"usermanual",
"p_code":"",
- "code":"89"
+ "code":"107"
},
{
"desc":"DLI allows you to set variables that are frequently used during job development as global variables on the DLI management console. 
This avoids repeated definitions during", @@ -806,8 +968,8 @@ "title":"Global Variables", "uri":"dli_01_0476.html", "doc_type":"usermanual", - "p_code":"89", - "code":"90" + "p_code":"107", + "code":"108" }, { "desc":"You can grant permissions on a global variable to users.The administrator and the global variable owner have all permissions. You do not need to set permissions for them,", @@ -815,17 +977,8 @@ "title":"Permission Management for Global Variables", "uri":"dli_01_0533.html", "doc_type":"usermanual", - "p_code":"89", - "code":"91" - }, - { - "desc":"Only the tenant account or a subaccount of user group admin can authorize access.After entering the DLI management console, you are advised to set agency permissions to e", - "product_code":"dli", - "title":"Service Authorization", - "uri":"dli_01_0486.html", - "doc_type":"usermanual", - "p_code":"89", - "code":"92" + "p_code":"107", + "code":"109" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -834,7 +987,7 @@ "uri":"dli_01_0408.html", "doc_type":"usermanual", "p_code":"", - "code":"93" + "code":"110" }, { "desc":"DLI has a comprehensive permission control mechanism and supports fine-grained authentication through Identity and Access Management (IAM). You can create policies in IAM", @@ -842,8 +995,8 @@ "title":"Overview", "uri":"dli_01_0440.html", "doc_type":"usermanual", - "p_code":"93", - "code":"94" + "p_code":"110", + "code":"111" }, { "desc":"You can use Identity and Access Management (IAM) to implement fine-grained permissions control on DLI resources. For details, see Overview.If your cloud account does not ", @@ -851,8 +1004,8 @@ "title":"Creating an IAM User and Granting Permissions", "uri":"dli_01_0418.html", "doc_type":"usermanual", - "p_code":"93", - "code":"95" + "p_code":"110", + "code":"112" }, { "desc":"Custom policies can be created as a supplement to the system policies of DLI. You can add actions to custom policies. For the actions supported for custom policies, see \"", @@ -860,8 +1013,8 @@ "title":"Creating a Custom Policy", "uri":"dli_01_0451.html", "doc_type":"usermanual", - "p_code":"93", - "code":"96" + "p_code":"110", + "code":"113" }, { "desc":"A resource is an object that exists within a service. You can select DLI resources by specifying their paths.", @@ -869,8 +1022,8 @@ "title":"DLI Resources", "uri":"dli_01_0417.html", "doc_type":"usermanual", - "p_code":"93", - "code":"97" + "p_code":"110", + "code":"114" }, { "desc":"Request conditions are useful in determining when a custom policy takes effect. A request condition consists of a condition key and operator. Condition keys are either gl", @@ -878,8 +1031,8 @@ "title":"DLI Request Conditions", "uri":"dli_01_0475.html", "doc_type":"usermanual", - "p_code":"93", - "code":"98" + "p_code":"110", + "code":"115" }, { "desc":"Table 1 lists the common operations supported by each system policy of DLI. Choose proper system policies according to this table. For details about the SQL statement per", @@ -887,8 +1040,8 @@ "title":"Common Operations Supported by DLI System Policy", "uri":"dli_01_0441.html", "doc_type":"usermanual", - "p_code":"93", - "code":"99" + "p_code":"110", + "code":"116" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -897,7 +1050,7 @@ "uri":"dli_01_0513.html", "doc_type":"usermanual", "p_code":"", - "code":"100" + "code":"117" }, { "desc":"On the DLI management console, you can import data stored on OBS to DLI tables from Data Management > Databases and Tables > Table Management and SQL Editor pages. For de", @@ -905,8 +1058,8 @@ "title":"Importing Data to a DLI Table", "uri":"dli_01_0420.html", "doc_type":"usermanual", - "p_code":"100", - "code":"101" + "p_code":"117", + "code":"118" }, { "desc":"This section describes metrics reported by DLI to Cloud Eye as well as their namespaces and dimensions. You can use the management console or APIs provided by Cloud Eye t", @@ -914,8 +1067,8 @@ "title":"Viewing Monitoring Metrics", "uri":"dli_01_0445.html", "doc_type":"usermanual", - "p_code":"100", - "code":"102" + "p_code":"117", + "code":"119" }, { "desc":"With CTS, you can record operations associated with DLI for later query, audit, and backtrack operations.", @@ -923,17 +1076,17 @@ "title":"DLI Operations That Can Be Recorded by CTS", "uri":"dli_01_0318.html", "doc_type":"usermanual", - "p_code":"100", - "code":"103" + "p_code":"117", + "code":"120" }, { "desc":"A quota limits the quantity of a resource available to users, thereby preventing spikes in the usage of the resource.You can also request for an increased quota if your e", "product_code":"dli", - "title":"Quotas", + "title":"Quota Management", "uri":"dli_01_0550.html", "doc_type":"usermanual", - "p_code":"100", - "code":"104" + "p_code":"117", + "code":"121" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -942,7 +1095,7 @@ "uri":"dli_01_0539.html", "doc_type":"usermanual", "p_code":"", - "code":"105" + "code":"122" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -950,8 +1103,17 @@ "title":"Flink Jobs", "uri":"dli_03_0037.html", "doc_type":"usermanual", - "p_code":"105", - "code":"106" + "p_code":"122", + "code":"123" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Usage", + "uri":"dli_03_0137.html", + "doc_type":"usermanual", + "p_code":"123", + "code":"124" }, { "desc":"DLI Flink jobs support the following data formats:Avro, Avro_merge, BLOB, CSV, EMAIL, JSON, ORC, Parquet, and XML.DLI Flink jobs support data from the following data sour", @@ -959,8 +1121,8 @@ "title":"What Data Formats and Data Sources Are Supported by DLI Flink Jobs?", "uri":"dli_03_0083.html", "doc_type":"usermanual", - "p_code":"106", - "code":"107" + "p_code":"124", + "code":"125" }, { "desc":"A sub-user can view queues but cannot view Flink jobs. 
You can authorize the sub-user using DLI or IAM.Authorization on DLILog in to the DLI console using a tenant accoun", @@ -968,8 +1130,8 @@ "title":"How Do I Authorize a Subuser to View Flink Jobs?", "uri":"dli_03_0139.html", "doc_type":"usermanual", - "p_code":"106", - "code":"108" + "p_code":"124", + "code":"126" }, { "desc":"DLI Flink jobs are highly available. You can enable the automatic restart function to automatically restart your jobs after short-time faults of peripheral services are r", @@ -977,8 +1139,8 @@ "title":"How Do I Set Auto Restart upon Exception for a Flink Job?", "uri":"dli_03_0090.html", "doc_type":"usermanual", - "p_code":"106", - "code":"109" + "p_code":"124", + "code":"127" }, { "desc":"When you create a Flink SQL job or Flink Jar job, you can select Save Job Log on the job editing page to save job running logs to OBS.To set the OBS bucket for storing th", @@ -986,8 +1148,8 @@ "title":"How Do I Save Flink Job Logs?", "uri":"dli_03_0099.html", "doc_type":"usermanual", - "p_code":"106", - "code":"110" + "p_code":"124", + "code":"128" }, { "desc":"DLI can output Flink job results to DIS. You can view the results in DIS. For details, see \"Obtaining Data from DIS\" in Data Ingestion Service User Guide.DLI can output F", @@ -995,8 +1157,8 @@ "title":"How Can I Check Flink Job Results?", "uri":"dli_03_0043.html", "doc_type":"usermanual", - "p_code":"106", - "code":"111" + "p_code":"124", + "code":"129" }, { "desc":"Choose Job Management > Flink Jobs. In the Operation column of the target job, choose More > Permissions. When a new user is authorized, No such user. userName:xxxx. is d", @@ -1004,8 +1166,8 @@ "title":"Why Is Error \"No such user. userName:xxxx.\" Reported on the Flink Job Management Page When I Grant Permission to a User?", "uri":"dli_03_0160.html", "doc_type":"usermanual", - "p_code":"106", - "code":"112" + "p_code":"124", + "code":"130" }, { "desc":"Checkpoint was enabled when a Flink job is created, and the OBS bucket for storing checkpoints was specified. After a Flink job is manually stopped, no message is display", @@ -1013,8 +1175,8 @@ "title":"How Do I Know Which Checkpoint the Flink Job I Stopped Will Be Restored to When I Start the Job Again?", "uri":"dli_03_0180.html", "doc_type":"usermanual", - "p_code":"106", - "code":"113" + "p_code":"124", + "code":"131" }, { "desc":"When you set running parameters of a DLI Flink job, you can enable Alarm Generation upon Job Exception to receive alarms when the job runs abnormally or is in arrears.If ", @@ -1022,8 +1184,17 @@ "title":"Why Is a Message Displayed Indicating That the SMN Topic Does Not Exist When I Use the SMN Topic in DLI?", "uri":"dli_03_0036.html", "doc_type":"usermanual", - "p_code":"106", - "code":"114" + "p_code":"124", + "code":"132" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Flink SQL", + "uri":"dli_03_0131.html", + "doc_type":"usermanual", + "p_code":"123", + "code":"133" }, { "desc":"The consumption capability of a Flink SQL job depends on the data source transmission, queue size, and job parameter settings. 
The peak consumption is 10 Mbit/s.", @@ -1031,8 +1202,8 @@ "title":"How Much Data Can Be Processed in a Day by a Flink SQL Job?", "uri":"dli_03_0130.html", "doc_type":"usermanual", - "p_code":"106", - "code":"115" + "p_code":"133", + "code":"134" }, { "desc":"The temp stream in Flink SQL is similar to a subquery. It is a logical stream used to simplify the SQL logic and does not generate data storage. Therefore, there is no ne", @@ -1040,8 +1211,8 @@ "title":"Does Data in the Temporary Stream of Flink SQL Need to Be Cleared Periodically? How Do I Clear the Data?", "uri":"dli_03_0061.html", "doc_type":"usermanual", - "p_code":"106", - "code":"116" + "p_code":"133", + "code":"135" }, { "desc":"SymptomWhen you create a Flink SQL job and configure the parameters, you select an OBS bucket you have created. The system displays a message indicating that the OBS buck", @@ -1049,8 +1220,8 @@ "title":"Why Is a Message Displayed Indicating That the OBS Bucket Is Not Authorized When I Select an OBS Bucket for a Flink SQL Job?", "uri":"dli_03_0138.html", "doc_type":"usermanual", - "p_code":"106", - "code":"117" + "p_code":"133", + "code":"136" }, { "desc":"When using a Flink SQL job, you need to create an OBS partition table for subsequent batch processing.In the following example, the day field is used as the partition fie", @@ -1058,8 +1229,8 @@ "title":"How Do I Create an OBS Partitioned Table for a Flink SQL Job?", "uri":"dli_03_0089.html", "doc_type":"usermanual", - "p_code":"106", - "code":"118" + "p_code":"133", + "code":"137" }, { "desc":"In this example, the day field is used as the partition field with the parquet encoding format (only the parquet format is supported currently) to dump car_info data to O", @@ -1067,8 +1238,8 @@ "title":"How Do I Dump Data to OBS and Create an OBS Partitioned Table?", "uri":"dli_03_0075.html", "doc_type":"usermanual", - "p_code":"106", - "code":"119" + "p_code":"133", + "code":"138" }, { "desc":"When I run the creation statement with an EL expression in the table name in a Flink SQL job, the following error message is displayed:DLI.0005: AnalysisException: t_user", @@ -1076,8 +1247,8 @@ "title":"Why Is Error Message \"DLI.0005\" Displayed When I Use an EL Expression to Create a Table in a Flink SQL Job?", "uri":"dli_03_0167.html", "doc_type":"usermanual", - "p_code":"106", - "code":"120" + "p_code":"133", + "code":"139" }, { "desc":"After data is written to OBS through the Flink job output stream, data cannot be queried from the DLI table created in the OBS file path.For example, use the following Fl", @@ -1085,8 +1256,8 @@ "title":"Why Is No Data Queried in the DLI Table Created Using the OBS File Path When Data Is Written to OBS by a Flink Job Output Stream?", "uri":"dli_03_0168.html", "doc_type":"usermanual", - "p_code":"106", - "code":"121" + "p_code":"133", + "code":"140" }, { "desc":"After a Flink SQL job is submitted on DLI, the job fails to be executed. The following error information is displayed in the job log:connect to DIS failed java.lang.Illeg", @@ -1094,8 +1265,8 @@ "title":"Why Does a Flink SQL Job Fail to Be Executed, and Is \"connect to DIS failed java.lang.IllegalArgumentException: Access key cannot be null\" Displayed in the Log?", "uri":"dli_03_0174.html", "doc_type":"usermanual", - "p_code":"106", - "code":"122" + "p_code":"133", + "code":"141" }, { "desc":"Semantic verification for a Flink SQL job (reading DIS data) fails. The following information is displayed when the job fails:Get dis channel xxxinfo failed. 
error info: ", @@ -1103,8 +1274,8 @@ "title":"Why Is Error \"Not authorized\" Reported When a Flink SQL Job Reads DIS Data?", "uri":"dli_03_0176.html", "doc_type":"usermanual", - "p_code":"106", - "code":"123" + "p_code":"133", + "code":"142" }, { "desc":"After a Flink SQL job consumed Kafka and sent data to the Elasticsearch cluster, the job was successfully executed, but no data is available.Possible causes are as follow", @@ -1112,8 +1283,17 @@ "title":"Data Writing Fails After a Flink SQL Job Consumed Kafka and Sank Data to the Elasticsearch Cluster", "uri":"dli_03_0232.html", "doc_type":"usermanual", - "p_code":"106", - "code":"124" + "p_code":"133", + "code":"143" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Flink Jar Jobs", + "uri":"dli_03_0132.html", + "doc_type":"usermanual", + "p_code":"123", + "code":"144" }, { "desc":"The procedure is as follows:Add the following code to the JAR file code of the Flink Jar job:// Configure the pom file on which the StreamExecutionEnvironment depends.\nSt", @@ -1121,8 +1301,8 @@ "title":"How Do I Configure Checkpoints for Flink Jar Jobs and Save the Checkpoints to OBS?", "uri":"dli_03_0038.html", "doc_type":"usermanual", - "p_code":"106", - "code":"125" + "p_code":"144", + "code":"145" }, { "desc":"Configuration files can be uploaded for user-defined jobs (JAR).Upload the configuration file to DLI through Package Management.In the Other Dependencies area of the Flin", @@ -1130,8 +1310,8 @@ "title":"Does a Flink JAR Job Support Configuration File Upload? How Do I Upload a Configuration File?", "uri":"dli_03_0044.html", "doc_type":"usermanual", - "p_code":"106", - "code":"126" + "p_code":"144", + "code":"146" }, { "desc":"The dependency of your Flink job conflicts with a built-in dependency of the DLI Flink platform. As a result, the job submission fails.Delete your JAR file that is the sa", @@ -1139,8 +1319,8 @@ "title":"Why Does the Submission Fail Due to Flink JAR File Conflict?", "uri":"dli_03_0119.html", "doc_type":"usermanual", - "p_code":"106", - "code":"127" + "p_code":"144", + "code":"147" }, { "desc":"When a Flink Jar job is submitted to access GaussDB(DWS), an error message is displayed indicating that the job fails to be started. The job log contains the following er", @@ -1148,8 +1328,8 @@ "title":"Why Does a Flink Jar Job Fail to Access GaussDB(DWS) and a Message Is Displayed Indicating Too Many Client Connections?", "uri":"dli_03_0161.html", "doc_type":"usermanual", - "p_code":"106", - "code":"128" + "p_code":"144", + "code":"148" }, { "desc":"An exception occurred when a Flink Jar job is running. The following error information is displayed in the job log:org.apache.flink.shaded.curator.org.apache.curator.Conn", @@ -1157,8 +1337,8 @@ "title":"Why Is Error Message \"Authentication failed\" Displayed During Flink Jar Job Running?", "uri":"dli_03_0165.html", "doc_type":"usermanual", - "p_code":"106", - "code":"129" + "p_code":"144", + "code":"149" }, { "desc":"The storage path of the Flink Jar job checkpoints was set to an OBS bucket. 
The job failed to be submitted, and an error message indicating an invalid OBS bucket name was", @@ -1166,8 +1346,8 @@ "title":"Why Is Error Invalid OBS Bucket Name Reported After a Flink Job Submission Failed?", "uri":"dli_03_0233.html", "doc_type":"usermanual", - "p_code":"106", - "code":"130" + "p_code":"144", + "code":"150" }, { "desc":"Flink Job submission failed. The exception information is as follows:Flink JAR files conflicted. The submitted Flink JAR file conflicted with the HDFS JAR file of the DLI", @@ -1175,8 +1355,8 @@ "title":"Why Does the Flink Submission Fail Due to Hadoop JAR File Conflict?", "uri":"dli_03_0234.html", "doc_type":"usermanual", - "p_code":"106", - "code":"131" + "p_code":"144", + "code":"151" }, { "desc":"You can use Flink Jar to connect to Kafka with SASL SSL authentication enabled.", @@ -1184,8 +1364,17 @@ "title":"How Do I Connect a Flink Jar Job to SASL_SSL?", "uri":"dli_03_0266.html", "doc_type":"usermanual", - "p_code":"106", - "code":"132" + "p_code":"144", + "code":"152" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Performance Tuning", + "uri":"dli_03_0133.html", + "doc_type":"usermanual", + "p_code":"123", + "code":"153" }, { "desc":"Data Stacking in a Consumer GroupThe accumulated data of a consumer group can be calculated by the following formula: Total amount of data to be consumed by the consumer ", @@ -1193,8 +1382,8 @@ "title":"How Do I Optimize Performance of a Flink Job?", "uri":"dli_03_0106.html", "doc_type":"usermanual", - "p_code":"106", - "code":"133" + "p_code":"153", + "code":"154" }, { "desc":"Add the following SQL statements to the Flink job:", @@ -1202,8 +1391,8 @@ "title":"How Do I Write Data to Different Elasticsearch Clusters in a Flink Job?", "uri":"dli_03_0048.html", "doc_type":"usermanual", - "p_code":"106", - "code":"134" + "p_code":"153", + "code":"155" }, { "desc":"The DLI Flink checkpoint/savepoint mechanism is complete and reliable. You can use this mechanism to prevent data loss when a job is manually restarted or restarted due t", @@ -1211,8 +1400,17 @@ "title":"How Do I Prevent Data Loss After Flink Job Restart?", "uri":"dli_03_0096.html", "doc_type":"usermanual", - "p_code":"106", - "code":"135" + "p_code":"153", + "code":"156" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"O&M Guide", + "uri":"dli_03_0135.html", + "doc_type":"usermanual", + "p_code":"123", + "code":"157" }, { "desc":"On the Flink job management page, hover the cursor on the status of the job that fails to be submitted to view the brief information about the failure.The possible causes", @@ -1220,8 +1418,8 @@ "title":"How Do I Locate a Flink Job Submission Error?", "uri":"dli_03_0103.html", "doc_type":"usermanual", - "p_code":"106", - "code":"136" + "p_code":"157", + "code":"158" }, { "desc":"On the Flink job management page, click Edit in the Operation column of the target job. 
On the displayed page, check whether Save Job Log in the Running Parameters tab is enab", @@ -1229,8 +1427,8 @@ "title":"How Do I Locate a Flink Job Running Error?", "uri":"dli_03_0105.html", "doc_type":"usermanual", - "p_code":"106", - "code":"137" + "p_code":"157", + "code":"159" }, { "desc":"Flink's checkpointing is a fault tolerance and recovery mechanism. This mechanism ensures that real-time programs can self-recover in case of exceptions or machine issues", @@ -1238,8 +1436,8 @@ "title":"How Can I Check if a Flink Job Can Be Restored From a Checkpoint After Restarting It?", "uri":"dli_03_0136.html", "doc_type":"usermanual", - "p_code":"106", - "code":"138" + "p_code":"157", + "code":"160" }, { "desc":"To rectify this fault, perform the following steps:Log in to the DIS management console. In the navigation pane, choose Stream Management. View the Flink job SQL statemen", @@ -1247,8 +1445,8 @@ "title":"Why Does DIS Stream Not Exist During Job Semantic Check?", "uri":"dli_03_0040.html", "doc_type":"usermanual", - "p_code":"106", - "code":"139" + "p_code":"157", + "code":"161" }, { "desc":"If the OBS bucket selected for a job is not authorized, perform the following steps:Select Enable Checkpointing or Save Job Log.Specify OBS Bucket.Select Authorize OBS.", @@ -1256,8 +1454,8 @@ "title":"Why Is the OBS Bucket Selected for Job Not Authorized?", "uri":"dli_03_0045.html", "doc_type":"usermanual", - "p_code":"106", - "code":"140" + "p_code":"157", + "code":"162" }, { "desc":"Mode for storing generated job logs when a DLI Flink job fails to be submitted or executed. The options are as follows:If the submission fails, a submission log is genera", @@ -1265,8 +1463,8 @@ "title":"Why Are Logs Not Written to the OBS Bucket After a DLI Flink Job Fails to Be Submitted for Running?", "uri":"dli_03_0064.html", "doc_type":"usermanual", - "p_code":"106", - "code":"141" + "p_code":"157", + "code":"163" }, { "desc":"The Flink/Spark UI was displayed with incomplete information.When the queue is used to run a job, the system releases the cluster and takes about 10 minutes to create a n", @@ -1274,8 +1472,8 @@ "title":"Why Is Information Displayed on the FlinkUI/Spark UI Page Incomplete?", "uri":"dli_03_0235.html", "doc_type":"usermanual", - "p_code":"106", - "code":"142" + "p_code":"157", + "code":"164" }, { "desc":"JobManager and TaskManager heartbeats timed out. As a result, the Flink job is abnormal.Check whether the network is intermittently disconnected and whether the cluster l", @@ -1283,8 +1481,8 @@ "title":"Why Is the Flink Job Abnormal Due to Heartbeat Timeout Between JobManager and TaskManager?", "uri":"dli_03_0236.html", "doc_type":"usermanual", - "p_code":"106", - "code":"143" + "p_code":"157", + "code":"165" }, { "desc":"Test address connectivity.If the network is unreachable, rectify the network connection first. Ensure that the network connection between the DLI queue and the external d", @@ -1292,8 +1490,8 @@ "title":"Why Is Error \"Timeout expired while fetching topic metadata\" Repeatedly Reported in Flink JobManager Logs?", "uri":"dli_03_0265.html", "doc_type":"usermanual", - "p_code":"106", - "code":"144" + "p_code":"157", + "code":"166" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1301,8 +1499,17 @@ "title":"Problems Related to SQL Jobs", "uri":"dli_03_0020.html", "doc_type":"usermanual", - "p_code":"105", - "code":"145" + "p_code":"122", + "code":"167" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Usage", + "uri":"dli_03_0216.html", + "doc_type":"usermanual", + "p_code":"167", + "code":"168" }, { "desc":"A temporary table is used to store intermediate results. When a transaction or session ends, the data in the temporary table can be automatically deleted. For example, in", @@ -1310,8 +1517,17 @@ "title":"SQL Jobs", "uri":"dli_03_0200.html", "doc_type":"usermanual", - "p_code":"145", - "code":"146" + "p_code":"168", + "code":"169" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Job Development", + "uri":"dli_03_0204.html", + "doc_type":"usermanual", + "p_code":"167", + "code":"170" }, { "desc":"If a large number of small files are generated during SQL execution, job execution and table query will take a long time. In this case, you should merge small files.Set t", @@ -1319,8 +1535,8 @@ "title":"How Do I Merge Small Files?", "uri":"dli_03_0086.html", "doc_type":"usermanual", - "p_code":"145", - "code":"147" + "p_code":"170", + "code":"171" }, { "desc":"When creating an OBS table, you must specify a table path in the database. The path format is as follows: obs://xxx/database name/table name.If the specified path is akdc", @@ -1328,8 +1544,8 @@ "title":"How Do I Specify an OBS Path When Creating an OBS Table?", "uri":"dli_03_0092.html", "doc_type":"usermanual", - "p_code":"145", - "code":"148" + "p_code":"170", + "code":"172" }, { "desc":"DLI allows you to associate JSON data in an OBS bucket to create tables in asynchronous mode.The statement for creating the table is as follows:", @@ -1337,8 +1553,8 @@ "title":"How Do I Create a Table Using JSON Data in an OBS Bucket?", "uri":"dli_03_0108.html", "doc_type":"usermanual", - "p_code":"145", - "code":"149" + "p_code":"170", + "code":"173" }, { "desc":"You can use the where condition statement in the select statement to filter data. For example:", @@ -1346,8 +1562,8 @@ "title":"How Do I Set Local Variables in SQL Statements?", "uri":"dli_03_0087.html", "doc_type":"usermanual", - "p_code":"145", - "code":"150" + "p_code":"170", + "code":"174" }, { "desc":"The correct method for using the count function to perform aggregation is as follows:OrIf an incorrect method is used, an error will be reported.", @@ -1355,8 +1571,8 @@ "title":"How Can I Use the count Function to Perform Aggregation?", "uri":"dli_03_0069.html", "doc_type":"usermanual", - "p_code":"145", - "code":"151" + "p_code":"170", + "code":"175" }, { "desc":"You can use the cross-region replication function of OBS. 
The procedure is as follows:Export the DLI table data in region 1 to the user-defined OBS bucket.Use the OBS cro", @@ -1364,8 +1580,8 @@ "title":"How Do I Synchronize DLI Table Data from One Region to Another?", "uri":"dli_03_0072.html", "doc_type":"usermanual", - "p_code":"145", - "code":"152" + "p_code":"170", + "code":"176" }, { "desc":"Currently, DLI does not allow you to insert table data into specific fields. To insert table data, you must insert data of all table fields at a time.", @@ -1373,8 +1589,17 @@ "title":"How Do I Insert Table Data into Specific Fields of a Table Using a SQL Job?", "uri":"dli_03_0191.html", "doc_type":"usermanual", - "p_code":"145", - "code":"153" + "p_code":"170", + "code":"177" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Job O&M Errors", + "uri":"dli_03_0206.html", + "doc_type":"usermanual", + "p_code":"167", + "code":"178" }, { "desc":"Create an OBS directory with a unique name. Alternatively, you can manually delete the existing OBS directory and submit the job again. However, exercise caution when del", @@ -1382,8 +1607,8 @@ "title":"Why Is Error \"path obs://xxx already exists\" Reported When Data Is Exported to OBS?", "uri":"dli_03_0014.html", "doc_type":"usermanual", - "p_code":"145", - "code":"154" + "p_code":"178", + "code":"179" }, { "desc":"This message indicates that the two tables to be joined contain the same column, but the owner of the column is not specified when the command is executed.For example, ta", @@ -1391,8 +1616,8 @@ "title":"Why Is Error \"SQL_ANALYSIS_ERROR: Reference 't.id' is ambiguous, could be: t.id, t.id.;\" Displayed When Two Tables Are Joined?", "uri":"dli_03_0066.html", "doc_type":"usermanual", - "p_code":"145", - "code":"155" + "p_code":"178", + "code":"180" }, { "desc":"Check if your account is in arrears and top it up if necessary.If the same error message persists after the top-up, log out of your account and log back in.", @@ -1400,8 +1625,8 @@ "title":"Why Is Error \"The current account does not have permission to perform this operation,the current account was restricted. Restricted for no budget.\" Reported when a SQL Statement Is Executed?", "uri":"dli_03_0071.html", "doc_type":"usermanual", - "p_code":"145", - "code":"156" + "p_code":"178", + "code":"181" }, { "desc":"Cause AnalysisWhen you query the partitioned table XX.YYY, the partition column is not specified in the search criteria.A partitioned table can be queried only when the q", @@ -1409,8 +1634,8 @@ "title":"Why Is Error \"There should be at least one partition pruning predicate on partitioned table XX.YYY\" Reported When a Query Statement Is Executed?", "uri":"dli_03_0145.html", "doc_type":"usermanual", - "p_code":"145", - "code":"157" + "p_code":"178", + "code":"182" }, { "desc":"The following error message is displayed when the LOAD DATA command is executed by a Spark SQL job to import data to a DLI table:In some cases ,the following error messag", @@ -1418,8 +1643,8 @@ "title":"Why Is Error \"IllegalArgumentException: Buffer size too small. 
size\" Reported When Data Is Loaded to an OBS Foreign Table?", "uri":"dli_03_0169.html", "doc_type":"usermanual", - "p_code":"145", - "code":"158" + "p_code":"178", + "code":"183" }, { "desc":"An error is reported during SQL job execution:Please contact DLI service. DLI.0002: FileNotFoundException: getFileStatus on obs://xxx: status [404]Check whether there is ", @@ -1427,8 +1652,8 @@ "title":"Why Is Error \"DLI.0002 FileNotFoundException\" Reported During SQL Job Running?", "uri":"dli_03_0189.html", "doc_type":"usermanual", - "p_code":"145", - "code":"159" + "p_code":"178", + "code":"184" }, { "desc":"Currently, DLI supports the Hive syntax for creating tables of the TEXTFILE, SEQUENCEFILE, RCFILE, ORC, AVRO, and PARQUET file types. If the file format specified for cre", @@ -1436,8 +1661,8 @@ "title":"Why Is a Schema Parsing Error Reported When I Create a Hive Table Using CTAS?", "uri":"dli_03_0046.html", "doc_type":"usermanual", - "p_code":"145", - "code":"160" + "p_code":"178", + "code":"185" }, { "desc":"When you run a DLI SQL script on DataArts Studio, the log shows that the statements fail to be executed. The error information is as follows:DLI.0999: RuntimeException: o", @@ -1445,8 +1670,8 @@ "title":"Why Is Error \"org.apache.hadoop.fs.obs.OBSIOException\" Reported When I Run DLI SQL Scripts on DataArts Studio?", "uri":"dli_03_0173.html", "doc_type":"usermanual", - "p_code":"145", - "code":"161" + "p_code":"178", + "code":"186" }, { "desc":"After the migration job is submitted, the following error information is displayed in the log:org.apache.sqoop.common.SqoopException:UQUERY_CONNECTOR_0001:Invoke DLI serv", @@ -1454,8 +1679,8 @@ "title":"Why Is Error \"UQUERY_CONNECTOR_0001:Invoke DLI service api failed\" Reported in the Job Log When I Use CDM to Migrate Data to DLI?", "uri":"dli_03_0172.html", "doc_type":"usermanual", - "p_code":"145", - "code":"162" + "p_code":"178", + "code":"187" }, { "desc":"Error message \"File not Found\" is displayed when a SQL job is accessed.Generally, the file cannot be found due to a read/write conflict. 
Check whether a job is overwritin", @@ -1463,8 +1688,8 @@ "title":"Why Is Error \"File not Found\" Reported When I Access a SQL Job?", "uri":"dli_03_0207.html", "doc_type":"usermanual", - "p_code":"145", - "code":"163" + "p_code":"178", + "code":"188" }, { "desc":"Error message \"DLI.0003: AccessControlException XXX\" is reported when a SQL job is accessed.Check the OBS bucket written in the AccessControlException to confirm if your ", @@ -1472,8 +1697,8 @@ "title":"Why Is Error \"DLI.0003: AccessControlException XXX\" Reported When I Access a SQL Job?", "uri":"dli_03_0208.html", "doc_type":"usermanual", - "p_code":"145", - "code":"164" + "p_code":"178", + "code":"189" }, { "desc":"Error message \"DLI.0001: org.apache.hadoop.security.AccessControlException: verifyBucketExists on {{bucket name}}: status [403]\" is reported when a SQL job is accessed.Yo", @@ -1481,8 +1706,8 @@ "title":"Why Is Error \"DLI.0001: org.apache.hadoop.security.AccessControlException: verifyBucketExists on {{bucket name}}: status [403]\" Reported When I Access a SQL Job?", "uri":"dli_03_0209.html", "doc_type":"usermanual", - "p_code":"145", - "code":"165" + "p_code":"178", + "code":"190" }, { "desc":"Error message \"The current account does not have permission to perform this operation,the current account was restricted.\" is reported during SQL statement execution.Chec", @@ -1490,8 +1715,17 @@ "title":"Why Is Error \"The current account does not have permission to perform this operation,the current account was restricted. Restricted for no budget\" Reported During SQL Statement Execution?", "uri":"dli_03_0210.html", "doc_type":"usermanual", - "p_code":"145", - "code":"166" + "p_code":"178", + "code":"191" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"O&M Guide", + "uri":"dli_03_0211.html", + "doc_type":"usermanual", + "p_code":"167", + "code":"192" }, { "desc":"If the job runs slowly, perform the following steps to find the causes and rectify the fault:Check whether the problem is caused by FullGC.Log in to the DLI console. 
In t", @@ -1499,8 +1733,8 @@ "title":"How Do I Troubleshoot Slow SQL Jobs?", "uri":"dli_03_0196.html", "doc_type":"usermanual", - "p_code":"145", - "code":"167" + "p_code":"192", + "code":"193" }, { "desc":"You can view SQL job logs for routine O&M.Obtain the ID of the DLI job executed on the DataArts Studio console.Job IDOn the DLI console, choose Job Management > SQL Jobs.", @@ -1508,8 +1742,8 @@ "title":"How Do I View DLI SQL Logs?", "uri":"dli_03_0091.html", "doc_type":"usermanual", - "p_code":"145", - "code":"168" + "p_code":"192", + "code":"194" }, { "desc":"You can view the job execution records when a job is running.Log in to the DLI management console.In the navigation pane on the left, choose Job Management > SQL Jobs.Ent", @@ -1517,8 +1751,8 @@ "title":"How Do I View SQL Execution Records?", "uri":"dli_03_0116.html", "doc_type":"usermanual", - "p_code":"145", - "code":"169" + "p_code":"192", + "code":"195" }, { "desc":"If the execution of an SQL statement takes a long time, you need to access the Spark UI to check the execution status.If data skew occurs, the running time of a stage exc", @@ -1526,8 +1760,8 @@ "title":"How Do I Eliminate Data Skew by Configuring AE Parameters?", "uri":"dli_03_0093.html", "doc_type":"usermanual", - "p_code":"145", - "code":"170" + "p_code":"192", + "code":"196" }, { "desc":"A DLI table exists but cannot be queried on the DLI console.If a table exists but cannot be queried, there is a high probability that the current user does not have the p", @@ -1535,8 +1769,8 @@ "title":"What Can I Do If a Table Cannot Be Queried on the DLI Console?", "uri":"dli_03_0184.html", "doc_type":"usermanual", - "p_code":"145", - "code":"171" + "p_code":"192", + "code":"197" }, { "desc":"A high compression ratio of OBS tables in the Parquet or ORC format (for example, a compression ratio of 5 or higher compared with text compression) will lead to large da", @@ -1544,8 +1778,8 @@ "title":"The Compression Ratio of OBS Tables Is Too High", "uri":"dli_03_0013.html", "doc_type":"usermanual", - "p_code":"145", - "code":"172" + "p_code":"192", + "code":"198" }, { "desc":"DLI supports only UTF-8-encoded texts. Ensure that data is encoded using UTF-8 during table creation and import.", @@ -1553,8 +1787,8 @@ "title":"How Can I Avoid Garbled Characters Caused by Inconsistent Character Codes?", "uri":"dli_03_0009.html", "doc_type":"usermanual", - "p_code":"145", - "code":"173" + "p_code":"192", + "code":"199" }, { "desc":"User A created the testTable table in a database through a SQL job and granted user B the permission to insert and delete table data. User A deleted the testTable table a", @@ -1562,8 +1796,8 @@ "title":"Do I Need to Grant Table Permissions to a User and Project After I Delete a Table and Create One with the Same Name?", "uri":"dli_03_0175.html", "doc_type":"usermanual", - "p_code":"145", - "code":"174" + "p_code":"192", + "code":"200" }, { "desc":"A CSV file is imported to a DLI partitioned table, but the imported file data does not contain the data in the partitioning column. The partitioning column needs to be sp", @@ -1571,8 +1805,8 @@ "title":"Why Can't I Query Table Data After Data Is Imported to a DLI Partitioned Table Because the File to Be Imported Does Not Contain Data in the Partitioning Column?", "uri":"dli_03_0177.html", "doc_type":"usermanual", - "p_code":"145", - "code":"175" + "p_code":"192", + "code":"201" }, { "desc":"When an OBS foreign table is created, a field in the specified OBS file contains a carriage return line feed (CRLF) character. 
As a result, the data is incorrect.The stat", @@ -1580,8 +1814,8 @@ "title":"How Do I Fix the Data Error Caused by CRLF Characters in a Field of the OBS File Used to Create an External OBS Table?", "uri":"dli_03_0181.html", "doc_type":"usermanual", - "p_code":"145", - "code":"176" + "p_code":"192", + "code":"202" }, { "desc":"A SQL job contains join operations. After the job is submitted, it is stuck in the Running state and no result is returned.When a Spark SQL job has join operations on sma", @@ -1589,8 +1823,8 @@ "title":"Why Does a SQL Job That Has Join Operations Stay in the Running State?", "uri":"dli_03_0182.html", "doc_type":"usermanual", - "p_code":"145", - "code":"177" + "p_code":"192", + "code":"203" }, { "desc":"The on clause was not added to the SQL statement for joining tables. As a result, the Cartesian product query occurs due to multi-table association, and the queue resourc", @@ -1598,8 +1832,8 @@ "title":"The on Clause Is Not Added When Tables Are Joined. Cartesian Product Query Causes High Resource Usage of the Queue, and the Job Fails to Be Executed", "uri":"dli_03_0187.html", "doc_type":"usermanual", - "p_code":"145", - "code":"178" + "p_code":"192", + "code":"204" }, { "desc":"Partition data is manually uploaded to a partition of an OBS table. However, the data cannot be queried using the DLI SQL editor.After manually adding partition data, you nee", @@ -1607,8 +1841,8 @@ "title":"Why Can't I Query Data After I Manually Add Data to the Partition Directory of an OBS Table?", "uri":"dli_03_0190.html", "doc_type":"usermanual", - "p_code":"145", - "code":"179" + "p_code":"192", + "code":"205" }, { "desc":"To dynamically overwrite the specified partitioned data in the DataSource table, set dli.sql.dynamicPartitionOverwrite.enabled to true and then run the insert overwrite s", @@ -1616,8 +1850,8 @@ "title":"Why Is All Data Overwritten When insert overwrite Is Used to Overwrite a Partitioned Table?", "uri":"dli_03_0212.html", "doc_type":"usermanual", - "p_code":"145", - "code":"180" + "p_code":"192", + "code":"206" }, { "desc":"The possible causes and solutions are as follows:After you purchase a DLI queue and submit a SQL job for the first time, wait for 5 to 10 minutes. 
After the cluster is st", @@ -1625,8 +1859,8 @@ "title":"Why Is a SQL Job Stuck in the Submitting State?", "uri":"dli_03_0213.html", "doc_type":"usermanual", - "p_code":"145", - "code":"181" + "p_code":"192", + "code":"207" }, { "desc":"Spark does not have the datetime type and uses the TIMESTAMP type instead.You can use a function to convert data types.The following is an example.select cast(create_date", @@ -1634,8 +1868,8 @@ "title":"Why Is the create_date Field in the RDS Table a Timestamp in the DLI Query Result?", "uri":"dli_03_0214.html", "doc_type":"usermanual", - "p_code":"145", - "code":"182" + "p_code":"192", + "code":"208" }, { "desc":"If the table name is changed immediately after SQL statements are executed, the data size of the table may be incorrect.If you need to change the table name, change it 5 ", @@ -1643,8 +1877,8 @@ "title":"What Can I Do If datasize Cannot Be Changed After the Table Name Is Changed in a Finished SQL Job?", "uri":"dli_03_0215.html", "doc_type":"usermanual", - "p_code":"145", - "code":"183" + "p_code":"192", + "code":"209" }, { "desc":"When DLI is used to insert data into an OBS temporary table, only part of the data is imported.Possible causes are as follows:The amount of data read during job execution is ", @@ -1652,8 +1886,8 @@ "title":"Why Does the Data Volume Change When Data Is Imported from DLI to OBS?", "uri":"dli_03_0231.html", "doc_type":"usermanual", - "p_code":"145", - "code":"184" + "p_code":"192", + "code":"210" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1661,8 +1895,17 @@ "title":"Problems Related to Spark Jobs", "uri":"dli_03_0021.html", "doc_type":"usermanual", - "p_code":"105", - "code":"185" + "p_code":"122", + "code":"211" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Usage", + "uri":"dli_03_0163.html", + "doc_type":"usermanual", + "p_code":"211", + "code":"212" }, { "desc":"DLI Spark does not support job scheduling. You can use other services, such as DataArts Studio, or use APIs or SDKs to customize job schedules.The Spark SQL syntax does no", @@ -1670,8 +1913,17 @@ "title":"Spark Jobs", "uri":"dli_03_0201.html", "doc_type":"usermanual", - "p_code":"185", - "code":"186" + "p_code":"212", + "code":"213" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Job Development", + "uri":"dli_03_0217.html", + "doc_type":"usermanual", + "p_code":"211", + "code":"214" }, { "desc":"To use Spark to write data into a DLI table, configure the following parameters:fs.obs.access.keyfs.obs.secret.keyfs.obs.implfs.obs.endpointThe following is an example:", @@ -1679,8 +1931,8 @@ "title":"How Do I Use Spark to Write Data into a DLI Table?", "uri":"dli_03_0107.html", "doc_type":"usermanual", - "p_code":"185", - "code":"187" + "p_code":"214", + "code":"215" }, { "desc":"Hard-coded or plaintext AK and SK pose significant security risks. To ensure security, encrypt your AK and SK, store them in configuration files or environment variables,", @@ -1688,8 +1940,8 @@ "title":"How Do I Set the AK/SK for a Queue to Operate an OBS Table?", "uri":"dli_03_0017.html", "doc_type":"usermanual", - "p_code":"185", - "code":"188" + "p_code":"214", + "code":"216" }, { "desc":"Log in to the DLI console. In the navigation pane, choose Job Management > Spark Jobs. In the job list, locate the target job and click next to Job ID to view the parame", @@ -1697,8 +1949,8 @@ "title":"How Do I View the Resource Usage of DLI Spark Jobs?", "uri":"dli_03_0102.html", "doc_type":"usermanual", - "p_code":"185", - "code":"189" + "p_code":"214", + "code":"217" }, { "desc":"If the pymysql module is missing, check whether the corresponding EGG package exists. If the package does not exist, upload the pyFile package on the Package Management p", @@ -1706,8 +1958,8 @@ "title":"How Do I Use Python Scripts to Access the MySQL Database If the pymysql Module Is Missing from the Spark Job Results Stored in MySQL?", "uri":"dli_03_0076.html", "doc_type":"usermanual", - "p_code":"185", - "code":"190" + "p_code":"214", + "code":"218" }, { "desc":"DLI natively supports PySpark.For most cases, Python is preferred for data analysis, and PySpark is the best choice for big data analysis. 
Generally, JVM programs are pac", @@ -1715,8 +1967,8 @@ "title":"How Do I Run a Complex PySpark Program in DLI?", "uri":"dli_03_0082.html", "doc_type":"usermanual", - "p_code":"185", - "code":"191" + "p_code":"214", + "code":"219" }, { "desc":"You can use DLI Spark jobs to access data in the MySQL database using either of the following methods:Solution 1: Buy a queue, create an enhanced datasource connection, a", @@ -1724,8 +1976,8 @@ "title":"How Does a Spark Job Access a MySQL Database?", "uri":"dli_03_0127.html", "doc_type":"usermanual", - "p_code":"185", - "code":"192" + "p_code":"214", + "code":"220" }, { "desc":"When shuffle statements, such as GROUP BY and JOIN, are executed in Spark jobs, data skew occurs, which slows down the job execution.To solve this problem, you can config", @@ -1733,8 +1985,8 @@ "title":"How Do I Use JDBC to Set the spark.sql.shuffle.partitions Parameter to Improve the Task Concurrency?", "uri":"dli_03_0068.html", "doc_type":"usermanual", - "p_code":"185", - "code":"193" + "p_code":"214", + "code":"221" }, { "desc":"You can use SparkFiles to read the file submitted using --file from a local path: SparkFiles.get(\"Name of the uploaded file\").The file path in the Driver is different fro", @@ -1742,8 +1994,17 @@ "title":"How Do I Read Uploaded Files for a Spark Jar Job?", "uri":"dli_03_0118.html", "doc_type":"usermanual", - "p_code":"185", - "code":"194" + "p_code":"214", + "code":"222" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Job O&M Errors", + "uri":"dli_03_0218.html", + "doc_type":"usermanual", + "p_code":"211", + "code":"223" }, { "desc":"The following error is reported when a Spark job accesses OBS data:Set the AK/SK to enable Spark jobs to access OBS data. For details, see How Do I Set the AK/SK for a Qu", @@ -1751,8 +2012,8 @@ "title":"Why Are Errors \"ResponseCode: 403\" and \"ResponseStatus: Forbidden\" Reported When a Spark Job Accesses OBS Data?", "uri":"dli_03_0156.html", "doc_type":"usermanual", - "p_code":"185", - "code":"195" + "p_code":"223", + "code":"224" }, { "desc":"Check whether the OBS bucket is used to store DLI logs on the Global Configuration > Job Configurations page. The job log bucket cannot be used for other purposes.", @@ -1760,8 +2021,8 @@ "title":"Why Is Error \"verifyBucketExists on XXXX: status [403]\" Reported When I Use a Spark Job to Access an OBS Bucket That I Have Access Permission To?", "uri":"dli_03_0164.html", "doc_type":"usermanual", - "p_code":"185", - "code":"196" + "p_code":"223", + "code":"225" }, { "desc":"When a Spark job accesses a large amount of data, for example, accessing data in a GaussDB(DWS) database, you are advised to set the number of concurrent tasks and enable", @@ -1769,8 +2030,8 @@ "title":"Why Is a Job Running Timeout Reported When a Spark Job Runs a Large Amount of Data?", "uri":"dli_03_0157.html", "doc_type":"usermanual", - "p_code":"185", - "code":"197" + "p_code":"223", + "code":"226" }, { "desc":"Spark jobs cannot access SFTP. 
Upload the files you want to access to OBS and then you can analyze the data using Spark jobs.", @@ -1778,8 +2039,8 @@ "title":"Why Does the Job Fail to Be Executed and the Log Shows that the File Directory Is Abnormal When I Use a Spark Job to Access Files in SFTP?", "uri":"dli_03_0188.html", "doc_type":"usermanual", - "p_code":"185", - "code":"198" + "p_code":"223", + "code":"227" }, { "desc":"When a Spark job is running, an error message is displayed, indicating that the user does not have the database permission. The error information is as follows:org.apache", @@ -1787,8 +2048,17 @@ "title":"Why Does the Job Fail to Be Executed Due to Insufficient Database and Table Permissions?", "uri":"dli_03_0192.html", "doc_type":"usermanual", - "p_code":"185", - "code":"199" + "p_code":"223", + "code":"228" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"O&M Guide", + "uri":"dli_03_0219.html", + "doc_type":"usermanual", + "p_code":"211", + "code":"229" }, { "desc":"I cannot find the specified Python environment after adding the Python 3 package.Set spark.yarn.appMasterEnv.PYSPARK_PYTHON to python3 in the conf file to specify the Pyt", @@ -1796,8 +2066,8 @@ "title":"Why Can't I Find the Specified Python Environment After Adding the Python Package?", "uri":"dli_03_0077.html", "doc_type":"usermanual", - "p_code":"185", - "code":"200" + "p_code":"229", + "code":"230" }, { "desc":"The remaining CUs in the queue may be insufficient. As a result, the job cannot be submitted.To view the remaining CUs of a queue, perform the following steps:Check the C", @@ -1805,8 +2075,8 @@ "title":"Why Is a Spark Jar Job Stuck in the Submitting State?", "uri":"dli_03_0220.html", "doc_type":"usermanual", - "p_code":"185", - "code":"201" + "p_code":"229", + "code":"231" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1814,8 +2084,17 @@ "title":"Product Consultation", "uri":"dli_03_0001.html", "doc_type":"usermanual", - "p_code":"105", - "code":"202" + "p_code":"122", + "code":"232" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Usage", + "uri":"dli_03_0221.html", + "doc_type":"usermanual", + "p_code":"232", + "code":"233" }, { "desc":"Data Lake Insight (DLI) is a serverless data processing and analysis service fully compatible with Apache Spark and Apache Flink ecosystems. 
It frees you from managing an", @@ -1823,8 +2102,8 @@ "title":"What Is DLI?", "uri":"dli_03_0002.html", "doc_type":"usermanual", - "p_code":"202", - "code":"203" + "p_code":"233", + "code":"234" }, { "desc":"DLI supports the following data formats:ParquetCSVORCJsonAvro", @@ -1832,8 +2111,8 @@ "title":"Which Data Formats Does DLI Support?", "uri":"dli_03_0025.html", "doc_type":"usermanual", - "p_code":"202", - "code":"204" + "p_code":"233", + "code":"235" }, { "desc":"The Spark component of DLI is a fully managed service. You can only use the DLI Spark through its APIs. The Spark component of MRS is built on the VM in an MRS cluster. ", @@ -1841,8 +2120,8 @@ "title":"What Are the Differences Between MRS Spark and DLI Spark?", "uri":"dli_03_0115.html", "doc_type":"usermanual", - "p_code":"202", - "code":"205" + "p_code":"233", + "code":"236" }, { "desc":"DLI data can be stored in either of the following:OBS: Data used by SQL jobs, Spark jobs, and Flink jobs can be stored in OBS, reducing storage costs.DLI: The column-base", @@ -1850,8 +2129,8 @@ "title":"Where Can DLI Data Be Stored?", "uri":"dli_03_0029.html", "doc_type":"usermanual", - "p_code":"202", - "code":"206" + "p_code":"233", + "code":"237" }, { "desc":"DLI tables store data within the DLI service, and you do not need to know the data storage path.OBS tables store data in your OBS buckets, and you need to manage the sour", @@ -1859,8 +2138,8 @@ "title":"What Are the Differences Between DLI Tables and OBS Tables?", "uri":"dli_03_0117.html", "doc_type":"usermanual", - "p_code":"202", - "code":"207" + "p_code":"233", + "code":"238" }, { "desc":"Currently, DLI supports analysis only on the data uploaded to the cloud. In scenarios where regular (for example, on a per day basis) one-off analysis on incremental data", @@ -1868,8 +2147,8 @@ "title":"How Can I Use DLI If Data Is Not Uploaded to OBS?", "uri":"dli_03_0010.html", "doc_type":"usermanual", - "p_code":"202", - "code":"208" + "p_code":"233", + "code":"239" }, { "desc":"Data in the OBS bucket shared by IAM users under the same account can be imported. You cannot import data from an OBS bucket shared by another IAM account.", @@ -1877,8 +2156,8 @@ "title":"Can I Import OBS Bucket Data Shared by Other Tenants into DLI?", "uri":"dli_03_0129.html", "doc_type":"usermanual", - "p_code":"202", - "code":"209" + "p_code":"233", + "code":"240" }, { "desc":"Log in to the management console.Click in the upper left corner and select a region and a project.Click the My Quota icon in the upper right corner of the page.The Serv", @@ -1886,8 +2165,8 @@ "title":"Why Is Error \"Failed to create the database. {\"error_code\":\"DLI.1028\";\"error_msg\":\"Already reached the maximum quota of databases:XXX\".\" Reported?", "uri":"dli_03_0264.html", "doc_type":"usermanual", - "p_code":"202", - "code":"210" + "p_code":"233", + "code":"241" }, { "desc":"No, a global variable can only be used by the user who created it. Global variables can be used to simplify complex parameters. For example, long and difficult variables ", @@ -1895,8 +2174,17 @@ "title":"Can a Member Account Use Global Variables Created by Other Member Accounts?", "uri":"dli_03_0263.html", "doc_type":"usermanual", - "p_code":"202", - "code":"211" + "p_code":"233", + "code":"242" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Job Management", + "uri":"dli_03_0222.html", + "doc_type":"usermanual", + "p_code":"232", + "code":"243" }, { "desc":"You are advised to perform the following operations to run a large number of DLI jobs:Group the DLI jobs by type, and run each group on a queue.Alternatively, create IAM", @@ -1904,8 +2192,8 @@ "title":"How Do I Manage Tens of Thousands of Jobs Running on DLI?", "uri":"dli_03_0126.html", "doc_type":"usermanual", - "p_code":"202", - "code":"212" + "p_code":"243", + "code":"244" }, { "desc":"The field names of tables that have been created cannot be changed.You can create a table, define new table fields, and migrate data from the old table to the new one.", @@ -1913,8 +2201,17 @@ "title":"How Do I Change the Name of a Field in a Created Table?", "uri":"dli_03_0162.html", "doc_type":"usermanual", - "p_code":"202", - "code":"213" + "p_code":"243", + "code":"245" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Privacy and Security", + "uri":"dli_03_0261.html", + "doc_type":"usermanual", + "p_code":"232", + "code":"246" }, { "desc":"No. The spark.acls.enable configuration item is not used in DLI. The Apache Spark command injection vulnerability (CVE-2022-33891) does not exist in DLI.", @@ -1922,8 +2219,8 @@ "title":"Does DLI Have the Apache Spark Command Injection Vulnerability (CVE-2022-33891)?", "uri":"dli_03_0260.html", "doc_type":"usermanual", - "p_code":"202", - "code":"214" + "p_code":"246", + "code":"247" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1931,8 +2228,8 @@ "title":"Quota", "uri":"dli_03_0053.html", "doc_type":"usermanual", - "p_code":"105", - "code":"215" + "p_code":"122", + "code":"248" }, { "desc":"Log in to the management console.Click in the upper left corner and select Region and Project.Click (the My Quotas icon) in the upper right corner.The Service Quota pag", @@ -1940,8 +2237,8 @@ "title":"How Do I View My Quotas?", "uri":"dli_03_0031.html", "doc_type":"usermanual", - "p_code":"215", - "code":"216" + "p_code":"248", + "code":"249" }, { "desc":"The system does not support online quota adjustment. To increase a resource quota, dial the hotline or send an email to customer service. We will process your applica", @@ -1949,8 +2246,8 @@ "title":"How Do I Increase a Quota?", "uri":"dli_03_0032.html", "doc_type":"usermanual", - "p_code":"215", - "code":"217" + "p_code":"248", + "code":"250" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1958,8 +2255,17 @@ "title":"Permission", "uri":"dli_03_0054.html", "doc_type":"usermanual", - "p_code":"105", - "code":"218" + "p_code":"122", + "code":"251" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Usage", + "uri":"dli_03_0223.html", + "doc_type":"usermanual", + "p_code":"251", + "code":"252" }, { "desc":"DLI has a comprehensive permission control mechanism and supports fine-grained authentication through Identity and Access Management (IAM). You can create policies in IAM", @@ -1967,8 +2273,8 @@ "title":"How Do I Manage Fine-Grained DLI Permissions?", "uri":"dli_03_0100.html", "doc_type":"usermanual", - "p_code":"218", - "code":"219" + "p_code":"252", + "code":"253" }, { "desc":"You cannot perform permission-related operations on the partition column of a partitioned table.However, when you grant the permission of any non-partition column in a pa", @@ -1976,8 +2282,17 @@ "title":"What Is Column Permission Granting of a DLI Partition Table?", "uri":"dli_03_0008.html", "doc_type":"usermanual", - "p_code":"218", - "code":"220" + "p_code":"252", + "code":"254" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"O&M Guide", + "uri":"dli_03_0226.html", + "doc_type":"usermanual", + "p_code":"251", + "code":"255" }, { "desc":"When you submit a job, a message is displayed indicating that the job fails to be submitted due to insufficient permission caused by arrears. In this case, you need to ch", @@ -1985,8 +2300,8 @@ "title":"Why Does My Account Have Insufficient Permissions Due to Arrears?", "uri":"dli_03_0140.html", "doc_type":"usermanual", - "p_code":"218", - "code":"221" + "p_code":"255", + "code":"256" }, { "desc":"When the user update an existing program package, the following error information is displayed:\"error_code\"*DLI.0003\",\"error_msg\":\"Permission denied for resource 'resourc", @@ -1994,8 +2309,8 @@ "title":"Why Does the System Display a Message Indicating Insufficient Permissions When I Update a Program Package?", "uri":"dli_03_0195.html", "doc_type":"usermanual", - "p_code":"218", - "code":"222" + "p_code":"255", + "code":"257" }, { "desc":"When the SQL query statement is executed, the system displays a message indicating that the user does not have the permission to query resources.Error information: DLI.00", @@ -2003,8 +2318,8 @@ "title":"Why Is Error \"DLI.0003: Permission denied for resource...\" Reported When I Run a SQL Statement?", "uri":"dli_03_0227.html", "doc_type":"usermanual", - "p_code":"218", - "code":"223" + "p_code":"255", + "code":"258" }, { "desc":"The table permission has been granted and verified. 
However, after a period of time, an error is reported indicating that the table query fails.There are two possible rea", @@ -2012,8 +2327,8 @@ "title":"Why Can't I Query Table Data After I've Been Granted Table Permissions?", "uri":"dli_03_0228.html", "doc_type":"usermanual", - "p_code":"218", - "code":"224" + "p_code":"255", + "code":"259" }, { "desc":"If a table inherits database permissions, you do not need to regrant the inherited permissions to the table.When you grant permissions on a table on the console:If you se", @@ -2021,8 +2336,8 @@ "title":"Will an Error Be Reported if the Inherited Permissions Are Regranted to a Table That Inherits Database Permissions?", "uri":"dli_03_0057.html", "doc_type":"usermanual", - "p_code":"218", - "code":"225" + "p_code":"255", + "code":"260" }, { "desc":"User A created Table1.User B created View1 based on Table1.After the Select Table permission on Table1 is granted to user C, user C fails to query View1.User B does not h", @@ -2030,8 +2345,8 @@ "title":"Why Can't I Query a View After I'm Granted the Select Table Permission on the View?", "uri":"dli_03_0067.html", "doc_type":"usermanual", - "p_code":"218", - "code":"226" + "p_code":"255", + "code":"261" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2039,8 +2354,17 @@ "title":"Queue", "uri":"dli_03_0049.html", "doc_type":"usermanual", - "p_code":"105", - "code":"227" + "p_code":"122", + "code":"262" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Usage", + "uri":"dli_03_0229.html", + "doc_type":"usermanual", + "p_code":"262", + "code":"263" }, { "desc":"Currently, you are not allowed to modify the description of a created queue. You can add the description when purchasing the queue.", @@ -2048,8 +2372,8 @@ "title":"Can the Description of a DLI Queue Be Modified?", "uri":"dli_03_0109.html", "doc_type":"usermanual", - "p_code":"227", - "code":"228" + "p_code":"263", + "code":"264" }, { "desc":"Deleting a queue does not cause table data loss in your database.", @@ -2057,8 +2381,8 @@ "title":"Will Table Data in My Database Be Lost If I Delete a Queue?", "uri":"dli_03_0166.html", "doc_type":"usermanual", - "p_code":"227", - "code":"229" + "p_code":"263", + "code":"265" }, { "desc":"You need to develop a mechanism to retry failed jobs. 
When a faulty queue is recovered, your application tries to submit the failed jobs to the queue again.", @@ -2066,8 +2390,8 @@ "title":"How Does DLI Ensure the Reliability of Spark Jobs When a Queue Is Abnormal?", "uri":"dli_03_0170.html", "doc_type":"usermanual", - "p_code":"227", - "code":"230" + "p_code":"263", + "code":"266" }, { "desc":"DLI allows you to subscribe to an SMN topic for failed jobs.Log in to the DLI console.In the navigation pane on the left, choose Queue Management.On the Queue Management ", @@ -2075,8 +2399,17 @@ "title":"How Do I Monitor Queue Exceptions?", "uri":"dli_03_0098.html", "doc_type":"usermanual", - "p_code":"227", - "code":"231" + "p_code":"263", + "code":"267" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"O&M Guide", + "uri":"dli_03_0230.html", + "doc_type":"usermanual", + "p_code":"262", + "code":"268" }, { "desc":"To check the running status of the DLI queue and determine whether to run more jobs on that queue, you need to check the queue load.Search for Cloud Eye on the console.In", @@ -2084,8 +2417,8 @@ "title":"How Do I View DLI Queue Load?", "uri":"dli_03_0095.html", "doc_type":"usermanual", - "p_code":"227", - "code":"232" + "p_code":"268", + "code":"269" }, { "desc":"You need to check the number of jobs in the Submitting and Running states on the queue.Use Cloud Eye to view jobs in different states on the queue. The procedure is", @@ -2093,8 +2426,8 @@ "title":"How Do I Determine Whether There Are Too Many Jobs in the Current Queue?", "uri":"dli_03_0183.html", "doc_type":"usermanual", - "p_code":"227", - "code":"233" + "p_code":"268", + "code":"270" }, { "desc":"Currently, DLI provides two types of queues, For SQL and For general use. SQL queues are used to run SQL jobs. General-use queues are compatible with Spark queues of earl", @@ -2102,8 +2435,8 @@ "title":"How Do I Switch an Earlier-Version Spark Queue to a General-Purpose Queue?", "uri":"dli_03_0065.html", "doc_type":"usermanual", - "p_code":"227", - "code":"234" + "p_code":"268", + "code":"271" }, { "desc":"DLI queues do not use resources or bandwidth when no job is running. In this case, the running status of DLI queues is not displayed on CES.", @@ -2111,8 +2444,8 @@ "title":"Why Can't I View the Resource Running Status of DLI Queues on Cloud Eye?", "uri":"dli_03_0193.html", "doc_type":"usermanual", - "p_code":"227", - "code":"235" + "p_code":"268", + "code":"272" }, { "desc":"In DLI, 64 CU = 64 cores and 256 GB memory.In a Spark job, if the driver occupies 4 cores and 16 GB memory, the executor can occupy 60 cores and 240 GB memory.", @@ -2120,8 +2453,8 @@ "title":"How Do I Allocate Queue Resources for Running Spark Jobs If I Have Purchased 64 CUs?", "uri":"dli_03_0088.html", "doc_type":"usermanual", - "p_code":"227", - "code":"236" + "p_code":"268", + "code":"273" }, { "desc":"Queue plans create failed. The plan xxx target cu is out of quota is displayed when you create a scheduled scaling task.The CU quota of the current account is insufficien", @@ -2129,8 +2462,8 @@ "title":"Why Is Error \"Queue plans create failed. 
The plan xxx target cu is out of quota\" Reported When I Schedule CU Changes?", "uri":"dli_03_0159.html", "doc_type":"usermanual", - "p_code":"227", - "code":"237" + "p_code":"268", + "code":"274" }, { "desc":"After a SQL job was submitted to the default queue, the job runs abnormally. The job log reported that the execution timed out. The exception logs are as follows:[ERROR] ", @@ -2138,8 +2471,17 @@ "title":"Why Is a Timeout Exception Reported When a DLI SQL Statement Fails to Be Executed on the Default Queue?", "uri":"dli_03_0171.html", "doc_type":"usermanual", - "p_code":"227", - "code":"238" + "p_code":"268", + "code":"275" + }, + { + "desc":"In daily big data analysis work, it is important to allocate and manage compute resources properly to provide a good job execution environment.You can allocate resources ", + "product_code":"dli", + "title":"How Can I Check the Actual and Used CUs for an Elastic Resource Pool as Well as the Required CUs for a Job?", + "uri":"dli_03_0276.html", + "doc_type":"usermanual", + "p_code":"268", + "code":"276" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2147,8 +2489,17 @@ "title":"Datasource Connections", "uri":"dli_03_0022.html", "doc_type":"usermanual", - "p_code":"105", - "code":"239" + "p_code":"122", + "code":"277" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Datasource Connections", + "uri":"dli_03_0110.html", + "doc_type":"usermanual", + "p_code":"277", + "code":"278" }, { "desc":"You need to create a VPC peering connection to enable network connectivity. Take MRS as an example. If DLI and MRS clusters are in the same VPC, and the security group is", @@ -2156,8 +2507,8 @@ "title":"Why Do I Need to Create a VPC Peering Connection for an Enhanced Datasource Connection?", "uri":"dli_03_0128.html", "doc_type":"usermanual", - "p_code":"239", - "code":"240" + "p_code":"278", + "code":"279" }, { "desc":"An enhanced datasource connection failed to pass the network connectivity test. Datasource connection cannot be bound to a queue. The following error information is displ", @@ -2165,8 +2516,8 @@ "title":"Failed to Bind a Queue to an Enhanced Datasource Connection", "uri":"dli_03_0237.html", "doc_type":"usermanual", - "p_code":"239", - "code":"241" + "p_code":"278", + "code":"280" }, { "desc":"The outbound rule had been configured for the security group of the queue associated with the enhanced datasource connection. The datasource authentication used a passwor", @@ -2174,8 +2525,8 @@ "title":"DLI Failed to Connect to GaussDB(DWS) Through an Enhanced Datasource Connection", "uri":"dli_03_0238.html", "doc_type":"usermanual", - "p_code":"239", - "code":"242" + "p_code":"278", + "code":"281" }, { "desc":"A datasource connection is created and bound to a queue. 
The connectivity test fails and the following error information is displayed:failed to connect to specified addre", @@ -2183,8 +2534,8 @@ "title":"How Do I Do if the Datasource Connection Is Created But the Network Connectivity Test Fails?", "uri":"dli_03_0179.html", "doc_type":"usermanual", - "p_code":"239", - "code":"243" + "p_code":"278", + "code":"282" }, { "desc":"Configuring the Connection Between a DLI Queue and a Data Source in a Private NetworkIf your DLI job needs to connect to a data source, for example, MRS, RDS, CSS, Kafka,", @@ -2192,8 +2543,8 @@ "title":"How Do I Configure the Network Between a DLI Queue and a Data Source?", "uri":"dli_03_0186.html", "doc_type":"usermanual", - "p_code":"239", - "code":"244" + "p_code":"278", + "code":"283" }, { "desc":"The possible causes and solutions are as follows:If you have created a queue, do not bind it to a datasource connection immediately. Wait for 5 to 10 minutes. After the c", @@ -2201,8 +2552,8 @@ "title":"What Can I Do If a Datasource Connection Is Stuck in Creating State When I Try to Bind a Queue to It?", "uri":"dli_03_0257.html", "doc_type":"usermanual", - "p_code":"239", - "code":"245" + "p_code":"278", + "code":"284" }, { "desc":"DLI enhanced datasource connection uses VPC peering to directly connect the VPC networks of the desired data sources for point-to-point data exchanges.", @@ -2210,8 +2561,17 @@ "title":"How Do I Connect DLI to Data Sources?", "uri":"dli_03_0259.html", "doc_type":"usermanual", - "p_code":"239", - "code":"246" + "p_code":"278", + "code":"285" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Cross-Source Analysis", + "uri":"dli_03_0112.html", + "doc_type":"usermanual", + "p_code":"277", + "code":"286" }, { "desc":"To perform query on data stored on services rather than DLI, perform the following steps:Assume that the data to be queried is stored on multiple services (for example, O", @@ -2219,8 +2579,8 @@ "title":"How Can I Perform Query on Data Stored on Services Rather Than DLI?", "uri":"dli_03_0011.html", "doc_type":"usermanual", - "p_code":"239", - "code":"247" + "p_code":"286", + "code":"287" }, { "desc":"Connect VPCs in different regions.Create an enhanced datasource connection on DLI and bind it to a queue.Add a DLI route.", @@ -2228,8 +2588,8 @@ "title":"How Can I Access Data Across Regions?", "uri":"dli_03_0085.html", "doc_type":"usermanual", - "p_code":"239", - "code":"248" + "p_code":"286", + "code":"288" }, { "desc":"When data is inserted into DLI, set the ID field to NULL.", @@ -2237,8 +2597,17 @@ "title":"How Do I Set the Auto-increment Primary Key or Other Fields That Are Automatically Filled in the RDS Table When Creating a DLI and Associating It with the RDS Table?", "uri":"dli_03_0028.html", "doc_type":"usermanual", - "p_code":"239", - "code":"249" + "p_code":"286", + "code":"289" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Datasource Connection O&M", + "uri":"dli_03_0256.html", + "doc_type":"usermanual", + "p_code":"277", + "code":"290" }, { "desc":"Possible CausesThe network connectivity is abnormal. Check whether the security group is correctly selected and whether the VPC is correctly configured.The network connec", @@ -2246,8 +2615,8 @@ "title":"Why Is the Error Message \"communication link failure\" Displayed When I Use a Newly Activated Datasource Connection?", "uri":"dli_03_0047.html", "doc_type":"usermanual", - "p_code":"239", - "code":"250" + "p_code":"290", + "code":"291" }, { "desc":"The cluster host information is not added to the datasource connection. As a result, the KRB authentication fails, the connection times out, and no error is recorded in l", @@ -2255,8 +2624,8 @@ "title":"Connection Times Out During MRS HBase Datasource Connection, and No Error Is Recorded in Logs", "uri":"dli_03_0080.html", "doc_type":"usermanual", - "p_code":"239", - "code":"251" + "p_code":"290", + "code":"292" }, { "desc":"When you create a VPC peering connection for the datasource connection, the following error information is displayed:Before you create a datasource connection, check whet", @@ -2264,8 +2633,8 @@ "title":"Why Can't I Find the Subnet When Creating a DLI Datasource Connection?", "uri":"dli_03_0111.html", "doc_type":"usermanual", - "p_code":"239", - "code":"252" + "p_code":"290", + "code":"293" }, { "desc":"A datasource RDS table was created in the DataArts Studio, and the insert overwrite statement was executed to write data into RDS. DLI.0999: BatchUpdateException: Incorre", @@ -2273,8 +2642,8 @@ "title":"Error Message \"Incorrect string value\" Is Displayed When insert overwrite Is Executed on a Datasource RDS Table", "uri":"dli_03_0239.html", "doc_type":"usermanual", - "p_code":"239", - "code":"253" + "p_code":"290", + "code":"294" }, { "desc":"The system failed to create a datasource RDS table, and null pointer error was reported.The following table creation statement was used:The RDS database is in a PostGre c", @@ -2282,8 +2651,8 @@ "title":"Null Pointer Error Is Displayed When the System Creates a Datasource RDS Table", "uri":"dli_03_0250.html", "doc_type":"usermanual", - "p_code":"239", - "code":"254" + "p_code":"290", + "code":"295" }, { "desc":"The system failed to execute insert overwrite on the datasource GaussDB(DWS) table, and org.postgresql.util.PSQLException: ERROR: tuple concurrently updated was displayed", @@ -2291,8 +2660,8 @@ "title":"Error Message \"org.postgresql.util.PSQLException: ERROR: tuple concurrently updated\" Is Displayed When the System Executes insert overwrite on a Datasource GaussDB(DWS) Table", "uri":"dli_03_0251.html", "doc_type":"usermanual", - "p_code":"239", - "code":"255" + "p_code":"290", + "code":"296" }, { "desc":"A datasource table was used to import data to a CloudTable HBase table. This HBase table contains a column family and a rowkey for 100 million simulating data records. Th", @@ -2300,8 +2669,8 @@ "title":"RegionTooBusyException Is Reported When Data Is Imported to a CloudTable HBase Table Through a Datasource Table", "uri":"dli_03_0252.html", "doc_type":"usermanual", - "p_code":"239", - "code":"256" + "p_code":"290", + "code":"297" }, { "desc":"A table was created on GaussDB(DWS) and then a datasource connection was created on DLI to read and write data. 
An error message was displayed during data writing, indica", @@ -2309,8 +2678,8 @@ "title":"A Null Value Is Written Into a Non-Null Field When a DLI Datasource Connection Is Used to Connect to a GaussDB(DWS) Table", "uri":"dli_03_0253.html", "doc_type":"usermanual", - "p_code":"239", - "code":"257" + "p_code":"290", + "code":"298" }, { "desc":"A datasource GaussDB(DWS) table and the datasource connection were created in DLI, and the schema of the source table in GaussDB(DWS) were updated. During the job executi", @@ -2318,8 +2687,8 @@ "title":"An Insert Operation Failed After the Schema of the GaussDB(DWS) Source Table Is Updated", "uri":"dli_03_0254.html", "doc_type":"usermanual", - "p_code":"239", - "code":"258" + "p_code":"290", + "code":"299" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2327,8 +2696,8 @@ "title":"APIs", "uri":"dli_03_0056.html", "doc_type":"usermanual", - "p_code":"105", - "code":"259" + "p_code":"122", + "code":"300" }, { "desc":"In the REST API provided by DLI, the request header can be added to the request URI, for example, Content-Type.Content-Type indicates the request body type or format. The", @@ -2336,8 +2705,8 @@ "title":"Why Is Error \"unsupported media Type\" Reported When I Subimt a SQL Job?", "uri":"dli_03_0060.html", "doc_type":"usermanual", - "p_code":"259", - "code":"260" + "p_code":"300", + "code":"301" }, { "desc":"When different IAM users call an API under the same enterprise project in the same region, the project ID is the same.", @@ -2345,8 +2714,8 @@ "title":"Is the Project ID Fixed when Different IAM Users Call an API?", "uri":"dli_03_0125.html", "doc_type":"usermanual", - "p_code":"259", - "code":"261" + "p_code":"300", + "code":"302" }, { "desc":"When the API call for submitting a SQL job times out, and the following error information is displayed:There are currently no resources tracked in the state, so there is ", @@ -2354,8 +2723,8 @@ "title":"What Can I Do If an Error Is Reported When the Execution of the API for Creating a SQL Job Times Out?", "uri":"dli_03_0178.html", "doc_type":"usermanual", - "p_code":"259", - "code":"262" + "p_code":"300", + "code":"303" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2363,8 +2732,8 @@ "title":"SDKs", "uri":"dli_03_0058.html", "doc_type":"usermanual", - "p_code":"105", - "code":"263" + "p_code":"122", + "code":"304" }, { "desc":"When you query the SQL job results using SDK, the system checks the job status when the job is submitted. The timeout interval set in the system is 300s. 
If the job is no", @@ -2372,8 +2741,8 @@ "title":"How Do I Set the Timeout Duration for Querying SQL Job Results Using SDK?", "uri":"dli_03_0073.html", "doc_type":"usermanual", - "p_code":"263", - "code":"264" + "p_code":"304", + "code":"305" }, { "desc":"Run the ping command to check whether dli.xxx can be accessed.If dli.xxx can be accessed, check whether DNS resolution is correctly configured.If dli.xxx can be accessed,", @@ -2381,8 +2750,8 @@ "title":"How Do I Handle the dli.xxx,unable to resolve host address Error?", "uri":"dli_03_0255.html", "doc_type":"usermanual", - "p_code":"263", - "code":"265" + "p_code":"304", + "code":"306" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2391,6 +2760,6 @@ "uri":"dli_01_00006.html", "doc_type":"usermanual", "p_code":"", - "code":"266" + "code":"307" } ] \ No newline at end of file diff --git a/docs/dli/umn/dli_01_00006.html b/docs/dli/umn/dli_01_00006.html index e5b29e84..2406d648 100644 --- a/docs/dli/umn/dli_01_00006.html +++ b/docs/dli/umn/dli_01_00006.html @@ -8,7 +8,13 @@ -
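The FAQ entry dli_03_0170.html above advises building your own mechanism that retries failed jobs and resubmits them once a faulty queue recovers. A minimal sketch of such a mechanism, assuming a caller-supplied submit_job callable; the function name and payload shape are illustrative, not part of any DLI SDK:

```python
import time

def submit_with_retry(submit_job, payload, max_attempts=5, base_delay_s=30):
    """Resubmit a job until it succeeds or attempts are exhausted.

    submit_job stands in for whatever client call your application uses
    (an SDK method, a REST request, ...); it should raise on failure.
    """
    for attempt in range(1, max_attempts + 1):
        try:
            return submit_job(payload)
        except Exception as err:  # narrow to your client's error type in practice
            if attempt == max_attempts:
                raise
            delay = base_delay_s * 2 ** (attempt - 1)  # exponential backoff
            print(f"attempt {attempt} failed ({err}); retrying in {delay}s")
            time.sleep(delay)
```

Exponential backoff gives a recovering queue time to become healthy instead of hammering it with immediate resubmissions.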
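The entry dli_03_0088.html states that 64 CUs correspond to 64 cores and 256 GB of memory, so a driver that occupies 4 cores and 16 GB leaves 60 cores and 240 GB for executors. The same arithmetic as a sketch; the 1 CU = 1 core + 4 GB ratio is taken from that description:

```python
# Per the FAQ: 64 CU = 64 cores and 256 GB memory, i.e. 1 CU = 1 core + 4 GB.
QUEUE_CUS = 64
CORES_PER_CU, MEM_PER_CU_GB = 1, 4

total_cores = QUEUE_CUS * CORES_PER_CU           # 64 cores
total_mem_gb = QUEUE_CUS * MEM_PER_CU_GB         # 256 GB

driver_cores, driver_mem_gb = 4, 16              # example driver footprint from the FAQ
executor_cores = total_cores - driver_cores      # 60 cores left for executors
executor_mem_gb = total_mem_gb - driver_mem_gb   # 240 GB left for executors

print(executor_cores, executor_mem_gb)           # 60 240
```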
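The entry dli_03_0060.html traces the "unsupported media Type" error to the Content-Type request header. A hedged example of setting it explicitly when calling the SQL-job submission API; the endpoint host, path, and body fields below are placeholders and should be checked against the API Reference for your region:

```python
import requests

# Placeholder values; substitute your region endpoint, project ID, token, and queue.
url = "https://dli.<region>.myhuaweicloud.com/v1.0/<project_id>/jobs/submit-job"
headers = {
    # Omitting or mistyping this header is the usual cause of "unsupported media Type".
    "Content-Type": "application/json;charset=UTF-8",
    "X-Auth-Token": "<token>",
}
body = {"sql": "SELECT 1", "queue_name": "default"}

resp = requests.post(url, headers=headers, json=body)
print(resp.status_code, resp.text)
```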
diff --git a/docs/dli/umn/dli_01_00006.html b/docs/dli/umn/dli_01_00006.html
index e5b29e84..2406d648 100644
--- a/docs/dli/umn/dli_01_00006.html
+++ b/docs/dli/umn/dli_01_00006.html
@@ -8,7 +8,13 @@
[Change history table; the HTML markup was lost during extraction. The hunk inserts a new 2024-04-28 row ("Added the following section:") above the existing 2024-02-27 row ("Added the following content:").]
diff --git a/docs/dli/umn/dli_01_0002.html b/docs/dli/umn/dli_01_0002.html
index e7aa6f36..57841538 100644
--- a/docs/dli/umn/dli_01_0002.html
+++ b/docs/dli/umn/dli_01_0002.html
@@ -22,8 +22,8 @@
[HTML markup lost during extraction. The hunk updates the "Step 2: Create a Queue" block of the getting-started walkthrough. Surviving text: "A queue is the basis for using DLI. Before executing an SQL job, you need to create a queue."; "Click OK."; "After a table is created, you can view and select the table for use on the Data Management page or SQL Editor page."; "(Optional) After a DLI table is created, you can decide whether to directly import data to the table."]
diff --git a/docs/dli/umn/dli_01_0006.html b/docs/dli/umn/dli_01_0006.html
index 3dc50590..b1e3aecf 100644
--- a/docs/dli/umn/dli_01_0006.html
+++ b/docs/dli/umn/dli_01_0006.html
@@ -65,7 +65,7 @@
[HTML markup lost during extraction. The hunk modifies one line in the "Deleting a Table" section, whose surviving text reads: "You can delete a table on either the Data Management page or the SQL Editor page."]
diff --git a/docs/dli/umn/dli_01_0012.html b/docs/dli/umn/dli_01_0012.html
index 31425337..b6ccd14f 100644
--- a/docs/dli/umn/dli_01_0012.html
+++ b/docs/dli/umn/dli_01_0012.html
@@ -12,6 +12,8 @@
[HTML markup lost during extraction. The hunk adds two lines; their text content did not survive.]
diff --git a/docs/dli/umn/dli_01_0013.html b/docs/dli/umn/dli_01_0013.html
index 236e45cd..62cd1b36 100644
--- a/docs/dli/umn/dli_01_0013.html
+++ b/docs/dli/umn/dli_01_0013.html
@@ -14,7 +14,7 @@
[HTML markup lost during extraction. The hunk modifies one line in a list whose surviving items read "Click OK." and "How Do I Obtain MRS Host Information?".]