From fdd43c552e46c060c83a216974bbd13f21261a0f Mon Sep 17 00:00:00 2001
From: "Su, Xiaomeng"
Date: Fri, 9 Aug 2024 11:00:57 +0000
Subject: [PATCH] dli_umn_20240808

Reviewed-by: Pruthi, Vineet
Co-authored-by: Su, Xiaomeng
Co-committed-by: Su, Xiaomeng
---
 docs/dli/umn/ALL_META.TXT.json | 1388 +++++++++-----------------
 docs/dli/umn/CLASS.TXT.json    |  527 ++++++------
 docs/dli/umn/dli_01_00006.html |   12 +-
 docs/dli/umn/dli_01_0003.html  |    4 +-
 docs/dli/umn/dli_01_0005.html  |   12 +-
 docs/dli/umn/dli_01_0006.html  |    5 +-
 docs/dli/umn/dli_01_0012.html  |    2 +-
 docs/dli/umn/dli_01_0013.html  |    2 +-
 docs/dli/umn/dli_01_0228.html  |    3 +-
 docs/dli/umn/dli_01_0318.html  |  336 +++++---
 docs/dli/umn/dli_01_0320.html  |    1 -
 docs/dli/umn/dli_01_0384.html  |    2 +-
 docs/dli/umn/dli_01_0397.html  |  329 ++++----
 docs/dli/umn/dli_01_0407.html  |    2 +-
 docs/dli/umn/dli_01_0420.html  |   10 +-
 docs/dli/umn/dli_01_0426.html  |    4 +-
 docs/dli/umn/dli_01_0454.html  |    2 +-
 docs/dli/umn/dli_01_0487.html  |   10 +-
 docs/dli/umn/dli_01_0489.html  |    4 +-
 docs/dli/umn/dli_01_0504.html  |   17 +-
 docs/dli/umn/dli_01_0524.html  |    5 +-
 docs/dli/umn/dli_01_0531.html  |    6 +-
 docs/dli/umn/dli_01_0538.html  |    2 +-
 docs/dli/umn/dli_01_0552.html  |    2 +-
 docs/dli/umn/dli_01_0561.html  |    6 +-
 docs/dli/umn/dli_01_0563.html  |    2 +-
 docs/dli/umn/dli_01_0624.html  |   71 ++
 docs/dli/umn/dli_03_0002.html  |   11 -
 docs/dli/umn/dli_03_0017.html  |   11 +-
 docs/dli/umn/dli_03_0032.html  |    2 +-
 docs/dli/umn/dli_03_0038.html  |   53 --
 docs/dli/umn/dli_03_0044.html  |    6 +-
 docs/dli/umn/dli_03_0075.html  |    3 -
 docs/dli/umn/dli_03_0089.html  |    3 -
 docs/dli/umn/dli_03_0093.html  |   37 +-
 docs/dli/umn/dli_03_0100.html  |    2 +-
 docs/dli/umn/dli_03_0119.html  |    2 +-
 docs/dli/umn/dli_03_0126.html  |    2 +-
 docs/dli/umn/dli_03_0132.html  |    6 +-
 docs/dli/umn/dli_03_0138.html  |    9 +-
 docs/dli/umn/dli_03_0139.html  |    2 +-
 docs/dli/umn/dli_03_0140.html  |    2 +-
 docs/dli/umn/dli_03_0160.html  |    2 +-
 docs/dli/umn/dli_03_0165.html  |    2 +-
 docs/dli/umn/dli_03_0171.html  |    3 +-
 docs/dli/umn/dli_03_0172.html  |    2 +-
 docs/dli/umn/dli_03_0179.html  |    6 +-
 docs/dli/umn/dli_03_0184.html  |    2 +-
 docs/dli/umn/dli_03_0196.html  |    2 +-
 docs/dli/umn/dli_03_0211.html  |    2 +-
 docs/dli/umn/dli_03_0221.html  |    2 -
 docs/dli/umn/dli_03_0227.html  |    2 +-
 docs/dli/umn/dli_07_0005.html  |   25 +-
 53 files changed, 1223 insertions(+), 1744 deletions(-)
 create mode 100644 docs/dli/umn/dli_01_0624.html
 delete mode 100644 docs/dli/umn/dli_03_0002.html
 delete mode 100644 docs/dli/umn/dli_03_0038.html

diff --git a/docs/dli/umn/ALL_META.TXT.json b/docs/dli/umn/ALL_META.TXT.json
index 8f4d0d36..e09a1d5b 100644
--- a/docs/dli/umn/ALL_META.TXT.json
+++ b/docs/dli/umn/ALL_META.TXT.json
@@ -87,7 +87,7 @@
         "code":"5",
         "des":"Only the latest 100 jobs are displayed on DLI's SparkUI.A maximum of 1,000 job results can be displayed on the console. 
To view more or all jobs, export the job data to O", "doc_type":"usermanual", - "kw":"Constraints and Limitations,Service Overview,User Guide", + "kw":"Notes and Constraints,Service Overview,User Guide", "search_title":"", "metedata":[ { @@ -97,7 +97,7 @@ "IsBot":"Yes" } ], - "title":"Constraints and Limitations", + "title":"Notes and Constraints", "githuburl":"" }, { @@ -172,10 +172,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Getting Started", @@ -273,9 +270,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -314,10 +310,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Job Management", @@ -373,10 +366,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Flink Job Management", @@ -414,9 +404,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -435,9 +424,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -596,10 +584,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Spark Job Management", @@ -677,10 +662,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Queue Management", @@ -718,9 +700,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -759,9 +740,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -798,9 +778,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -814,7 +793,7 @@ "code":"41", "des":"Elastic scaling can be performed for a newly created queue only when there were jobs running in this queue.Queues with 16 CUs do not support scale-out or scale-in.Queues ", "doc_type":"usermanual", - "kw":"Elastic Queue Scaling,Queue Management,User Guide", + "kw":"Elastic Scaling of Queues,Queue Management,User Guide", "search_title":"", "metedata":[ { @@ -824,7 +803,7 @@ "IsBot":"Yes" } ], - "title":"Elastic Queue Scaling", + "title":"Elastic Scaling of Queues", "githuburl":"" }, { @@ -935,10 +914,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Elastic Resource Pool", @@ -956,10 +932,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Before You Start", @@ -1037,10 +1010,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + 
"documenttype":"usermanual" } ], "title":"Regular Operations", @@ -1171,7 +1141,7 @@ "node_id":"dli_01_0524.xml", "product_code":"dli", "code":"59", - "des":"If CUs of a yearly/monthly elastic resource pool cannot meet your service requirements, you can modify the CUs. In this case, you will be charged based on the number of C", + "des":"If the current specifications of your elastic resource pool do not meet your service needs, you can modify them using the change specifications function.In the navigation", "doc_type":"usermanual", "kw":"Modifying Specifications,Regular Operations,User Guide", "search_title":"", @@ -1256,10 +1226,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Data Management", @@ -1277,10 +1244,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Databases and Tables", @@ -1518,10 +1482,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Package Management", @@ -1599,9 +1560,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1620,9 +1580,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1661,10 +1620,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Job Templates", @@ -1682,9 +1638,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1723,9 +1678,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1744,10 +1698,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Appendix", @@ -1785,9 +1736,9 @@ "metedata":[ { "prodname":"dli", + "IsMulti":"Yes", "opensource":"true", "documenttype":"usermanual", - "IsMulti":"No", "IsBot":"Yes" } ], @@ -1858,34 +1809,31 @@ "githuburl":"" }, { - "uri":"dli_01_0553.html", - "node_id":"dli_01_0553.xml", + "uri":"dli_01_0624.html", + "node_id":"dli_01_0624.xml", "product_code":"dli", "code":"93", - "des":"Delete an enhanced datasource connection that is no longer used on the console.Log in to the DLI management console.In the left navigation pane, choose Datasource Connect", + "des":"VPC sharing allows sharing VPC resources created in one account with other accounts using Resource Access Manager (RAM). 
For example, account A can share its VPC and subn", "doc_type":"usermanual", - "kw":"Deleting an Enhanced Datasource Connection,Enhanced Datasource Connections,User Guide", + "kw":"Establishing a Network Connection Between DLI and Resources in a Shared VPC,Enhanced Datasource Conn", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], - "title":"Deleting an Enhanced Datasource Connection", + "title":"Establishing a Network Connection Between DLI and Resources in a Shared VPC", "githuburl":"" }, { - "uri":"dli_01_0013.html", - "node_id":"dli_01_0013.xml", + "uri":"dli_01_0553.html", + "node_id":"dli_01_0553.xml", "product_code":"dli", "code":"94", - "des":"Host information is the mapping between host IP addresses and domain names. After you configure host information, jobs can only use the configured domain names to access ", + "des":"Delete an enhanced datasource connection that is no longer used on the console.Log in to the DLI management console.In the left navigation pane, choose Datasource Connect", "doc_type":"usermanual", - "kw":"Modifying Host Information,Enhanced Datasource Connections,User Guide", + "kw":"Deleting an Enhanced Datasource Connection,Enhanced Datasource Connections,User Guide", "search_title":"", "metedata":[ { @@ -1896,14 +1844,35 @@ "IsBot":"Yes" } ], - "title":"Modifying Host Information", + "title":"Deleting an Enhanced Datasource Connection", + "githuburl":"" + }, + { + "uri":"dli_01_0013.html", + "node_id":"dli_01_0013.xml", + "product_code":"dli", + "code":"95", + "des":"Host information is the mapping between host IP addresses and domain names. After you configure host information, jobs can only use the configured domain names to access ", + "doc_type":"usermanual", + "kw":"Modifying Host Information in an Elastic Resource Pool,Enhanced Datasource Connections,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "IsMulti":"Yes", + "opensource":"true", + "documenttype":"usermanual", + "IsBot":"Yes" + } + ], + "title":"Modifying Host Information in an Elastic Resource Pool", "githuburl":"" }, { "uri":"dli_01_0514.html", "node_id":"dli_01_0514.xml", "product_code":"dli", - "code":"95", + "code":"96", "des":"The CIDR block of the DLI queue that is bound with a datasource connection cannot overlap with that of the data source.The default queue cannot be bound with a connection", "doc_type":"usermanual", "kw":"Binding and Unbinding a Queue,Enhanced Datasource Connections,User Guide", @@ -1924,7 +1893,7 @@ "uri":"dli_01_0014.html", "node_id":"dli_01_0014.xml", "product_code":"dli", - "code":"96", + "code":"97", "des":"A route is configured with the destination, next hop type, and next hop to determine where the network traffic is directed. 
Routes are classified into system routes and c", "doc_type":"usermanual", "kw":"Adding a Route,Enhanced Datasource Connections,User Guide", @@ -1945,7 +1914,7 @@ "uri":"dli_01_0556.html", "node_id":"dli_01_0556.xml", "product_code":"dli", - "code":"97", + "code":"98", "des":"Delete a route that is no longer used.A custom route table cannot be deleted if it is associated with a subnet.Log in to the DLI management console.In the left navigation", "doc_type":"usermanual", "kw":"Deleting a Route,Enhanced Datasource Connections,User Guide", @@ -1953,9 +1922,9 @@ "metedata":[ { "prodname":"dli", + "IsMulti":"Yes", "opensource":"true", "documenttype":"usermanual", - "IsMulti":"No", "IsBot":"Yes" } ], @@ -1966,7 +1935,7 @@ "uri":"dli_01_0018.html", "node_id":"dli_01_0018.xml", "product_code":"dli", - "code":"98", + "code":"99", "des":"Enhanced connections support user authorization by project. After authorization, users in the project have the permission to perform operations on the enhanced connection", "doc_type":"usermanual", "kw":"Enhanced Connection Permission Management,Enhanced Datasource Connections,User Guide", @@ -1987,7 +1956,7 @@ "uri":"dli_01_0019.html", "node_id":"dli_01_0019.xml", "product_code":"dli", - "code":"99", + "code":"100", "des":"A tag is a key-value pair customized by users and used to identify cloud resources. It helps users to classify and search for cloud resources. A tag consists of a tag key", "doc_type":"usermanual", "kw":"Enhanced Datasource Connection Tag Management,Enhanced Datasource Connections,User Guide", @@ -2008,7 +1977,7 @@ "uri":"dli_01_0422.html", "node_id":"dli_01_0422.xml", "product_code":"dli", - "code":"100", + "code":"101", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Datasource Authentication", @@ -2016,9 +1985,9 @@ "metedata":[ { "prodname":"dli", + "IsMulti":"Yes", "opensource":"true", "documenttype":"usermanual", - "IsMulti":"No", "IsBot":"Yes" } ], @@ -2029,7 +1998,7 @@ "uri":"dli_01_0561.html", "node_id":"dli_01_0561.xml", "product_code":"dli", - "code":"101", + "code":"102", "des":"When analyzing across multiple sources, it is not recommended to configure authentication information directly in a job as it can lead to password leakage. Instead, you a", "doc_type":"usermanual", "kw":"Overview,Datasource Authentication,User Guide", @@ -2050,7 +2019,7 @@ "uri":"dli_01_0427.html", "node_id":"dli_01_0427.xml", "product_code":"dli", - "code":"102", + "code":"103", "des":"Create a CSS datasource authentication on the DLI console to store the authentication information of the CSS security cluster to DLI. This will allow you to access to the", "doc_type":"usermanual", "kw":"Creating a CSS Datasource Authentication,Datasource Authentication,User Guide", @@ -2071,7 +2040,7 @@ "uri":"dli_01_0558.html", "node_id":"dli_01_0558.xml", "product_code":"dli", - "code":"103", + "code":"104", "des":"Create a Kerberos datasource authentication on the DLI console to store the authentication information of the data source to DLI. 
This will allow you to access to the dat", "doc_type":"usermanual", "kw":"Creating a Kerberos Datasource Authentication,Datasource Authentication,User Guide", @@ -2092,7 +2061,7 @@ "uri":"dli_01_0560.html", "node_id":"dli_01_0560.xml", "product_code":"dli", - "code":"104", + "code":"105", "des":"Create a Kafka_SSL datasource authentication on the DLI console to store the Kafka authentication information to DLI. This will allow you to access to Kafka instances wit", "doc_type":"usermanual", "kw":"Creating a Kafka_SSL Datasource Authentication,Datasource Authentication,User Guide", @@ -2113,7 +2082,7 @@ "uri":"dli_01_0559.html", "node_id":"dli_01_0559.xml", "product_code":"dli", - "code":"105", + "code":"106", "des":"Create a password datasource authentication on the DLI console to store passwords of the GaussDB(DWS), RDS, DCS, and DDS data sources to DLI. This will allow you to acces", "doc_type":"usermanual", "kw":"Creating a Password Datasource Authentication,Datasource Authentication,User Guide", @@ -2134,7 +2103,7 @@ "uri":"dli_01_0480.html", "node_id":"dli_01_0480.xml", "product_code":"dli", - "code":"106", + "code":"107", "des":"Grant permissions on a datasource authentication to users so multiple user jobs can use the datasource authentication without affecting each other.The administrator and t", "doc_type":"usermanual", "kw":"Datasource Authentication Permission Management,Datasource Authentication,User Guide", @@ -2142,9 +2111,9 @@ "metedata":[ { "prodname":"dli", + "IsMulti":"Yes", "opensource":"true", "documenttype":"usermanual", - "IsMulti":"No", "IsBot":"Yes" } ], @@ -2155,7 +2124,7 @@ "uri":"dli_01_0485.html", "node_id":"dli_01_0485.xml", "product_code":"dli", - "code":"107", + "code":"108", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Global Configuration", @@ -2163,10 +2132,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Global Configuration", @@ -2176,7 +2142,7 @@ "uri":"dli_01_0476.html", "node_id":"dli_01_0476.xml", "product_code":"dli", - "code":"108", + "code":"109", "des":"DLI allows you to set variables that are frequently used during job development as global variables on the DLI management console. This avoids repeated definitions during", "doc_type":"usermanual", "kw":"Global Variables,Global Configuration,User Guide", @@ -2196,7 +2162,7 @@ "uri":"dli_01_0533.html", "node_id":"dli_01_0533.xml", "product_code":"dli", - "code":"109", + "code":"110", "des":"You can grant permissions on a global variable to users.The administrator and the global variable owner have all permissions. You do not need to set permissions for them,", "doc_type":"usermanual", "kw":"Permission Management for Global Variables,Global Configuration,User Guide", @@ -2216,7 +2182,7 @@ "uri":"dli_01_0408.html", "node_id":"dli_01_0408.xml", "product_code":"dli", - "code":"110", + "code":"111", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Permissions Management", @@ -2234,7 +2200,7 @@ "uri":"dli_01_0440.html", "node_id":"dli_01_0440.xml", "product_code":"dli", - "code":"111", + "code":"112", "des":"DLI has a comprehensive permission control mechanism and supports fine-grained authentication through Identity and Access Management (IAM). You can create policies in IAM", "doc_type":"usermanual", "kw":"Overview,Permissions Management,User Guide", @@ -2254,7 +2220,7 @@ "uri":"dli_01_0418.html", "node_id":"dli_01_0418.xml", "product_code":"dli", - "code":"112", + "code":"113", "des":"You can use Identity and Access Management (IAM) to implement fine-grained permissions control on DLI resources. For details, see Overview.If your cloud account does not ", "doc_type":"usermanual", "kw":"Creating an IAM User and Granting Permissions,Permissions Management,User Guide", @@ -2274,7 +2240,7 @@ "uri":"dli_01_0451.html", "node_id":"dli_01_0451.xml", "product_code":"dli", - "code":"113", + "code":"114", "des":"Custom policies can be created as a supplement to the system policies of DLI. You can add actions to custom policies. For the actions supported for custom policies, see \"", "doc_type":"usermanual", "kw":"Creating a Custom Policy,Permissions Management,User Guide", @@ -2294,7 +2260,7 @@ "uri":"dli_01_0417.html", "node_id":"dli_01_0417.xml", "product_code":"dli", - "code":"114", + "code":"115", "des":"A resource is an object that exists within a service. You can select DLI resources by specifying their paths.", "doc_type":"usermanual", "kw":"DLI Resources,Permissions Management,User Guide", @@ -2314,7 +2280,7 @@ "uri":"dli_01_0475.html", "node_id":"dli_01_0475.xml", "product_code":"dli", - "code":"115", + "code":"116", "des":"Request conditions are useful in determining when a custom policy takes effect. A request condition consists of a condition key and operator. Condition keys are either gl", "doc_type":"usermanual", "kw":"DLI Request Conditions,Permissions Management,User Guide", @@ -2334,7 +2300,7 @@ "uri":"dli_01_0441.html", "node_id":"dli_01_0441.xml", "product_code":"dli", - "code":"116", + "code":"117", "des":"Table 1 lists the common operations supported by each system policy of DLI. Choose proper system policies according to this table. For details about the SQL statement per", "doc_type":"usermanual", "kw":"Common Operations Supported by DLI System Policy,Permissions Management,User Guide", @@ -2354,7 +2320,7 @@ "uri":"dli_01_0513.html", "node_id":"dli_01_0513.xml", "product_code":"dli", - "code":"117", + "code":"118", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Other Common Operations", @@ -2362,10 +2328,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Other Common Operations", @@ -2375,17 +2338,16 @@ "uri":"dli_01_0420.html", "node_id":"dli_01_0420.xml", "product_code":"dli", - "code":"118", - "des":"On the DLI management console, you can import data stored on OBS to DLI tables from Data Management > Databases and Tables > Table Management and SQL Editor pages. 
For de", + "code":"119", + "des":"On the DLI management console, you can import data stored in OBS into DLI tables.To import OBS data to a DLI table, either choose Data Management > Databases and Tables i", "doc_type":"usermanual", "kw":"Importing Data to a DLI Table,Other Common Operations,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -2396,7 +2358,7 @@ "uri":"dli_01_0445.html", "node_id":"dli_01_0445.xml", "product_code":"dli", - "code":"119", + "code":"120", "des":"This section describes metrics reported by DLI to Cloud Eye as well as their namespaces and dimensions. You can use the management console or APIs provided by Cloud Eye t", "doc_type":"usermanual", "kw":"Viewing Monitoring Metrics,Other Common Operations,User Guide", @@ -2416,7 +2378,7 @@ "uri":"dli_01_0318.html", "node_id":"dli_01_0318.xml", "product_code":"dli", - "code":"120", + "code":"121", "des":"With CTS, you can record operations associated with DLI for later query, audit, and backtrack operations.", "doc_type":"usermanual", "kw":"DLI Operations That Can Be Recorded by CTS,Other Common Operations,User Guide", @@ -2436,7 +2398,7 @@ "uri":"dli_01_0550.html", "node_id":"dli_01_0550.xml", "product_code":"dli", - "code":"121", + "code":"122", "des":"A quota limits the quantity of a resource available to users, thereby preventing spikes in the usage of the resource.You can also request for an increased quota if your e", "doc_type":"usermanual", "kw":"Quota Management,Other Common Operations,User Guide", @@ -2456,7 +2418,7 @@ "uri":"dli_01_0539.html", "node_id":"dli_01_0539.xml", "product_code":"dli", - "code":"122", + "code":"123", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"FAQ", @@ -2464,10 +2426,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"FAQ", @@ -2477,7 +2436,7 @@ "uri":"dli_03_0037.html", "node_id":"dli_03_0037.xml", "product_code":"dli", - "code":"123", + "code":"124", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Flink Jobs", @@ -2485,10 +2444,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Flink Jobs", @@ -2498,7 +2454,7 @@ "uri":"dli_03_0137.html", "node_id":"dli_03_0137.xml", "product_code":"dli", - "code":"124", + "code":"125", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Usage", @@ -2506,10 +2462,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Usage", @@ -2519,7 +2472,7 @@ "uri":"dli_03_0083.html", "node_id":"dli_03_0083.xml", "product_code":"dli", - "code":"125", + "code":"126", "des":"DLI Flink jobs support the following data formats:Avro, Avro_merge, BLOB, CSV, EMAIL, JSON, ORC, Parquet, and XML.DLI Flink jobs support data from the following data sour", "doc_type":"usermanual", "kw":"What Data Formats and Data Sources Are Supported by DLI Flink Jobs?,Usage,User Guide", @@ -2527,10 +2480,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Data Formats and Data Sources Are Supported by DLI Flink Jobs?", @@ -2540,7 +2490,7 @@ "uri":"dli_03_0139.html", "node_id":"dli_03_0139.xml", "product_code":"dli", - "code":"126", + "code":"127", "des":"A sub-user can view queues but cannot view Flink jobs. You can authorize the sub-user using DLI or IAM.Authorization on DLILog in to the DLI console using a tenant accoun", "doc_type":"usermanual", "kw":"How Do I Authorize a Subuser to View Flink Jobs?,Usage,User Guide", @@ -2548,10 +2498,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Authorize a Subuser to View Flink Jobs?", @@ -2561,7 +2508,7 @@ "uri":"dli_03_0090.html", "node_id":"dli_03_0090.xml", "product_code":"dli", - "code":"127", + "code":"128", "des":"DLI Flink jobs are highly available. You can enable the automatic restart function to automatically restart your jobs after short-time faults of peripheral services are r", "doc_type":"usermanual", "kw":"How Do I Set Auto Restart upon Exception for a Flink Job?,Usage,User Guide", @@ -2569,10 +2516,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Set Auto Restart upon Exception for a Flink Job?", @@ -2582,7 +2526,7 @@ "uri":"dli_03_0099.html", "node_id":"dli_03_0099.xml", "product_code":"dli", - "code":"128", + "code":"129", "des":"When you create a Flink SQL job or Flink Jar job, you can select Save Job Log on the job editing page to save job running logs to OBS.To set the OBS bucket for storing th", "doc_type":"usermanual", "kw":"How Do I Save Flink Job Logs?,Usage,User Guide", @@ -2590,10 +2534,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Save Flink Job Logs?", @@ -2603,7 +2544,7 @@ "uri":"dli_03_0043.html", "node_id":"dli_03_0043.xml", "product_code":"dli", - "code":"129", + "code":"130", "des":"DLI can output Flink job results to DIS. You can view the results in DIS. 
For details, see \"Obtaining Data from DIS\" in Data Ingestion Service User Guide.DLI can output F", "doc_type":"usermanual", "kw":"How Can I Check Flink Job Results?,Usage,User Guide", @@ -2611,10 +2552,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Can I Check Flink Job Results?", @@ -2624,7 +2562,7 @@ "uri":"dli_03_0160.html", "node_id":"dli_03_0160.xml", "product_code":"dli", - "code":"130", + "code":"131", "des":"Choose Job Management > Flink Jobs. In the Operation column of the target job, choose More > Permissions. When a new user is authorized, No such user. userName:xxxx. is d", "doc_type":"usermanual", "kw":"Why Is Error \"No such user. userName:xxxx.\" Reported on the Flink Job Management Page When I Grant P", @@ -2632,10 +2570,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"No such user. userName:xxxx.\" Reported on the Flink Job Management Page When I Grant Permission to a User?", @@ -2645,7 +2580,7 @@ "uri":"dli_03_0180.html", "node_id":"dli_03_0180.xml", "product_code":"dli", - "code":"131", + "code":"132", "des":"Checkpoint was enabled when a Flink job is created, and the OBS bucket for storing checkpoints was specified. After a Flink job is manually stopped, no message is display", "doc_type":"usermanual", "kw":"How Do I Know Which Checkpoint the Flink Job I Stopped Will Be Restored to When I Start the Job Agai", @@ -2653,10 +2588,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Know Which Checkpoint the Flink Job I Stopped Will Be Restored to When I Start the Job Again?", @@ -2666,7 +2598,7 @@ "uri":"dli_03_0036.html", "node_id":"dli_03_0036.xml", "product_code":"dli", - "code":"132", + "code":"133", "des":"When you set running parameters of a DLI Flink job, you can enable Alarm Generation upon Job Exception to receive alarms when the job runs abnormally or is in arrears.If ", "doc_type":"usermanual", "kw":"Why Is a Message Displayed Indicating That the SMN Topic Does Not Exist When I Use the SMN Topic in ", @@ -2674,10 +2606,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is a Message Displayed Indicating That the SMN Topic Does Not Exist When I Use the SMN Topic in DLI?", @@ -2687,7 +2616,7 @@ "uri":"dli_03_0131.html", "node_id":"dli_03_0131.xml", "product_code":"dli", - "code":"133", + "code":"134", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Flink SQL", @@ -2695,10 +2624,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Flink SQL", @@ -2708,7 +2634,7 @@ "uri":"dli_03_0130.html", "node_id":"dli_03_0130.xml", "product_code":"dli", - "code":"134", + "code":"135", "des":"The consumption capability of a Flink SQL job depends on the data source transmission, queue size, and job parameter settings. 
The peak consumption is 10 Mbit/s.", "doc_type":"usermanual", "kw":"How Much Data Can Be Processed in a Day by a Flink SQL Job?,Flink SQL,User Guide", @@ -2716,10 +2642,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Much Data Can Be Processed in a Day by a Flink SQL Job?", @@ -2729,7 +2652,7 @@ "uri":"dli_03_0061.html", "node_id":"dli_03_0061.xml", "product_code":"dli", - "code":"135", + "code":"136", "des":"The temp stream in Flink SQL is similar to a subquery. It is a logical stream used to simplify the SQL logic and does not generate data storage. Therefore, there is no ne", "doc_type":"usermanual", "kw":"Does Data in the Temporary Stream of Flink SQL Need to Be Cleared Periodically? How Do I Clear the D", @@ -2737,10 +2660,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Does Data in the Temporary Stream of Flink SQL Need to Be Cleared Periodically? How Do I Clear the Data?", @@ -2750,18 +2670,15 @@ "uri":"dli_03_0138.html", "node_id":"dli_03_0138.xml", "product_code":"dli", - "code":"136", - "des":"SymptomWhen you create a Flink SQL job and configure the parameters, you select an OBS bucket you have created. The system displays a message indicating that the OBS buck", + "code":"137", + "des":"When you create a Flink SQL job and configure the parameters, you select an OBS bucket you have created. The system displays a message indicating that the OBS bucket is n", "doc_type":"usermanual", "kw":"Why Is a Message Displayed Indicating That the OBS Bucket Is Not Authorized When I Select an OBS Buc", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is a Message Displayed Indicating That the OBS Bucket Is Not Authorized When I Select an OBS Bucket for a Flink SQL Job?", @@ -2771,7 +2688,7 @@ "uri":"dli_03_0089.html", "node_id":"dli_03_0089.xml", "product_code":"dli", - "code":"137", + "code":"138", "des":"When using a Flink SQL job, you need to create an OBS partition table for subsequent batch processing.In the following example, the day field is used as the partition fie", "doc_type":"usermanual", "kw":"How Do I Create an OBS Partitioned Table for a Flink SQL Job?,Flink SQL,User Guide", @@ -2779,10 +2696,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Create an OBS Partitioned Table for a Flink SQL Job?", @@ -2792,7 +2706,7 @@ "uri":"dli_03_0075.html", "node_id":"dli_03_0075.xml", "product_code":"dli", - "code":"138", + "code":"139", "des":"In this example, the day field is used as the partition field with the parquet encoding format (only the parquet format is supported currently) to dump car_info data to O", "doc_type":"usermanual", "kw":"How Do I Dump Data to OBS and Create an OBS Partitioned Table?,Flink SQL,User Guide", @@ -2800,10 +2714,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Dump Data to OBS and Create an OBS Partitioned Table?", @@ -2813,7 +2724,7 @@ "uri":"dli_03_0167.html", "node_id":"dli_03_0167.xml", "product_code":"dli", - 
"code":"139", + "code":"140", "des":"When I run the creation statement with an EL expression in the table name in a Flink SQL job, the following error message is displayed:DLI.0005: AnalysisException: t_user", "doc_type":"usermanual", "kw":"Why Is Error Message \"DLI.0005\" Displayed When I Use an EL Expression to Create a Table in a Flink S", @@ -2821,10 +2732,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error Message \"DLI.0005\" Displayed When I Use an EL Expression to Create a Table in a Flink SQL Job?", @@ -2834,7 +2742,7 @@ "uri":"dli_03_0168.html", "node_id":"dli_03_0168.xml", "product_code":"dli", - "code":"140", + "code":"141", "des":"After data is written to OBS through the Flink job output stream, data cannot be queried from the DLI table created in the OBS file path.For example, use the following Fl", "doc_type":"usermanual", "kw":"Why Is No Data Queried in the DLI Table Created Using the OBS File Path When Data Is Written to OBS ", @@ -2842,10 +2750,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is No Data Queried in the DLI Table Created Using the OBS File Path When Data Is Written to OBS by a Flink Job Output Stream?", @@ -2855,7 +2760,7 @@ "uri":"dli_03_0174.html", "node_id":"dli_03_0174.xml", "product_code":"dli", - "code":"141", + "code":"142", "des":"After a Flink SQL job is submitted on DLI, the job fails to be executed. The following error information is displayed in the job log:connect to DIS failed java.lang.Illeg", "doc_type":"usermanual", "kw":"Why Does a Flink SQL Job Fails to Be Executed, and Is \"connect to DIS failed java.lang.IllegalArgume", @@ -2863,10 +2768,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does a Flink SQL Job Fails to Be Executed, and Is \"connect to DIS failed java.lang.IllegalArgumentException: Access key cannot be null\" Displayed in the Log?", @@ -2876,7 +2778,7 @@ "uri":"dli_03_0176.html", "node_id":"dli_03_0176.xml", "product_code":"dli", - "code":"142", + "code":"143", "des":"Semantic verification for a Flink SQL job (reading DIS data) fails. The following information is displayed when the job fails:Get dis channel xxxinfo failed. 
error info: ", "doc_type":"usermanual", "kw":"Why Is Error \"Not authorized\" Reported When a Flink SQL Job Reads DIS Data?,Flink SQL,User Guide", @@ -2884,10 +2786,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"Not authorized\" Reported When a Flink SQL Job Reads DIS Data?", @@ -2897,7 +2796,7 @@ "uri":"dli_03_0232.html", "node_id":"dli_03_0232.xml", "product_code":"dli", - "code":"143", + "code":"144", "des":"After a Flink SQL job consumed Kafka and sent data to the Elasticsearch cluster, the job was successfully executed, but no data is available.Possible causes are as follow", "doc_type":"usermanual", "kw":"Data Writing Fails After a Flink SQL Job Consumed Kafka and Sank Data to the Elasticsearch Cluster,F", @@ -2905,10 +2804,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Data Writing Fails After a Flink SQL Job Consumed Kafka and Sank Data to the Elasticsearch Cluster", @@ -2918,7 +2814,7 @@ "uri":"dli_03_0132.html", "node_id":"dli_03_0132.xml", "product_code":"dli", - "code":"144", + "code":"145", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Flink Jar Jobs", @@ -2926,55 +2822,28 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Flink Jar Jobs", "githuburl":"" }, - { - "uri":"dli_03_0038.html", - "node_id":"dli_03_0038.xml", - "product_code":"dli", - "code":"145", - "des":"The procedure is as follows:Add the following code to the JAR file code of the Flink Jar job:// Configure the pom file on which the StreamExecutionEnvironment depends.\nSt", - "doc_type":"usermanual", - "kw":"How Do I Configure Checkpoints for Flink Jar Jobs and Save the Checkpoints to OBS?,Flink Jar Jobs,Us", - "search_title":"", - "metedata":[ - { - "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" - } - ], - "title":"How Do I Configure Checkpoints for Flink Jar Jobs and Save the Checkpoints to OBS?", - "githuburl":"" - }, { "uri":"dli_03_0044.html", "node_id":"dli_03_0044.xml", "product_code":"dli", "code":"146", - "des":"Configuration files can be uploaded for user-defined jobs (JAR).Upload the configuration file to DLI through Package Management.In the Other Dependencies area of the Flin", + "des":"You can upload configuration files for custom jobs (Jar).Upload the configuration file to DLI through Package Management.In the Other Dependencies area of the Flink Jar j", "doc_type":"usermanual", - "kw":"Does a Flink JAR Job Support Configuration File Upload? How Do I Upload a Configuration File?,Flink ", + "kw":"Can I Upload Configuration Files for Flink Jar Jobs?,Flink Jar Jobs,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], - "title":"Does a Flink JAR Job Support Configuration File Upload? 
How Do I Upload a Configuration File?", + "title":"Can I Upload Configuration Files for Flink Jar Jobs?", "githuburl":"" }, { @@ -2984,18 +2853,15 @@ "code":"147", "des":"The dependency of your Flink job conflicts with a built-in dependency of the DLI Flink platform. As a result, the job submission fails.Delete your JAR file that is the sa", "doc_type":"usermanual", - "kw":"Why Does the Submission Fail Due to Flink JAR File Conflict?,Flink Jar Jobs,User Guide", + "kw":"Why Does a Flink Jar Package Conflict Result in Submission Failure?,Flink Jar Jobs,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], - "title":"Why Does the Submission Fail Due to Flink JAR File Conflict?", + "title":"Why Does a Flink Jar Package Conflict Result in Submission Failure?", "githuburl":"" }, { @@ -3010,10 +2876,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does a Flink Jar Job Fail to Access GaussDB(DWS) and a Message Is Displayed Indicating Too Many Client Connections?", @@ -3031,10 +2894,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error Message \"Authentication failed\" Displayed During Flink Jar Job Running?", @@ -3052,10 +2912,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error Invalid OBS Bucket Name Reported After a Flink Job Submission Failed?", @@ -3073,10 +2930,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does the Flink Submission Fail Due to Hadoop JAR File Conflict?", @@ -3094,10 +2948,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Connect a Flink jar Job to SASL_SSL?", @@ -3115,10 +2966,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Performance Tuning", @@ -3136,10 +2984,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Optimize Performance of a Flink Job?", @@ -3157,10 +3002,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Write Data to Different Elasticsearch Clusters in a Flink Job?", @@ -3178,10 +3020,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Prevent Data Loss After Flink Job Restart?", @@ -3199,10 +3038,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"O&M Guide", @@ -3220,10 +3056,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - 
"IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Locate a Flink Job Submission Error?", @@ -3241,10 +3074,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Locate a Flink Job Running Error?", @@ -3262,10 +3092,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Can I Check if a Flink Job Can Be Restored From a Checkpoint After Restarting It?", @@ -3283,10 +3110,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does DIS Stream Not Exist During Job Semantic Check?", @@ -3304,10 +3128,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is the OBS Bucket Selected for Job Not Authorized?", @@ -3325,10 +3146,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Are Logs Not Written to the OBS Bucket After a DLI Flink Job Fails to Be Submitted for Running?", @@ -3346,10 +3164,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Information Displayed on the FlinkUI/Spark UI Page Incomplete?", @@ -3367,10 +3182,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is the Flink Job Abnormal Due to Heartbeat Timeout Between JobManager and TaskManager?", @@ -3388,10 +3200,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"Timeout expired while fetching topic metadata\" Repeatedly Reported in Flink JobManager Logs?", @@ -3409,10 +3218,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Problems Related to SQL Jobs", @@ -3430,10 +3236,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Usage", @@ -3451,10 +3254,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"SQL Jobs", @@ -3472,10 +3272,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Job Development", @@ -3493,10 +3290,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Merge Small Files?", @@ -3514,10 +3308,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Specify an OBS Path When Creating an OBS Table?", @@ -3535,10 +3326,7 @@ 
"metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Create a Table Using JSON Data in an OBS Bucket?", @@ -3556,10 +3344,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Set Local Variables in SQL Statements?", @@ -3577,10 +3362,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Can I Use the count Function to Perform Aggregation?", @@ -3598,10 +3380,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Synchronize DLI Table Data from One Region to Another?", @@ -3619,10 +3398,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Insert Table Data into Specific Fields of a Table Using a SQL Job?", @@ -3640,10 +3416,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Job O&M Errors", @@ -3661,10 +3434,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"path obs://xxx already exists\" Reported When Data Is Exported to OBS?", @@ -3682,10 +3452,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"SQL_ANALYSIS_ERROR: Reference 't.id' is ambiguous, could be: t.id, t.id.;\" Displayed When Two Tables Are Joined?", @@ -3703,10 +3470,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"The current account does not have permission to perform this operation,the current account was restricted. Restricted for no budget.\" Reported when a SQL Statement Is Executed?", @@ -3724,10 +3488,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"There should be at least one partition pruning predicate on partitioned table XX.YYY\" Reported When a Query Statement Is Executed?", @@ -3745,10 +3506,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"IllegalArgumentException: Buffer size too small. 
size\" Reported When Data Is Loaded to an OBS Foreign Table?", @@ -3766,10 +3524,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"DLI.0002 FileNotFoundException\" Reported During SQL Job Running?", @@ -3787,10 +3542,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is a Schema Parsing Error Reported When I Create a Hive Table Using CTAS?", @@ -3808,10 +3560,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"org.apache.hadoop.fs.obs.OBSIOException\" Reported When I Run DLI SQL Scripts on DataArts Studio?", @@ -3829,10 +3578,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"UQUERY_CONNECTOR_0001:Invoke DLI service api failed\" Reported in the Job Log When I Use CDM to Migrate Data to DLI?", @@ -3850,10 +3596,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"File not Found\" Reported When I Access a SQL Job?", @@ -3871,10 +3614,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"DLI.0003: AccessControlException XXX\" Reported When I Access a SQL Job?", @@ -3892,10 +3632,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"DLI.0001: org.apache.hadoop.security.AccessControlException: verifyBucketExists on {{bucket name}}: status [403]\" Reported When I Access a SQL Job?", @@ -3913,10 +3650,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"The current account does not have permission to perform this operation,the current account was restricted. Restricted for no budget\" Reported During SQL Statement Execution? 
Restricted for no budget.", @@ -3934,10 +3668,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"O&M Guide", @@ -3955,10 +3686,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Troubleshoot Slow SQL Jobs?", @@ -3976,10 +3704,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I View DLI SQL Logs?", @@ -3997,10 +3722,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I View SQL Execution Records?", @@ -4011,20 +3733,17 @@ "node_id":"dli_03_0093.xml", "product_code":"dli", "code":"196", - "des":"If the execution of an SQL statement takes a long time, you need to access the Spark UI to check the execution status.If data skew occurs, the running time of a stage exc", + "des":"Data skew is a common issue during the execution of SQL jobs. When data is unevenly distributed, some compute nodes process significantly more data than others, which can", "doc_type":"usermanual", - "kw":"How Do I Eliminate Data Skew by Configuring AE Parameters?,O&M Guide,User Guide", + "kw":"How Do I Do When Data Skew Occurs During the Execution of a SQL Job?,O&M Guide,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], - "title":"How Do I Eliminate Data Skew by Configuring AE Parameters?", + "title":"How Do I Do When Data Skew Occurs During the Execution of a SQL Job?", "githuburl":"" }, { @@ -4039,10 +3758,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Can I Do If a Table Cannot Be Queried on the DLI Console?", @@ -4060,10 +3776,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"The Compression Ratio of OBS Tables Is Too High", @@ -4081,10 +3794,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Can I Avoid Garbled Characters Caused by Inconsistent Character Codes?", @@ -4102,10 +3812,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Do I Need to Grant Table Permissions to a User and Project After I Delete a Table and Create One with the Same Name?", @@ -4123,10 +3830,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Can't I Query Table Data After Data Is Imported to a DLI Partitioned Table Because the File to Be Imported Does Not Contain Data in the Partitioning Column?", @@ -4144,10 +3848,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Fix the Data Error Caused 
by CRLF Characters in a Field of the OBS File Used to Create an External OBS Table?", @@ -4165,10 +3866,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does a SQL Job That Has Join Operations Stay in the Running State?", @@ -4186,10 +3884,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"The on Clause Is Not Added When Tables Are Joined. Cartesian Product Query Causes High Resource Usage of the Queue, and the Job Fails to Be Executed", @@ -4207,10 +3902,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Can't I Query Data After I Manually Add Data to the Partition Directory of an OBS Table?", @@ -4228,10 +3920,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is All Data Overwritten When insert overwrite Is Used to Overwrite Partitioned Table?", @@ -4249,10 +3938,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is a SQL Job Stuck in the Submitting State?", @@ -4270,10 +3956,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is the create_date Field in the RDS Table Is a Timestamp in the DLI query result?", @@ -4291,10 +3974,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Can I Do If datasize Cannot Be Changed After the Table Name Is Changed in a Finished SQL Job?", @@ -4312,10 +3992,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is the Data Volume Changes When Data Is Imported from DLI to OBS?", @@ -4333,10 +4010,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Problems Related to Spark Jobs", @@ -4354,10 +4028,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Usage", @@ -4375,10 +4046,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Spark Jobs", @@ -4396,10 +4064,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Job Development", @@ -4417,10 +4082,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Use Spark to Write Data into a DLI Table?", @@ -4431,17 +4093,14 @@ "node_id":"dli_03_0017.xml", "product_code":"dli", "code":"216", - "des":"Hard-coded or plaintext AK and SK pose significant security risks. 
To ensure security, encrypt your AK and SK, store them in configuration files or environment variables,", + "des":"To obtain the AK/SK, set the parameters as follows:Create a SparkContext using code.val sc: SparkContext = new SparkContext()\nsc.hadoopConfiguration.set(\"fs.obs.access.ke", "doc_type":"usermanual", "kw":"How Do I Set the AK/SK for a Queue to Operate an OBS Table?,Job Development,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Set the AK/SK for a Queue to Operate an OBS Table?", @@ -4459,10 +4118,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I View the Resource Usage of DLI Spark Jobs?", @@ -4480,10 +4136,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Use Python Scripts to Access the MySQL Database If the pymysql Module Is Missing from the Spark Job Results Stored in MySQL?", @@ -4501,10 +4154,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Run a Complex PySpark Program in DLI?", @@ -4522,10 +4172,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Does a Spark Job Access a MySQL Database?", @@ -4543,10 +4190,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Use JDBC to Set the spark.sql.shuffle.partitions Parameter to Improve the Task Concurrency?", @@ -4564,10 +4208,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Read Uploaded Files for a Spark Jar Job?", @@ -4585,10 +4226,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Job O&M Errors", @@ -4606,10 +4244,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Are Errors \"ResponseCode: 403\" and \"ResponseStatus: Forbidden\" Reported When a Spark Job Accesses OBS Data?", @@ -4627,10 +4262,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"verifyBucketExists on XXXX: status [403]\" Reported When I Use a Spark Job to Access an OBS Bucket That I Have Access Permission?", @@ -4648,10 +4280,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is a Job Running Timeout Reported When a Spark Job Runs a Large Amount of Data?", @@ -4669,10 +4298,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does the Job 
Fail to Be Executed and the Log Shows that the File Directory Is Abnormal When I Use a Spark Job to Access Files in SFTP?", @@ -4690,10 +4316,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does the Job Fail to Be Executed Due to Insufficient Database and Table Permissions?", @@ -4711,10 +4334,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"O&M Guide", @@ -4732,10 +4352,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Can't I Find the Specified Python Environment After Adding the Python Package?", @@ -4753,10 +4370,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is a Spark Jar Job Stuck in the Submitting State?", @@ -4774,10 +4388,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Product Consultation", @@ -4795,41 +4406,17 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Usage", "githuburl":"" }, - { - "uri":"dli_03_0002.html", - "node_id":"dli_03_0002.xml", - "product_code":"dli", - "code":"234", - "des":"Data Lake Insight (DLI) is a serverless data processing and analysis service fully compatible with Apache Spark and Apache Flink ecosystems. It frees you from managing an", - "doc_type":"usermanual", - "kw":"What Is DLI?,Usage,User Guide", - "search_title":"", - "metedata":[ - { - "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" - } - ], - "title":"What Is DLI?", - "githuburl":"" - }, { "uri":"dli_03_0025.html", "node_id":"dli_03_0025.xml", "product_code":"dli", - "code":"235", + "code":"234", "des":"DLI supports the following data formats:ParquetCSVORCJsonAvro", "doc_type":"usermanual", "kw":"Which Data Formats Does DLI Support?,Usage,User Guide", @@ -4837,10 +4424,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Which Data Formats Does DLI Support?", @@ -4850,7 +4434,7 @@ "uri":"dli_03_0115.html", "node_id":"dli_03_0115.xml", "product_code":"dli", - "code":"236", + "code":"235", "des":"The Spark component of DLI is a fully managed service. You can only use the DLI Spark through its APIs. .The Spark component of MRS is built on the VM in an MRS cluster. 
", "doc_type":"usermanual", "kw":"What Are the Differences Between MRS Spark and DLI Spark?,Usage,User Guide", @@ -4858,10 +4442,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Are the Differences Between MRS Spark and DLI Spark?", @@ -4871,7 +4452,7 @@ "uri":"dli_03_0029.html", "node_id":"dli_03_0029.xml", "product_code":"dli", - "code":"237", + "code":"236", "des":"DLI data can be stored in either of the following:OBS: Data used by SQL jobs, Spark jobs, and Flink jobs can be stored in OBS, reducing storage costs.DLI: The column-base", "doc_type":"usermanual", "kw":"Where Can DLI Data Be Stored?,Usage,User Guide", @@ -4879,10 +4460,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Where Can DLI Data Be Stored?", @@ -4892,7 +4470,7 @@ "uri":"dli_03_0117.html", "node_id":"dli_03_0117.xml", "product_code":"dli", - "code":"238", + "code":"237", "des":"DLI tables store data within the DLI service, and you do not need to know the data storage path.OBS tables store data in your OBS buckets, and you need to manage the sour", "doc_type":"usermanual", "kw":"What Are the Differences Between DLI Tables and OBS Tables?,Usage,User Guide", @@ -4900,10 +4478,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Are the Differences Between DLI Tables and OBS Tables?", @@ -4913,7 +4488,7 @@ "uri":"dli_03_0010.html", "node_id":"dli_03_0010.xml", "product_code":"dli", - "code":"239", + "code":"238", "des":"Currently, DLI supports analysis only on the data uploaded to the cloud. In scenarios where regular (for example, on a per day basis) one-off analysis on incremental data", "doc_type":"usermanual", "kw":"How Can I Use DLI If Data Is Not Uploaded to OBS?,Usage,User Guide", @@ -4921,10 +4496,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Can I Use DLI If Data Is Not Uploaded to OBS?", @@ -4934,7 +4506,7 @@ "uri":"dli_03_0129.html", "node_id":"dli_03_0129.xml", "product_code":"dli", - "code":"240", + "code":"239", "des":"Data in the OBS bucket shared by IAM users under the same account can be imported. You cannot import data in the OBS bucket shared with other IAM account.", "doc_type":"usermanual", "kw":"Can I Import OBS Bucket Data Shared by Other Tenants into DLI?,Usage,User Guide", @@ -4942,10 +4514,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Can I Import OBS Bucket Data Shared by Other Tenants into DLI?", @@ -4955,7 +4524,7 @@ "uri":"dli_03_0264.html", "node_id":"dli_03_0264.xml", "product_code":"dli", - "code":"241", + "code":"240", "des":"Log in to the management console.Click in the upper left corner and select a region and a project.Click the My Quota icon in the upper right corner of the page.The Serv", "doc_type":"usermanual", "kw":"Why Is Error \"Failed to create the database. 
{\"error_code\":\"DLI.1028\";\"error_msg\":\"Already reached t", @@ -4963,10 +4532,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"Failed to create the database. {\"error_code\":\"DLI.1028\";\"error_msg\":\"Already reached the maximum quota of databases:XXX\".\" Reported?", @@ -4976,7 +4542,7 @@ "uri":"dli_03_0263.html", "node_id":"dli_03_0263.xml", "product_code":"dli", - "code":"242", + "code":"241", "des":"No, a global variable can only be used by the user who created it. Global variables can be used to simplify complex parameters. For example, long and difficult variables ", "doc_type":"usermanual", "kw":"Can a Member Account Use Global Variables Created by Other Member Accounts?,Usage,User Guide", @@ -4984,10 +4550,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Can a Member Account Use Global Variables Created by Other Member Accounts?", @@ -4997,7 +4560,7 @@ "uri":"dli_03_0222.html", "node_id":"dli_03_0222.xml", "product_code":"dli", - "code":"243", + "code":"242", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Job Management", @@ -5005,10 +4568,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Job Management", @@ -5018,7 +4578,7 @@ "uri":"dli_03_0126.html", "node_id":"dli_03_0126.xml", "product_code":"dli", - "code":"244", + "code":"243", "des":"If you are suggested to perform following operations to run a large number of DLI jobs:Group the DLI jobs by type, and run each group on a queue.Alternatively, create IAM", "doc_type":"usermanual", "kw":"How Do I Manage Tens of Thousands of Jobs Running on DLI?,Job Management,User Guide", @@ -5026,10 +4586,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Manage Tens of Thousands of Jobs Running on DLI?", @@ -5039,7 +4596,7 @@ "uri":"dli_03_0162.html", "node_id":"dli_03_0162.xml", "product_code":"dli", - "code":"245", + "code":"244", "des":"The field names of tables that have been created cannot be changed.You can create a table, define new table fields, and migrate data from the old table to the new one.", "doc_type":"usermanual", "kw":"How Do I Change the Name of a Field in a Created Table?,Job Management,User Guide", @@ -5047,10 +4604,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Change the Name of a Field in a Created Table?", @@ -5060,7 +4614,7 @@ "uri":"dli_03_0261.html", "node_id":"dli_03_0261.xml", "product_code":"dli", - "code":"246", + "code":"245", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Privacy and Security", @@ -5068,10 +4622,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Privacy and Security", @@ -5081,7 +4632,7 @@ "uri":"dli_03_0260.html", "node_id":"dli_03_0260.xml", "product_code":"dli", - "code":"247", + "code":"246", "des":"No. The spark.acls.enable configuration item is not used in DLI. The Apache Spark command injection vulnerability (CVE-2022-33891) does not exist in DLI.", "doc_type":"usermanual", "kw":"Does DLI Have the Apache Spark Command Injection Vulnerability (CVE-2022-33891)?,Privacy and Securit", @@ -5089,10 +4640,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Does DLI Have the Apache Spark Command Injection Vulnerability (CVE-2022-33891)?", @@ -5102,7 +4650,7 @@ "uri":"dli_03_0053.html", "node_id":"dli_03_0053.xml", "product_code":"dli", - "code":"248", + "code":"247", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Quota", @@ -5110,10 +4658,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Quota", @@ -5123,7 +4668,7 @@ "uri":"dli_03_0031.html", "node_id":"dli_03_0031.xml", "product_code":"dli", - "code":"249", + "code":"248", "des":"Log in to the management console.Click in the upper left corner and select Region and Project.Click (the My Quotas icon) in the upper right corner.The Service Quota pag", "doc_type":"usermanual", "kw":"How Do I View My Quotas?,Quota,User Guide", @@ -5131,10 +4676,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I View My Quotas?", @@ -5144,7 +4686,7 @@ "uri":"dli_03_0032.html", "node_id":"dli_03_0032.xml", "product_code":"dli", - "code":"250", + "code":"249", "des":"The system does not support online quota adjustment. To increase a resource quota, dial the hotline or send an email to the customer service. We will process your applica", "doc_type":"usermanual", "kw":"How Do I Increase a Quota?,Quota,User Guide", @@ -5152,10 +4694,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Increase a Quota?", @@ -5165,7 +4704,7 @@ "uri":"dli_03_0054.html", "node_id":"dli_03_0054.xml", "product_code":"dli", - "code":"251", + "code":"250", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Permission", @@ -5173,10 +4712,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Permission", @@ -5186,7 +4722,7 @@ "uri":"dli_03_0223.html", "node_id":"dli_03_0223.xml", "product_code":"dli", - "code":"252", + "code":"251", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Usage", @@ -5194,10 +4730,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Usage", @@ -5207,7 +4740,7 @@ "uri":"dli_03_0100.html", "node_id":"dli_03_0100.xml", "product_code":"dli", - "code":"253", + "code":"252", "des":"DLI has a comprehensive permission control mechanism and supports fine-grained authentication through Identity and Access Management (IAM). You can create policies in IAM", "doc_type":"usermanual", "kw":"How Do I Manage Fine-Grained DLI Permissions?,Usage,User Guide", @@ -5215,10 +4748,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Manage Fine-Grained DLI Permissions?", @@ -5228,7 +4758,7 @@ "uri":"dli_03_0008.html", "node_id":"dli_03_0008.xml", "product_code":"dli", - "code":"254", + "code":"253", "des":"You cannot perform permission-related operations on the partition column of a partitioned table.However, when you grant the permission of any non-partition column in a pa", "doc_type":"usermanual", "kw":"What Is Column Permission Granting of a DLI Partition Table?,Usage,User Guide", @@ -5236,10 +4766,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Is Column Permission Granting of a DLI Partition Table?", @@ -5249,7 +4776,7 @@ "uri":"dli_03_0226.html", "node_id":"dli_03_0226.xml", "product_code":"dli", - "code":"255", + "code":"254", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"O&M Guide", @@ -5257,10 +4784,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"O&M Guide", @@ -5270,7 +4794,7 @@ "uri":"dli_03_0140.html", "node_id":"dli_03_0140.xml", "product_code":"dli", - "code":"256", + "code":"255", "des":"When you submit a job, a message is displayed indicating that the job fails to be submitted due to insufficient permission caused by arrears. 
In this case, you need to ch", "doc_type":"usermanual", "kw":"Why Does My Account Have Insufficient Permissions Due to Arrears?,O&M Guide,User Guide", @@ -5278,10 +4802,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does My Account Have Insufficient Permissions Due to Arrears?", @@ -5291,7 +4812,7 @@ "uri":"dli_03_0195.html", "node_id":"dli_03_0195.xml", "product_code":"dli", - "code":"257", + "code":"256", "des":"When the user update an existing program package, the following error information is displayed:\"error_code\"*DLI.0003\",\"error_msg\":\"Permission denied for resource 'resourc", "doc_type":"usermanual", "kw":"Why Does the System Display a Message Indicating Insufficient Permissions When I Update a Program Pa", @@ -5299,10 +4820,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does the System Display a Message Indicating Insufficient Permissions When I Update a Program Package?", @@ -5312,7 +4830,7 @@ "uri":"dli_03_0227.html", "node_id":"dli_03_0227.xml", "product_code":"dli", - "code":"258", + "code":"257", "des":"When the SQL query statement is executed, the system displays a message indicating that the user does not have the permission to query resources.Error information: DLI.00", "doc_type":"usermanual", "kw":"Why Is Error \"DLI.0003: Permission denied for resource...\" Reported When I Run a SQL Statement?,O&M ", @@ -5320,10 +4838,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"DLI.0003: Permission denied for resource...\" Reported When I Run a SQL Statement?", @@ -5333,7 +4848,7 @@ "uri":"dli_03_0228.html", "node_id":"dli_03_0228.xml", "product_code":"dli", - "code":"259", + "code":"258", "des":"The table permission has been granted and verified. 
However, after a period of time, an error is reported indicating that the table query fails.There are two possible rea", "doc_type":"usermanual", "kw":"Why Can't I Query Table Data After I've Been Granted Table Permissions?,O&M Guide,User Guide", @@ -5341,10 +4856,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Can't I Query Table Data After I've Been Granted Table Permissions?", @@ -5354,7 +4866,7 @@ "uri":"dli_03_0057.html", "node_id":"dli_03_0057.xml", "product_code":"dli", - "code":"260", + "code":"259", "des":"If a table inherits database permissions, you do not need to regrant the inherited permissions to the table.When you grant permissions on a table on the console:If you se", "doc_type":"usermanual", "kw":"Will an Error Be Reported if the Inherited Permissions Are Regranted to a Table That Inherits Databa", @@ -5362,9 +4874,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -5375,7 +4886,7 @@ "uri":"dli_03_0067.html", "node_id":"dli_03_0067.xml", "product_code":"dli", - "code":"261", + "code":"260", "des":"User A created Table1.User B created View1 based on Table1.After the Select Table permission on Table1 is granted to user C, user C fails to query View1.User B does not h", "doc_type":"usermanual", "kw":"Why Can't I Query a View After I'm Granted the Select Table Permission on the View?,O&M Guide,User G", @@ -5383,9 +4894,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -5396,7 +4906,7 @@ "uri":"dli_03_0049.html", "node_id":"dli_03_0049.xml", "product_code":"dli", - "code":"262", + "code":"261", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Queue", @@ -5404,10 +4914,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Queue", @@ -5417,7 +4924,7 @@ "uri":"dli_03_0229.html", "node_id":"dli_03_0229.xml", "product_code":"dli", - "code":"263", + "code":"262", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Usage", @@ -5425,10 +4932,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Usage", @@ -5438,7 +4942,7 @@ "uri":"dli_03_0109.html", "node_id":"dli_03_0109.xml", "product_code":"dli", - "code":"264", + "code":"263", "des":"Currently, you are not allowed to modify the description of a created queue. 
You can add the description when purchasing the queue.", "doc_type":"usermanual", "kw":"Does the Description of a DLI Queue Can Be Modified?,Usage,User Guide", @@ -5446,10 +4950,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Does the Description of a DLI Queue Can Be Modified?", @@ -5459,7 +4960,7 @@ "uri":"dli_03_0166.html", "node_id":"dli_03_0166.xml", "product_code":"dli", - "code":"265", + "code":"264", "des":"Deleting a queue does not cause table data loss in your database.", "doc_type":"usermanual", "kw":"Will Table Data in My Database Be Lost If I Delete a Queue?,Usage,User Guide", @@ -5467,10 +4968,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Will Table Data in My Database Be Lost If I Delete a Queue?", @@ -5480,7 +4978,7 @@ "uri":"dli_03_0170.html", "node_id":"dli_03_0170.xml", "product_code":"dli", - "code":"266", + "code":"265", "des":"You need to develop a mechanism to retry failed jobs. When a faulty queue is recovered, your application tries to submit the failed jobs to the queue again.", "doc_type":"usermanual", "kw":"How Does DLI Ensure the Reliability of Spark Jobs When a Queue Is Abnormal?,Usage,User Guide", @@ -5488,10 +4986,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Does DLI Ensure the Reliability of Spark Jobs When a Queue Is Abnormal?", @@ -5501,7 +4996,7 @@ "uri":"dli_03_0098.html", "node_id":"dli_03_0098.xml", "product_code":"dli", - "code":"267", + "code":"266", "des":"DLI allows you to subscribe to an SMN topic for failed jobs.Log in to the DLI console.In the navigation pane on the left, choose Queue Management.On the Queue Management ", "doc_type":"usermanual", "kw":"How Do I Monitor Queue Exceptions?,Usage,User Guide", @@ -5509,10 +5004,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Monitor Queue Exceptions?", @@ -5522,7 +5014,7 @@ "uri":"dli_03_0230.html", "node_id":"dli_03_0230.xml", "product_code":"dli", - "code":"268", + "code":"267", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"O&M Guide", @@ -5530,10 +5022,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"O&M Guide", @@ -5543,7 +5032,7 @@ "uri":"dli_03_0095.html", "node_id":"dli_03_0095.xml", "product_code":"dli", - "code":"269", + "code":"268", "des":"To check the running status of the DLI queue and determine whether to run more jobs on that queue, you need to check the queue load.Search for Cloud Eye on the console.In", "doc_type":"usermanual", "kw":"How Do I View DLI Queue Load?,O&M Guide,User Guide", @@ -5551,10 +5040,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I View DLI Queue Load?", @@ -5564,7 +5050,7 @@ "uri":"dli_03_0183.html", "node_id":"dli_03_0183.xml", "product_code":"dli", - "code":"270", + "code":"269", "des":"You need to check the large number of jobs in the Submitting and Running states on the queue.Use Cloud Eye to view jobs in different states on the queue. The procedure is", "doc_type":"usermanual", "kw":"How Do I Determine Whether There Are Too Many Jobs in the Current Queue?,O&M Guide,User Guide", @@ -5572,10 +5058,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Determine Whether There Are Too Many Jobs in the Current Queue?", @@ -5585,7 +5068,7 @@ "uri":"dli_03_0065.html", "node_id":"dli_03_0065.xml", "product_code":"dli", - "code":"271", + "code":"270", "des":"Currently, DLI provides two types of queues, For SQL and For general use. SQL queues are used to run SQL jobs. General-use queues are compatible with Spark queues of earl", "doc_type":"usermanual", "kw":"How Do I Switch an Earlier-Version Spark Queue to a General-Purpose Queue?,O&M Guide,User Guide", @@ -5593,10 +5076,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Switch an Earlier-Version Spark Queue to a General-Purpose Queue?", @@ -5606,7 +5086,7 @@ "uri":"dli_03_0193.html", "node_id":"dli_03_0193.xml", "product_code":"dli", - "code":"272", + "code":"271", "des":"DLI queues do not use resources or bandwidth when no job is running. 
In this case, the running status of DLI queues is not displayed on CES.", "doc_type":"usermanual", "kw":"Why Cannot I View the Resource Running Status of DLI Queues on Cloud Eye?,O&M Guide,User Guide", @@ -5614,10 +5094,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Cannot I View the Resource Running Status of DLI Queues on Cloud Eye?", @@ -5627,7 +5104,7 @@ "uri":"dli_03_0088.html", "node_id":"dli_03_0088.xml", "product_code":"dli", - "code":"273", + "code":"272", "des":"In DLI, 64 CU = 64 cores and 256 GB memory.In a Spark job, if the driver occupies 4 cores and 16 GB memory, the executor can occupy 60 cores and 240 GB memory.", "doc_type":"usermanual", "kw":"How Do I Allocate Queue Resources for Running Spark Jobs If I Have Purchased 64 CUs?,O&M Guide,User ", @@ -5635,10 +5112,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Allocate Queue Resources for Running Spark Jobs If I Have Purchased 64 CUs?", @@ -5648,7 +5122,7 @@ "uri":"dli_03_0159.html", "node_id":"dli_03_0159.xml", "product_code":"dli", - "code":"274", + "code":"273", "des":"Queue plans create failed. The plan xxx target cu is out of quota is displayed when you create a scheduled scaling task.The CU quota of the current account is insufficien", "doc_type":"usermanual", "kw":"Why Is Error \"Queue plans create failed. The plan xxx target cu is out of quota\" Reported When I Sch", @@ -5656,10 +5130,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"Queue plans create failed. The plan xxx target cu is out of quota\" Reported When I Schedule CU Changes?", @@ -5669,7 +5140,7 @@ "uri":"dli_03_0171.html", "node_id":"dli_03_0171.xml", "product_code":"dli", - "code":"275", + "code":"274", "des":"After a SQL job was submitted to the default queue, the job runs abnormally. The job log reported that the execution timed out. The exception logs are as follows:[ERROR] ", "doc_type":"usermanual", "kw":"Why Is a Timeout Exception Reported When a DLI SQL Statement Fails to Be Executed on the Default Que", @@ -5677,10 +5148,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is a Timeout Exception Reported When a DLI SQL Statement Fails to Be Executed on the Default Queue?", @@ -5690,7 +5158,7 @@ "uri":"dli_03_0276.html", "node_id":"dli_03_0276.xml", "product_code":"dli", - "code":"276", + "code":"275", "des":"In daily big data analysis work, it is important to allocate and manage compute resources properly to provide a good job execution environment.You can allocate resources ", "doc_type":"usermanual", "kw":"How Can I Check the Actual and Used CUs for an Elastic Resource Pool as Well as the Required CUs for", @@ -5708,7 +5176,7 @@ "uri":"dli_03_0022.html", "node_id":"dli_03_0022.xml", "product_code":"dli", - "code":"277", + "code":"276", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Datasource Connections", @@ -5716,10 +5184,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Datasource Connections", @@ -5729,7 +5194,7 @@ "uri":"dli_03_0110.html", "node_id":"dli_03_0110.xml", "product_code":"dli", - "code":"278", + "code":"277", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Datasource Connections", @@ -5737,10 +5202,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Datasource Connections", @@ -5750,7 +5212,7 @@ "uri":"dli_03_0128.html", "node_id":"dli_03_0128.xml", "product_code":"dli", - "code":"279", + "code":"278", "des":"You need to create a VPC peering connection to enable network connectivity. Take MRS as an example. If DLI and MRS clusters are in the same VPC, and the security group is", "doc_type":"usermanual", "kw":"Why Do I Need to Create a VPC Peering Connection for an Enhanced Datasource Connection?,Datasource C", @@ -5758,10 +5220,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Do I Need to Create a VPC Peering Connection for an Enhanced Datasource Connection?", @@ -5771,7 +5230,7 @@ "uri":"dli_03_0237.html", "node_id":"dli_03_0237.xml", "product_code":"dli", - "code":"280", + "code":"279", "des":"An enhanced datasource connection failed to pass the network connectivity test. Datasource connection cannot be bound to a queue. The following error information is displ", "doc_type":"usermanual", "kw":"Failed to Bind a Queue to an Enhanced Datasource Connection,Datasource Connections,User Guide", @@ -5779,10 +5238,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Failed to Bind a Queue to an Enhanced Datasource Connection", @@ -5792,7 +5248,7 @@ "uri":"dli_03_0238.html", "node_id":"dli_03_0238.xml", "product_code":"dli", - "code":"281", + "code":"280", "des":"The outbound rule had been configured for the security group of the queue associated with the enhanced datasource connection. The datasource authentication used a passwor", "doc_type":"usermanual", "kw":"DLI Failed to Connect to GaussDB(DWS) Through an Enhanced Datasource Connection,Datasource Connectio", @@ -5800,10 +5256,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"DLI Failed to Connect to GaussDB(DWS) Through an Enhanced Datasource Connection", @@ -5813,7 +5266,7 @@ "uri":"dli_03_0179.html", "node_id":"dli_03_0179.xml", "product_code":"dli", - "code":"282", + "code":"281", "des":"A datasource connection is created and bound to a queue. 
The connectivity test fails and the following error information is displayed:failed to connect to specified addre", "doc_type":"usermanual", "kw":"How Do I Do if the Datasource Connection Is Created But the Network Connectivity Test Fails?,Datasou", @@ -5821,10 +5274,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Do if the Datasource Connection Is Created But the Network Connectivity Test Fails?", @@ -5834,7 +5284,7 @@ "uri":"dli_03_0186.html", "node_id":"dli_03_0186.xml", "product_code":"dli", - "code":"283", + "code":"282", "des":"Configuring the Connection Between a DLI Queue and a Data Source in a Private NetworkIf your DLI job needs to connect to a data source, for example, MRS, RDS, CSS, Kafka,", "doc_type":"usermanual", "kw":"How Do I Configure the Network Between a DLI Queue and a Data Source?,Datasource Connections,User Gu", @@ -5842,10 +5292,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Configure the Network Between a DLI Queue and a Data Source?", @@ -5855,7 +5302,7 @@ "uri":"dli_03_0257.html", "node_id":"dli_03_0257.xml", "product_code":"dli", - "code":"284", + "code":"283", "des":"The possible causes and solutions are as follows:If you have created a queue, do not bind it to a datasource connection immediately. Wait for 5 to 10 minutes. After the c", "doc_type":"usermanual", "kw":"What Can I Do If a Datasource Connection Is Stuck in Creating State When I Try to Bind a Queue to It", @@ -5863,10 +5310,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Can I Do If a Datasource Connection Is Stuck in Creating State When I Try to Bind a Queue to It?", @@ -5876,7 +5320,7 @@ "uri":"dli_03_0259.html", "node_id":"dli_03_0259.xml", "product_code":"dli", - "code":"285", + "code":"284", "des":"DLI enhanced datasource connection uses VPC peering to directly connect the VPC networks of the desired data sources for point-to-point data exchanges.", "doc_type":"usermanual", "kw":"How Do I Connect DLI to Data Sources?,Datasource Connections,User Guide", @@ -5884,10 +5328,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Connect DLI to Data Sources?", @@ -5897,7 +5338,7 @@ "uri":"dli_03_0112.html", "node_id":"dli_03_0112.xml", "product_code":"dli", - "code":"286", + "code":"285", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Cross-Source Analysis", @@ -5905,10 +5346,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Cross-Source Analysis", @@ -5918,7 +5356,7 @@ "uri":"dli_03_0011.html", "node_id":"dli_03_0011.xml", "product_code":"dli", - "code":"287", + "code":"286", "des":"To perform query on data stored on services rather than DLI, perform the following steps:Assume that the data to be queried is stored on multiple services (for example, O", "doc_type":"usermanual", "kw":"How Can I Perform Query on Data Stored on Services Rather Than DLI?,Cross-Source Analysis,User Guide", @@ -5926,10 +5364,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Can I Perform Query on Data Stored on Services Rather Than DLI?", @@ -5939,7 +5374,7 @@ "uri":"dli_03_0085.html", "node_id":"dli_03_0085.xml", "product_code":"dli", - "code":"288", + "code":"287", "des":"Connect VPCs in different regions.Create an enhanced datasource connection on DLI and bind it to a queue.Add a DLI route.", "doc_type":"usermanual", "kw":"How Can I Access Data Across Regions?,Cross-Source Analysis,User Guide", @@ -5947,10 +5382,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Can I Access Data Across Regions?", @@ -5960,7 +5392,7 @@ "uri":"dli_03_0028.html", "node_id":"dli_03_0028.xml", "product_code":"dli", - "code":"289", + "code":"288", "des":"When data is inserted into DLI, set the ID field to NULL.", "doc_type":"usermanual", "kw":"How Do I Set the Auto-increment Primary Key or Other Fields That Are Automatically Filled in the RDS", @@ -5968,10 +5400,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Set the Auto-increment Primary Key or Other Fields That Are Automatically Filled in the RDS Table When Creating a DLI and Associating It with the RDS Table?", @@ -5981,7 +5410,7 @@ "uri":"dli_03_0256.html", "node_id":"dli_03_0256.xml", "product_code":"dli", - "code":"290", + "code":"289", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Datasource Connection O&M", @@ -5989,10 +5418,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Datasource Connection O&M", @@ -6002,7 +5428,7 @@ "uri":"dli_03_0047.html", "node_id":"dli_03_0047.xml", "product_code":"dli", - "code":"291", + "code":"290", "des":"Possible CausesThe network connectivity is abnormal. 
Check whether the security group is correctly selected and whether the VPC is correctly configured.The network connec", "doc_type":"usermanual", "kw":"Why Is the Error Message \"communication link failure\" Displayed When I Use a Newly Activated Datasou", @@ -6010,10 +5436,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is the Error Message \"communication link failure\" Displayed When I Use a Newly Activated Datasource Connection?", @@ -6023,7 +5446,7 @@ "uri":"dli_03_0080.html", "node_id":"dli_03_0080.xml", "product_code":"dli", - "code":"292", + "code":"291", "des":"The cluster host information is not added to the datasource connection. As a result, the KRB authentication fails, the connection times out, and no error is recorded in l", "doc_type":"usermanual", "kw":"Connection Times Out During MRS HBase Datasource Connection, and No Error Is Recorded in Logs,Dataso", @@ -6031,10 +5454,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Connection Times Out During MRS HBase Datasource Connection, and No Error Is Recorded in Logs", @@ -6044,7 +5464,7 @@ "uri":"dli_03_0111.html", "node_id":"dli_03_0111.xml", "product_code":"dli", - "code":"293", + "code":"292", "des":"When you create a VPC peering connection for the datasource connection, the following error information is displayed:Before you create a datasource connection, check whet", "doc_type":"usermanual", "kw":"Why Can't I Find the Subnet When Creating a DLI Datasource Connection?,Datasource Connection O&M,Use", @@ -6052,10 +5472,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Can't I Find the Subnet When Creating a DLI Datasource Connection?", @@ -6065,7 +5482,7 @@ "uri":"dli_03_0239.html", "node_id":"dli_03_0239.xml", "product_code":"dli", - "code":"294", + "code":"293", "des":"A datasource RDS table was created in the DataArts Studio, and the insert overwrite statement was executed to write data into RDS. 
DLI.0999: BatchUpdateException: Incorre", "doc_type":"usermanual", "kw":"Error Message \"Incorrect string value\" Is Displayed When insert overwrite Is Executed on a Datasourc", @@ -6073,10 +5490,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Error Message \"Incorrect string value\" Is Displayed When insert overwrite Is Executed on a Datasource RDS Table", @@ -6086,7 +5500,7 @@ "uri":"dli_03_0250.html", "node_id":"dli_03_0250.xml", "product_code":"dli", - "code":"295", + "code":"294", "des":"The system failed to create a datasource RDS table, and null pointer error was reported.The following table creation statement was used:The RDS database is in a PostGre c", "doc_type":"usermanual", "kw":"Null Pointer Error Is Displayed When the System Creates a Datasource RDS Table,Datasource Connection", @@ -6094,10 +5508,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Null Pointer Error Is Displayed When the System Creates a Datasource RDS Table", @@ -6107,7 +5518,7 @@ "uri":"dli_03_0251.html", "node_id":"dli_03_0251.xml", "product_code":"dli", - "code":"296", + "code":"295", "des":"The system failed to execute insert overwrite on the datasource GaussDB(DWS) table, and org.postgresql.util.PSQLException: ERROR: tuple concurrently updated was displayed", "doc_type":"usermanual", "kw":"Error Message \"org.postgresql.util.PSQLException: ERROR: tuple concurrently updated\" Is Displayed Wh", @@ -6115,10 +5526,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Error Message \"org.postgresql.util.PSQLException: ERROR: tuple concurrently updated\" Is Displayed When the System Executes insert overwrite on a Datasource GaussDB(DWS) Table", @@ -6128,7 +5536,7 @@ "uri":"dli_03_0252.html", "node_id":"dli_03_0252.xml", "product_code":"dli", - "code":"297", + "code":"296", "des":"A datasource table was used to import data to a CloudTable HBase table. This HBase table contains a column family and a rowkey for 100 million simulating data records. Th", "doc_type":"usermanual", "kw":"RegionTooBusyException Is Reported When Data Is Imported to a CloudTable HBase Table Through a Datas", @@ -6136,10 +5544,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"RegionTooBusyException Is Reported When Data Is Imported to a CloudTable HBase Table Through a Datasource Table", @@ -6149,7 +5554,7 @@ "uri":"dli_03_0253.html", "node_id":"dli_03_0253.xml", "product_code":"dli", - "code":"298", + "code":"297", "des":"A table was created on GaussDB(DWS) and then a datasource connection was created on DLI to read and write data. 
An error message was displayed during data writing, indica", "doc_type":"usermanual", "kw":"A Null Value Is Written Into a Non-Null Field When a DLI Datasource Connection Is Used to Connect to", @@ -6157,10 +5562,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"A Null Value Is Written Into a Non-Null Field When a DLI Datasource Connection Is Used to Connect to a GaussDB(DWS) Table", @@ -6170,7 +5572,7 @@ "uri":"dli_03_0254.html", "node_id":"dli_03_0254.xml", "product_code":"dli", - "code":"299", + "code":"298", "des":"A datasource GaussDB(DWS) table and the datasource connection were created in DLI, and the schema of the source table in GaussDB(DWS) were updated. During the job executi", "doc_type":"usermanual", "kw":"An Insert Operation Failed After the Schema of the GaussDB(DWS) Source Table Is Updated,Datasource C", @@ -6178,10 +5580,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"An Insert Operation Failed After the Schema of the GaussDB(DWS) Source Table Is Updated", @@ -6191,7 +5590,7 @@ "uri":"dli_03_0056.html", "node_id":"dli_03_0056.xml", "product_code":"dli", - "code":"300", + "code":"299", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"APIs", @@ -6199,10 +5598,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"APIs", @@ -6212,7 +5608,7 @@ "uri":"dli_03_0060.html", "node_id":"dli_03_0060.xml", "product_code":"dli", - "code":"301", + "code":"300", "des":"In the REST API provided by DLI, the request header can be added to the request URI, for example, Content-Type.Content-Type indicates the request body type or format. 
The", "doc_type":"usermanual", "kw":"Why Is Error \"unsupported media Type\" Reported When I Subimt a SQL Job?,APIs,User Guide", @@ -6220,10 +5616,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"unsupported media Type\" Reported When I Subimt a SQL Job?", @@ -6233,7 +5626,7 @@ "uri":"dli_03_0125.html", "node_id":"dli_03_0125.xml", "product_code":"dli", - "code":"302", + "code":"301", "des":"When different IAM users call an API under the same enterprise project in the same region, the project ID is the same.", "doc_type":"usermanual", "kw":"Is the Project ID Fixed when Different IAM Users Call an API?,APIs,User Guide", @@ -6241,10 +5634,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Is the Project ID Fixed when Different IAM Users Call an API?", @@ -6254,7 +5644,7 @@ "uri":"dli_03_0178.html", "node_id":"dli_03_0178.xml", "product_code":"dli", - "code":"303", + "code":"302", "des":"When the API call for submitting a SQL job times out, and the following error information is displayed:There are currently no resources tracked in the state, so there is ", "doc_type":"usermanual", "kw":"What Can I Do If an Error Is Reported When the Execution of the API for Creating a SQL Job Times Out", @@ -6262,10 +5652,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Can I Do If an Error Is Reported When the Execution of the API for Creating a SQL Job Times Out?", @@ -6275,7 +5662,7 @@ "uri":"dli_03_0058.html", "node_id":"dli_03_0058.xml", "product_code":"dli", - "code":"304", + "code":"303", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"SDKs", @@ -6283,10 +5670,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"SDKs", @@ -6296,7 +5680,7 @@ "uri":"dli_03_0073.html", "node_id":"dli_03_0073.xml", "product_code":"dli", - "code":"305", + "code":"304", "des":"When you query the SQL job results using SDK, the system checks the job status when the job is submitted. The timeout interval set in the system is 300s. 
If the job is no", "doc_type":"usermanual", "kw":"How Do I Set the Timeout Duration for Querying SQL Job Results Using SDK?,SDKs,User Guide", @@ -6304,10 +5688,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Set the Timeout Duration for Querying SQL Job Results Using SDK?", @@ -6317,7 +5698,7 @@ "uri":"dli_03_0255.html", "node_id":"dli_03_0255.xml", "product_code":"dli", - "code":"306", + "code":"305", "des":"Run the ping command to check whether dli.xxx can be accessed.If dli.xxx can be accessed, check whether DNS resolution is correctly configured.If dli.xxx can be accessed,", "doc_type":"usermanual", "kw":"How Do I Handle the dli.xxx,unable to resolve host address Error?,SDKs,User Guide", @@ -6325,10 +5706,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Handle the dli.xxx,unable to resolve host address Error?", @@ -6338,7 +5716,7 @@ "uri":"dli_01_00006.html", "node_id":"dli_01_00006.xml", "product_code":"dli", - "code":"307", + "code":"306", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Change History,User Guide", diff --git a/docs/dli/umn/CLASS.TXT.json b/docs/dli/umn/CLASS.TXT.json index 082ba8e3..8f7006d1 100644 --- a/docs/dli/umn/CLASS.TXT.json +++ b/docs/dli/umn/CLASS.TXT.json @@ -38,7 +38,7 @@ { "desc":"Only the latest 100 jobs are displayed on DLI's SparkUI.A maximum of 1,000 job results can be displayed on the console. To view more or all jobs, export the job data to O", "product_code":"dli", - "title":"Constraints and Limitations", + "title":"Notes and Constraints", "uri":"dli_07_0005.html", "doc_type":"usermanual", "p_code":"1", @@ -362,7 +362,7 @@ { "desc":"Elastic scaling can be performed for a newly created queue only when there were jobs running in this queue.Queues with 16 CUs do not support scale-out or scale-in.Queues ", "product_code":"dli", - "title":"Elastic Queue Scaling", + "title":"Elastic Scaling of Queues", "uri":"dli_01_0487.html", "doc_type":"usermanual", "p_code":"34", @@ -522,7 +522,7 @@ "code":"58" }, { - "desc":"If CUs of a yearly/monthly elastic resource pool cannot meet your service requirements, you can modify the CUs. In this case, you will be charged based on the number of C", + "desc":"If the current specifications of your elastic resource pool do not meet your service needs, you can modify them using the change specifications function.In the navigation", "product_code":"dli", "title":"Modifying Specifications", "uri":"dli_01_0524.html", @@ -827,6 +827,15 @@ "p_code":"89", "code":"92" }, + { + "desc":"VPC sharing allows sharing VPC resources created in one account with other accounts using Resource Access Manager (RAM). 
For example, account A can share its VPC and subn", + "product_code":"dli", + "title":"Establishing a Network Connection Between DLI and Resources in a Shared VPC", + "uri":"dli_01_0624.html", + "doc_type":"usermanual", + "p_code":"89", + "code":"93" + }, { "desc":"Delete an enhanced datasource connection that is no longer used on the console.Log in to the DLI management console.In the left navigation pane, choose Datasource Connect", "product_code":"dli", @@ -834,16 +843,16 @@ "uri":"dli_01_0553.html", "doc_type":"usermanual", "p_code":"89", - "code":"93" + "code":"94" }, { "desc":"Host information is the mapping between host IP addresses and domain names. After you configure host information, jobs can only use the configured domain names to access ", "product_code":"dli", - "title":"Modifying Host Information", + "title":"Modifying Host Information in an Elastic Resource Pool", "uri":"dli_01_0013.html", "doc_type":"usermanual", "p_code":"89", - "code":"94" + "code":"95" }, { "desc":"The CIDR block of the DLI queue that is bound with a datasource connection cannot overlap with that of the data source.The default queue cannot be bound with a connection", @@ -852,7 +861,7 @@ "uri":"dli_01_0514.html", "doc_type":"usermanual", "p_code":"89", - "code":"95" + "code":"96" }, { "desc":"A route is configured with the destination, next hop type, and next hop to determine where the network traffic is directed. Routes are classified into system routes and c", @@ -861,7 +870,7 @@ "uri":"dli_01_0014.html", "doc_type":"usermanual", "p_code":"89", - "code":"96" + "code":"97" }, { "desc":"Delete a route that is no longer used.A custom route table cannot be deleted if it is associated with a subnet.Log in to the DLI management console.In the left navigation", @@ -870,7 +879,7 @@ "uri":"dli_01_0556.html", "doc_type":"usermanual", "p_code":"89", - "code":"97" + "code":"98" }, { "desc":"Enhanced connections support user authorization by project. After authorization, users in the project have the permission to perform operations on the enhanced connection", @@ -879,7 +888,7 @@ "uri":"dli_01_0018.html", "doc_type":"usermanual", "p_code":"89", - "code":"98" + "code":"99" }, { "desc":"A tag is a key-value pair customized by users and used to identify cloud resources. It helps users to classify and search for cloud resources. A tag consists of a tag key", @@ -888,7 +897,7 @@ "uri":"dli_01_0019.html", "doc_type":"usermanual", "p_code":"89", - "code":"99" + "code":"100" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -897,7 +906,7 @@ "uri":"dli_01_0422.html", "doc_type":"usermanual", "p_code":"", - "code":"100" + "code":"101" }, { "desc":"When analyzing across multiple sources, it is not recommended to configure authentication information directly in a job as it can lead to password leakage. Instead, you a", @@ -905,8 +914,8 @@ "title":"Overview", "uri":"dli_01_0561.html", "doc_type":"usermanual", - "p_code":"100", - "code":"101" + "p_code":"101", + "code":"102" }, { "desc":"Create a CSS datasource authentication on the DLI console to store the authentication information of the CSS security cluster to DLI. 
This will allow you to access to the", @@ -914,8 +923,8 @@ "title":"Creating a CSS Datasource Authentication", "uri":"dli_01_0427.html", "doc_type":"usermanual", - "p_code":"100", - "code":"102" + "p_code":"101", + "code":"103" }, { "desc":"Create a Kerberos datasource authentication on the DLI console to store the authentication information of the data source to DLI. This will allow you to access to the dat", @@ -923,8 +932,8 @@ "title":"Creating a Kerberos Datasource Authentication", "uri":"dli_01_0558.html", "doc_type":"usermanual", - "p_code":"100", - "code":"103" + "p_code":"101", + "code":"104" }, { "desc":"Create a Kafka_SSL datasource authentication on the DLI console to store the Kafka authentication information to DLI. This will allow you to access to Kafka instances wit", @@ -932,8 +941,8 @@ "title":"Creating a Kafka_SSL Datasource Authentication", "uri":"dli_01_0560.html", "doc_type":"usermanual", - "p_code":"100", - "code":"104" + "p_code":"101", + "code":"105" }, { "desc":"Create a password datasource authentication on the DLI console to store passwords of the GaussDB(DWS), RDS, DCS, and DDS data sources to DLI. This will allow you to acces", @@ -941,8 +950,8 @@ "title":"Creating a Password Datasource Authentication", "uri":"dli_01_0559.html", "doc_type":"usermanual", - "p_code":"100", - "code":"105" + "p_code":"101", + "code":"106" }, { "desc":"Grant permissions on a datasource authentication to users so multiple user jobs can use the datasource authentication without affecting each other.The administrator and t", @@ -950,8 +959,8 @@ "title":"Datasource Authentication Permission Management", "uri":"dli_01_0480.html", "doc_type":"usermanual", - "p_code":"100", - "code":"106" + "p_code":"101", + "code":"107" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -960,7 +969,7 @@ "uri":"dli_01_0485.html", "doc_type":"usermanual", "p_code":"", - "code":"107" + "code":"108" }, { "desc":"DLI allows you to set variables that are frequently used during job development as global variables on the DLI management console. This avoids repeated definitions during", @@ -968,8 +977,8 @@ "title":"Global Variables", "uri":"dli_01_0476.html", "doc_type":"usermanual", - "p_code":"107", - "code":"108" + "p_code":"108", + "code":"109" }, { "desc":"You can grant permissions on a global variable to users.The administrator and the global variable owner have all permissions. You do not need to set permissions for them,", @@ -977,8 +986,8 @@ "title":"Permission Management for Global Variables", "uri":"dli_01_0533.html", "doc_type":"usermanual", - "p_code":"107", - "code":"109" + "p_code":"108", + "code":"110" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -987,7 +996,7 @@ "uri":"dli_01_0408.html", "doc_type":"usermanual", "p_code":"", - "code":"110" + "code":"111" }, { "desc":"DLI has a comprehensive permission control mechanism and supports fine-grained authentication through Identity and Access Management (IAM). 
You can create policies in IAM", @@ -995,8 +1004,8 @@ "title":"Overview", "uri":"dli_01_0440.html", "doc_type":"usermanual", - "p_code":"110", - "code":"111" + "p_code":"111", + "code":"112" }, { "desc":"You can use Identity and Access Management (IAM) to implement fine-grained permissions control on DLI resources. For details, see Overview.If your cloud account does not ", @@ -1004,8 +1013,8 @@ "title":"Creating an IAM User and Granting Permissions", "uri":"dli_01_0418.html", "doc_type":"usermanual", - "p_code":"110", - "code":"112" + "p_code":"111", + "code":"113" }, { "desc":"Custom policies can be created as a supplement to the system policies of DLI. You can add actions to custom policies. For the actions supported for custom policies, see \"", @@ -1013,8 +1022,8 @@ "title":"Creating a Custom Policy", "uri":"dli_01_0451.html", "doc_type":"usermanual", - "p_code":"110", - "code":"113" + "p_code":"111", + "code":"114" }, { "desc":"A resource is an object that exists within a service. You can select DLI resources by specifying their paths.", @@ -1022,8 +1031,8 @@ "title":"DLI Resources", "uri":"dli_01_0417.html", "doc_type":"usermanual", - "p_code":"110", - "code":"114" + "p_code":"111", + "code":"115" }, { "desc":"Request conditions are useful in determining when a custom policy takes effect. A request condition consists of a condition key and operator. Condition keys are either gl", @@ -1031,8 +1040,8 @@ "title":"DLI Request Conditions", "uri":"dli_01_0475.html", "doc_type":"usermanual", - "p_code":"110", - "code":"115" + "p_code":"111", + "code":"116" }, { "desc":"Table 1 lists the common operations supported by each system policy of DLI. Choose proper system policies according to this table. For details about the SQL statement per", @@ -1040,8 +1049,8 @@ "title":"Common Operations Supported by DLI System Policy", "uri":"dli_01_0441.html", "doc_type":"usermanual", - "p_code":"110", - "code":"116" + "p_code":"111", + "code":"117" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1050,16 +1059,16 @@ "uri":"dli_01_0513.html", "doc_type":"usermanual", "p_code":"", - "code":"117" + "code":"118" }, { - "desc":"On the DLI management console, you can import data stored on OBS to DLI tables from Data Management > Databases and Tables > Table Management and SQL Editor pages. For de", + "desc":"On the DLI management console, you can import data stored in OBS into DLI tables.To import OBS data to a DLI table, either choose Data Management > Databases and Tables i", "product_code":"dli", "title":"Importing Data to a DLI Table", "uri":"dli_01_0420.html", "doc_type":"usermanual", - "p_code":"117", - "code":"118" + "p_code":"118", + "code":"119" }, { "desc":"This section describes metrics reported by DLI to Cloud Eye as well as their namespaces and dimensions. 
You can use the management console or APIs provided by Cloud Eye t", @@ -1067,8 +1076,8 @@ "title":"Viewing Monitoring Metrics", "uri":"dli_01_0445.html", "doc_type":"usermanual", - "p_code":"117", - "code":"119" + "p_code":"118", + "code":"120" }, { "desc":"With CTS, you can record operations associated with DLI for later query, audit, and backtrack operations.", @@ -1076,8 +1085,8 @@ "title":"DLI Operations That Can Be Recorded by CTS", "uri":"dli_01_0318.html", "doc_type":"usermanual", - "p_code":"117", - "code":"120" + "p_code":"118", + "code":"121" }, { "desc":"A quota limits the quantity of a resource available to users, thereby preventing spikes in the usage of the resource.You can also request for an increased quota if your e", @@ -1085,8 +1094,8 @@ "title":"Quota Management", "uri":"dli_01_0550.html", "doc_type":"usermanual", - "p_code":"117", - "code":"121" + "p_code":"118", + "code":"122" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1095,7 +1104,7 @@ "uri":"dli_01_0539.html", "doc_type":"usermanual", "p_code":"", - "code":"122" + "code":"123" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1103,8 +1112,8 @@ "title":"Flink Jobs", "uri":"dli_03_0037.html", "doc_type":"usermanual", - "p_code":"122", - "code":"123" + "p_code":"123", + "code":"124" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1112,8 +1121,8 @@ "title":"Usage", "uri":"dli_03_0137.html", "doc_type":"usermanual", - "p_code":"123", - "code":"124" + "p_code":"124", + "code":"125" }, { "desc":"DLI Flink jobs support the following data formats:Avro, Avro_merge, BLOB, CSV, EMAIL, JSON, ORC, Parquet, and XML.DLI Flink jobs support data from the following data sour", @@ -1121,8 +1130,8 @@ "title":"What Data Formats and Data Sources Are Supported by DLI Flink Jobs?", "uri":"dli_03_0083.html", "doc_type":"usermanual", - "p_code":"124", - "code":"125" + "p_code":"125", + "code":"126" }, { "desc":"A sub-user can view queues but cannot view Flink jobs. You can authorize the sub-user using DLI or IAM.Authorization on DLILog in to the DLI console using a tenant accoun", @@ -1130,8 +1139,8 @@ "title":"How Do I Authorize a Subuser to View Flink Jobs?", "uri":"dli_03_0139.html", "doc_type":"usermanual", - "p_code":"124", - "code":"126" + "p_code":"125", + "code":"127" }, { "desc":"DLI Flink jobs are highly available. 
You can enable the automatic restart function to automatically restart your jobs after short-time faults of peripheral services are r", @@ -1139,8 +1148,8 @@ "title":"How Do I Set Auto Restart upon Exception for a Flink Job?", "uri":"dli_03_0090.html", "doc_type":"usermanual", - "p_code":"124", - "code":"127" + "p_code":"125", + "code":"128" }, { "desc":"When you create a Flink SQL job or Flink Jar job, you can select Save Job Log on the job editing page to save job running logs to OBS.To set the OBS bucket for storing th", @@ -1148,8 +1157,8 @@ "title":"How Do I Save Flink Job Logs?", "uri":"dli_03_0099.html", "doc_type":"usermanual", - "p_code":"124", - "code":"128" + "p_code":"125", + "code":"129" }, { "desc":"DLI can output Flink job results to DIS. You can view the results in DIS. For details, see \"Obtaining Data from DIS\" in Data Ingestion Service User Guide.DLI can output F", @@ -1157,8 +1166,8 @@ "title":"How Can I Check Flink Job Results?", "uri":"dli_03_0043.html", "doc_type":"usermanual", - "p_code":"124", - "code":"129" + "p_code":"125", + "code":"130" }, { "desc":"Choose Job Management > Flink Jobs. In the Operation column of the target job, choose More > Permissions. When a new user is authorized, No such user. userName:xxxx. is d", @@ -1166,8 +1175,8 @@ "title":"Why Is Error \"No such user. userName:xxxx.\" Reported on the Flink Job Management Page When I Grant Permission to a User?", "uri":"dli_03_0160.html", "doc_type":"usermanual", - "p_code":"124", - "code":"130" + "p_code":"125", + "code":"131" }, { "desc":"Checkpoint was enabled when a Flink job is created, and the OBS bucket for storing checkpoints was specified. After a Flink job is manually stopped, no message is display", @@ -1175,8 +1184,8 @@ "title":"How Do I Know Which Checkpoint the Flink Job I Stopped Will Be Restored to When I Start the Job Again?", "uri":"dli_03_0180.html", "doc_type":"usermanual", - "p_code":"124", - "code":"131" + "p_code":"125", + "code":"132" }, { "desc":"When you set running parameters of a DLI Flink job, you can enable Alarm Generation upon Job Exception to receive alarms when the job runs abnormally or is in arrears.If ", @@ -1184,8 +1193,8 @@ "title":"Why Is a Message Displayed Indicating That the SMN Topic Does Not Exist When I Use the SMN Topic in DLI?", "uri":"dli_03_0036.html", "doc_type":"usermanual", - "p_code":"124", - "code":"132" + "p_code":"125", + "code":"133" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1193,8 +1202,8 @@ "title":"Flink SQL", "uri":"dli_03_0131.html", "doc_type":"usermanual", - "p_code":"123", - "code":"133" + "p_code":"124", + "code":"134" }, { "desc":"The consumption capability of a Flink SQL job depends on the data source transmission, queue size, and job parameter settings. The peak consumption is 10 Mbit/s.", @@ -1202,8 +1211,8 @@ "title":"How Much Data Can Be Processed in a Day by a Flink SQL Job?", "uri":"dli_03_0130.html", "doc_type":"usermanual", - "p_code":"133", - "code":"134" + "p_code":"134", + "code":"135" }, { "desc":"The temp stream in Flink SQL is similar to a subquery. It is a logical stream used to simplify the SQL logic and does not generate data storage. Therefore, there is no ne", @@ -1211,17 +1220,17 @@ "title":"Does Data in the Temporary Stream of Flink SQL Need to Be Cleared Periodically? 
How Do I Clear the Data?", "uri":"dli_03_0061.html", "doc_type":"usermanual", - "p_code":"133", - "code":"135" + "p_code":"134", + "code":"136" }, { - "desc":"SymptomWhen you create a Flink SQL job and configure the parameters, you select an OBS bucket you have created. The system displays a message indicating that the OBS buck", + "desc":"When you create a Flink SQL job and configure the parameters, you select an OBS bucket you have created. The system displays a message indicating that the OBS bucket is n", "product_code":"dli", "title":"Why Is a Message Displayed Indicating That the OBS Bucket Is Not Authorized When I Select an OBS Bucket for a Flink SQL Job?", "uri":"dli_03_0138.html", "doc_type":"usermanual", - "p_code":"133", - "code":"136" + "p_code":"134", + "code":"137" }, { "desc":"When using a Flink SQL job, you need to create an OBS partition table for subsequent batch processing.In the following example, the day field is used as the partition fie", @@ -1229,8 +1238,8 @@ "title":"How Do I Create an OBS Partitioned Table for a Flink SQL Job?", "uri":"dli_03_0089.html", "doc_type":"usermanual", - "p_code":"133", - "code":"137" + "p_code":"134", + "code":"138" }, { "desc":"In this example, the day field is used as the partition field with the parquet encoding format (only the parquet format is supported currently) to dump car_info data to O", @@ -1238,8 +1247,8 @@ "title":"How Do I Dump Data to OBS and Create an OBS Partitioned Table?", "uri":"dli_03_0075.html", "doc_type":"usermanual", - "p_code":"133", - "code":"138" + "p_code":"134", + "code":"139" }, { "desc":"When I run the creation statement with an EL expression in the table name in a Flink SQL job, the following error message is displayed:DLI.0005: AnalysisException: t_user", @@ -1247,8 +1256,8 @@ "title":"Why Is Error Message \"DLI.0005\" Displayed When I Use an EL Expression to Create a Table in a Flink SQL Job?", "uri":"dli_03_0167.html", "doc_type":"usermanual", - "p_code":"133", - "code":"139" + "p_code":"134", + "code":"140" }, { "desc":"After data is written to OBS through the Flink job output stream, data cannot be queried from the DLI table created in the OBS file path.For example, use the following Fl", @@ -1256,8 +1265,8 @@ "title":"Why Is No Data Queried in the DLI Table Created Using the OBS File Path When Data Is Written to OBS by a Flink Job Output Stream?", "uri":"dli_03_0168.html", "doc_type":"usermanual", - "p_code":"133", - "code":"140" + "p_code":"134", + "code":"141" }, { "desc":"After a Flink SQL job is submitted on DLI, the job fails to be executed. The following error information is displayed in the job log:connect to DIS failed java.lang.Illeg", @@ -1265,8 +1274,8 @@ "title":"Why Does a Flink SQL Job Fails to Be Executed, and Is \"connect to DIS failed java.lang.IllegalArgumentException: Access key cannot be null\" Displayed in the Log?", "uri":"dli_03_0174.html", "doc_type":"usermanual", - "p_code":"133", - "code":"141" + "p_code":"134", + "code":"142" }, { "desc":"Semantic verification for a Flink SQL job (reading DIS data) fails. The following information is displayed when the job fails:Get dis channel xxxinfo failed. 
error info: ", @@ -1274,8 +1283,8 @@ "title":"Why Is Error \"Not authorized\" Reported When a Flink SQL Job Reads DIS Data?", "uri":"dli_03_0176.html", "doc_type":"usermanual", - "p_code":"133", - "code":"142" + "p_code":"134", + "code":"143" }, { "desc":"After a Flink SQL job consumed Kafka and sent data to the Elasticsearch cluster, the job was successfully executed, but no data is available.Possible causes are as follow", @@ -1283,8 +1292,8 @@ "title":"Data Writing Fails After a Flink SQL Job Consumed Kafka and Sank Data to the Elasticsearch Cluster", "uri":"dli_03_0232.html", "doc_type":"usermanual", - "p_code":"133", - "code":"143" + "p_code":"134", + "code":"144" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1292,34 +1301,25 @@ "title":"Flink Jar Jobs", "uri":"dli_03_0132.html", "doc_type":"usermanual", - "p_code":"123", - "code":"144" - }, - { - "desc":"The procedure is as follows:Add the following code to the JAR file code of the Flink Jar job:// Configure the pom file on which the StreamExecutionEnvironment depends.\nSt", - "product_code":"dli", - "title":"How Do I Configure Checkpoints for Flink Jar Jobs and Save the Checkpoints to OBS?", - "uri":"dli_03_0038.html", - "doc_type":"usermanual", - "p_code":"144", + "p_code":"124", "code":"145" }, { - "desc":"Configuration files can be uploaded for user-defined jobs (JAR).Upload the configuration file to DLI through Package Management.In the Other Dependencies area of the Flin", + "desc":"You can upload configuration files for custom jobs (Jar).Upload the configuration file to DLI through Package Management.In the Other Dependencies area of the Flink Jar j", "product_code":"dli", - "title":"Does a Flink JAR Job Support Configuration File Upload? How Do I Upload a Configuration File?", + "title":"Can I Upload Configuration Files for Flink Jar Jobs?", "uri":"dli_03_0044.html", "doc_type":"usermanual", - "p_code":"144", + "p_code":"145", "code":"146" }, { "desc":"The dependency of your Flink job conflicts with a built-in dependency of the DLI Flink platform. 
As a result, the job submission fails.Delete your JAR file that is the sa", "product_code":"dli", - "title":"Why Does the Submission Fail Due to Flink JAR File Conflict?", + "title":"Why Does a Flink Jar Package Conflict Result in Submission Failure?", "uri":"dli_03_0119.html", "doc_type":"usermanual", - "p_code":"144", + "p_code":"145", "code":"147" }, { @@ -1328,7 +1328,7 @@ "title":"Why Does a Flink Jar Job Fail to Access GaussDB(DWS) and a Message Is Displayed Indicating Too Many Client Connections?", "uri":"dli_03_0161.html", "doc_type":"usermanual", - "p_code":"144", + "p_code":"145", "code":"148" }, { @@ -1337,7 +1337,7 @@ "title":"Why Is Error Message \"Authentication failed\" Displayed During Flink Jar Job Running?", "uri":"dli_03_0165.html", "doc_type":"usermanual", - "p_code":"144", + "p_code":"145", "code":"149" }, { @@ -1346,7 +1346,7 @@ "title":"Why Is Error Invalid OBS Bucket Name Reported After a Flink Job Submission Failed?", "uri":"dli_03_0233.html", "doc_type":"usermanual", - "p_code":"144", + "p_code":"145", "code":"150" }, { @@ -1355,7 +1355,7 @@ "title":"Why Does the Flink Submission Fail Due to Hadoop JAR File Conflict?", "uri":"dli_03_0234.html", "doc_type":"usermanual", - "p_code":"144", + "p_code":"145", "code":"151" }, { @@ -1364,7 +1364,7 @@ "title":"How Do I Connect a Flink jar Job to SASL_SSL?", "uri":"dli_03_0266.html", "doc_type":"usermanual", - "p_code":"144", + "p_code":"145", "code":"152" }, { @@ -1373,7 +1373,7 @@ "title":"Performance Tuning", "uri":"dli_03_0133.html", "doc_type":"usermanual", - "p_code":"123", + "p_code":"124", "code":"153" }, { @@ -1409,7 +1409,7 @@ "title":"O&M Guide", "uri":"dli_03_0135.html", "doc_type":"usermanual", - "p_code":"123", + "p_code":"124", "code":"157" }, { @@ -1499,7 +1499,7 @@ "title":"Problems Related to SQL Jobs", "uri":"dli_03_0020.html", "doc_type":"usermanual", - "p_code":"122", + "p_code":"123", "code":"167" }, { @@ -1755,9 +1755,9 @@ "code":"195" }, { - "desc":"If the execution of an SQL statement takes a long time, you need to access the Spark UI to check the execution status.If data skew occurs, the running time of a stage exc", + "desc":"Data skew is a common issue during the execution of SQL jobs. When data is unevenly distributed, some compute nodes process significantly more data than others, which can", "product_code":"dli", - "title":"How Do I Eliminate Data Skew by Configuring AE Parameters?", + "title":"How Do I Do When Data Skew Occurs During the Execution of a SQL Job?", "uri":"dli_03_0093.html", "doc_type":"usermanual", "p_code":"192", @@ -1895,7 +1895,7 @@ "title":"Problems Related to Spark Jobs", "uri":"dli_03_0021.html", "doc_type":"usermanual", - "p_code":"122", + "p_code":"123", "code":"211" }, { @@ -1935,7 +1935,7 @@ "code":"215" }, { - "desc":"Hard-coded or plaintext AK and SK pose significant security risks. 
To ensure security, encrypt your AK and SK, store them in configuration files or environment variables,", + "desc":"To obtain the AK/SK, set the parameters as follows:Create a SparkContext using code.val sc: SparkContext = new SparkContext()\nsc.hadoopConfiguration.set(\"fs.obs.access.ke", "product_code":"dli", "title":"How Do I Set the AK/SK for a Queue to Operate an OBS Table?", "uri":"dli_03_0017.html", @@ -2084,7 +2084,7 @@ "title":"Product Consultation", "uri":"dli_03_0001.html", "doc_type":"usermanual", - "p_code":"122", + "p_code":"123", "code":"232" }, { @@ -2096,15 +2096,6 @@ "p_code":"232", "code":"233" }, - { - "desc":"Data Lake Insight (DLI) is a serverless data processing and analysis service fully compatible with Apache Spark and Apache Flink ecosystems. It frees you from managing an", - "product_code":"dli", - "title":"What Is DLI?", - "uri":"dli_03_0002.html", - "doc_type":"usermanual", - "p_code":"233", - "code":"234" - }, { "desc":"DLI supports the following data formats:ParquetCSVORCJsonAvro", "product_code":"dli", @@ -2112,7 +2103,7 @@ "uri":"dli_03_0025.html", "doc_type":"usermanual", "p_code":"233", - "code":"235" + "code":"234" }, { "desc":"The Spark component of DLI is a fully managed service. You can only use the DLI Spark through its APIs. .The Spark component of MRS is built on the VM in an MRS cluster. ", @@ -2121,7 +2112,7 @@ "uri":"dli_03_0115.html", "doc_type":"usermanual", "p_code":"233", - "code":"236" + "code":"235" }, { "desc":"DLI data can be stored in either of the following:OBS: Data used by SQL jobs, Spark jobs, and Flink jobs can be stored in OBS, reducing storage costs.DLI: The column-base", @@ -2130,7 +2121,7 @@ "uri":"dli_03_0029.html", "doc_type":"usermanual", "p_code":"233", - "code":"237" + "code":"236" }, { "desc":"DLI tables store data within the DLI service, and you do not need to know the data storage path.OBS tables store data in your OBS buckets, and you need to manage the sour", @@ -2139,7 +2130,7 @@ "uri":"dli_03_0117.html", "doc_type":"usermanual", "p_code":"233", - "code":"238" + "code":"237" }, { "desc":"Currently, DLI supports analysis only on the data uploaded to the cloud. In scenarios where regular (for example, on a per day basis) one-off analysis on incremental data", @@ -2148,7 +2139,7 @@ "uri":"dli_03_0010.html", "doc_type":"usermanual", "p_code":"233", - "code":"239" + "code":"238" }, { "desc":"Data in the OBS bucket shared by IAM users under the same account can be imported. You cannot import data in the OBS bucket shared with other IAM account.", @@ -2157,7 +2148,7 @@ "uri":"dli_03_0129.html", "doc_type":"usermanual", "p_code":"233", - "code":"240" + "code":"239" }, { "desc":"Log in to the management console.Click in the upper left corner and select a region and a project.Click the My Quota icon in the upper right corner of the page.The Serv", @@ -2166,7 +2157,7 @@ "uri":"dli_03_0264.html", "doc_type":"usermanual", "p_code":"233", - "code":"241" + "code":"240" }, { "desc":"No, a global variable can only be used by the user who created it. Global variables can be used to simplify complex parameters. For example, long and difficult variables ", @@ -2175,7 +2166,7 @@ "uri":"dli_03_0263.html", "doc_type":"usermanual", "p_code":"233", - "code":"242" + "code":"241" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2184,7 +2175,7 @@ "uri":"dli_03_0222.html", "doc_type":"usermanual", "p_code":"232", - "code":"243" + "code":"242" }, { "desc":"If you are suggested to perform following operations to run a large number of DLI jobs:Group the DLI jobs by type, and run each group on a queue.Alternatively, create IAM", @@ -2192,8 +2183,8 @@ "title":"How Do I Manage Tens of Thousands of Jobs Running on DLI?", "uri":"dli_03_0126.html", "doc_type":"usermanual", - "p_code":"243", - "code":"244" + "p_code":"242", + "code":"243" }, { "desc":"The field names of tables that have been created cannot be changed.You can create a table, define new table fields, and migrate data from the old table to the new one.", @@ -2201,8 +2192,8 @@ "title":"How Do I Change the Name of a Field in a Created Table?", "uri":"dli_03_0162.html", "doc_type":"usermanual", - "p_code":"243", - "code":"245" + "p_code":"242", + "code":"244" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2211,7 +2202,7 @@ "uri":"dli_03_0261.html", "doc_type":"usermanual", "p_code":"232", - "code":"246" + "code":"245" }, { "desc":"No. The spark.acls.enable configuration item is not used in DLI. The Apache Spark command injection vulnerability (CVE-2022-33891) does not exist in DLI.", @@ -2219,8 +2210,8 @@ "title":"Does DLI Have the Apache Spark Command Injection Vulnerability (CVE-2022-33891)?", "uri":"dli_03_0260.html", "doc_type":"usermanual", - "p_code":"246", - "code":"247" + "p_code":"245", + "code":"246" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2228,8 +2219,8 @@ "title":"Quota", "uri":"dli_03_0053.html", "doc_type":"usermanual", - "p_code":"122", - "code":"248" + "p_code":"123", + "code":"247" }, { "desc":"Log in to the management console.Click in the upper left corner and select Region and Project.Click (the My Quotas icon) in the upper right corner.The Service Quota pag", @@ -2237,8 +2228,8 @@ "title":"How Do I View My Quotas?", "uri":"dli_03_0031.html", "doc_type":"usermanual", - "p_code":"248", - "code":"249" + "p_code":"247", + "code":"248" }, { "desc":"The system does not support online quota adjustment. To increase a resource quota, dial the hotline or send an email to the customer service. We will process your applica", @@ -2246,8 +2237,8 @@ "title":"How Do I Increase a Quota?", "uri":"dli_03_0032.html", "doc_type":"usermanual", - "p_code":"248", - "code":"250" + "p_code":"247", + "code":"249" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2255,8 +2246,8 @@ "title":"Permission", "uri":"dli_03_0054.html", "doc_type":"usermanual", - "p_code":"122", - "code":"251" + "p_code":"123", + "code":"250" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2264,8 +2255,8 @@ "title":"Usage", "uri":"dli_03_0223.html", "doc_type":"usermanual", - "p_code":"251", - "code":"252" + "p_code":"250", + "code":"251" }, { "desc":"DLI has a comprehensive permission control mechanism and supports fine-grained authentication through Identity and Access Management (IAM). You can create policies in IAM", @@ -2273,8 +2264,8 @@ "title":"How Do I Manage Fine-Grained DLI Permissions?", "uri":"dli_03_0100.html", "doc_type":"usermanual", - "p_code":"252", - "code":"253" + "p_code":"251", + "code":"252" }, { "desc":"You cannot perform permission-related operations on the partition column of a partitioned table.However, when you grant the permission of any non-partition column in a pa", @@ -2282,8 +2273,8 @@ "title":"What Is Column Permission Granting of a DLI Partition Table?", "uri":"dli_03_0008.html", "doc_type":"usermanual", - "p_code":"252", - "code":"254" + "p_code":"251", + "code":"253" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2291,8 +2282,8 @@ "title":"O&M Guide", "uri":"dli_03_0226.html", "doc_type":"usermanual", - "p_code":"251", - "code":"255" + "p_code":"250", + "code":"254" }, { "desc":"When you submit a job, a message is displayed indicating that the job fails to be submitted due to insufficient permission caused by arrears. In this case, you need to ch", @@ -2300,8 +2291,8 @@ "title":"Why Does My Account Have Insufficient Permissions Due to Arrears?", "uri":"dli_03_0140.html", "doc_type":"usermanual", - "p_code":"255", - "code":"256" + "p_code":"254", + "code":"255" }, { "desc":"When the user update an existing program package, the following error information is displayed:\"error_code\"*DLI.0003\",\"error_msg\":\"Permission denied for resource 'resourc", @@ -2309,8 +2300,8 @@ "title":"Why Does the System Display a Message Indicating Insufficient Permissions When I Update a Program Package?", "uri":"dli_03_0195.html", "doc_type":"usermanual", - "p_code":"255", - "code":"257" + "p_code":"254", + "code":"256" }, { "desc":"When the SQL query statement is executed, the system displays a message indicating that the user does not have the permission to query resources.Error information: DLI.00", @@ -2318,8 +2309,8 @@ "title":"Why Is Error \"DLI.0003: Permission denied for resource...\" Reported When I Run a SQL Statement?", "uri":"dli_03_0227.html", "doc_type":"usermanual", - "p_code":"255", - "code":"258" + "p_code":"254", + "code":"257" }, { "desc":"The table permission has been granted and verified. 
However, after a period of time, an error is reported indicating that the table query fails.There are two possible rea", @@ -2327,8 +2318,8 @@ "title":"Why Can't I Query Table Data After I've Been Granted Table Permissions?", "uri":"dli_03_0228.html", "doc_type":"usermanual", - "p_code":"255", - "code":"259" + "p_code":"254", + "code":"258" }, { "desc":"If a table inherits database permissions, you do not need to regrant the inherited permissions to the table.When you grant permissions on a table on the console:If you se", @@ -2336,8 +2327,8 @@ "title":"Will an Error Be Reported if the Inherited Permissions Are Regranted to a Table That Inherits Database Permissions?", "uri":"dli_03_0057.html", "doc_type":"usermanual", - "p_code":"255", - "code":"260" + "p_code":"254", + "code":"259" }, { "desc":"User A created Table1.User B created View1 based on Table1.After the Select Table permission on Table1 is granted to user C, user C fails to query View1.User B does not h", @@ -2345,8 +2336,8 @@ "title":"Why Can't I Query a View After I'm Granted the Select Table Permission on the View?", "uri":"dli_03_0067.html", "doc_type":"usermanual", - "p_code":"255", - "code":"261" + "p_code":"254", + "code":"260" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2354,8 +2345,8 @@ "title":"Queue", "uri":"dli_03_0049.html", "doc_type":"usermanual", - "p_code":"122", - "code":"262" + "p_code":"123", + "code":"261" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2363,8 +2354,8 @@ "title":"Usage", "uri":"dli_03_0229.html", "doc_type":"usermanual", - "p_code":"262", - "code":"263" + "p_code":"261", + "code":"262" }, { "desc":"Currently, you are not allowed to modify the description of a created queue. You can add the description when purchasing the queue.", @@ -2372,8 +2363,8 @@ "title":"Does the Description of a DLI Queue Can Be Modified?", "uri":"dli_03_0109.html", "doc_type":"usermanual", - "p_code":"263", - "code":"264" + "p_code":"262", + "code":"263" }, { "desc":"Deleting a queue does not cause table data loss in your database.", @@ -2381,8 +2372,8 @@ "title":"Will Table Data in My Database Be Lost If I Delete a Queue?", "uri":"dli_03_0166.html", "doc_type":"usermanual", - "p_code":"263", - "code":"265" + "p_code":"262", + "code":"264" }, { "desc":"You need to develop a mechanism to retry failed jobs. 
When a faulty queue is recovered, your application tries to submit the failed jobs to the queue again.", @@ -2390,8 +2381,8 @@ "title":"How Does DLI Ensure the Reliability of Spark Jobs When a Queue Is Abnormal?", "uri":"dli_03_0170.html", "doc_type":"usermanual", - "p_code":"263", - "code":"266" + "p_code":"262", + "code":"265" }, { "desc":"DLI allows you to subscribe to an SMN topic for failed jobs.Log in to the DLI console.In the navigation pane on the left, choose Queue Management.On the Queue Management ", @@ -2399,8 +2390,8 @@ "title":"How Do I Monitor Queue Exceptions?", "uri":"dli_03_0098.html", "doc_type":"usermanual", - "p_code":"263", - "code":"267" + "p_code":"262", + "code":"266" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2408,8 +2399,8 @@ "title":"O&M Guide", "uri":"dli_03_0230.html", "doc_type":"usermanual", - "p_code":"262", - "code":"268" + "p_code":"261", + "code":"267" }, { "desc":"To check the running status of the DLI queue and determine whether to run more jobs on that queue, you need to check the queue load.Search for Cloud Eye on the console.In", @@ -2417,8 +2408,8 @@ "title":"How Do I View DLI Queue Load?", "uri":"dli_03_0095.html", "doc_type":"usermanual", - "p_code":"268", - "code":"269" + "p_code":"267", + "code":"268" }, { "desc":"You need to check the large number of jobs in the Submitting and Running states on the queue.Use Cloud Eye to view jobs in different states on the queue. The procedure is", @@ -2426,8 +2417,8 @@ "title":"How Do I Determine Whether There Are Too Many Jobs in the Current Queue?", "uri":"dli_03_0183.html", "doc_type":"usermanual", - "p_code":"268", - "code":"270" + "p_code":"267", + "code":"269" }, { "desc":"Currently, DLI provides two types of queues, For SQL and For general use. SQL queues are used to run SQL jobs. General-use queues are compatible with Spark queues of earl", @@ -2435,8 +2426,8 @@ "title":"How Do I Switch an Earlier-Version Spark Queue to a General-Purpose Queue?", "uri":"dli_03_0065.html", "doc_type":"usermanual", - "p_code":"268", - "code":"271" + "p_code":"267", + "code":"270" }, { "desc":"DLI queues do not use resources or bandwidth when no job is running. In this case, the running status of DLI queues is not displayed on CES.", @@ -2444,8 +2435,8 @@ "title":"Why Cannot I View the Resource Running Status of DLI Queues on Cloud Eye?", "uri":"dli_03_0193.html", "doc_type":"usermanual", - "p_code":"268", - "code":"272" + "p_code":"267", + "code":"271" }, { "desc":"In DLI, 64 CU = 64 cores and 256 GB memory.In a Spark job, if the driver occupies 4 cores and 16 GB memory, the executor can occupy 60 cores and 240 GB memory.", @@ -2453,8 +2444,8 @@ "title":"How Do I Allocate Queue Resources for Running Spark Jobs If I Have Purchased 64 CUs?", "uri":"dli_03_0088.html", "doc_type":"usermanual", - "p_code":"268", - "code":"273" + "p_code":"267", + "code":"272" }, { "desc":"Queue plans create failed. The plan xxx target cu is out of quota is displayed when you create a scheduled scaling task.The CU quota of the current account is insufficien", @@ -2462,8 +2453,8 @@ "title":"Why Is Error \"Queue plans create failed. 
The plan xxx target cu is out of quota\" Reported When I Schedule CU Changes?", "uri":"dli_03_0159.html", "doc_type":"usermanual", - "p_code":"268", - "code":"274" + "p_code":"267", + "code":"273" }, { "desc":"After a SQL job was submitted to the default queue, the job runs abnormally. The job log reported that the execution timed out. The exception logs are as follows:[ERROR] ", @@ -2471,8 +2462,8 @@ "title":"Why Is a Timeout Exception Reported When a DLI SQL Statement Fails to Be Executed on the Default Queue?", "uri":"dli_03_0171.html", "doc_type":"usermanual", - "p_code":"268", - "code":"275" + "p_code":"267", + "code":"274" }, { "desc":"In daily big data analysis work, it is important to allocate and manage compute resources properly to provide a good job execution environment.You can allocate resources ", @@ -2480,8 +2471,8 @@ "title":"How Can I Check the Actual and Used CUs for an Elastic Resource Pool as Well as the Required CUs for a Job?", "uri":"dli_03_0276.html", "doc_type":"usermanual", - "p_code":"268", - "code":"276" + "p_code":"267", + "code":"275" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2489,8 +2480,8 @@ "title":"Datasource Connections", "uri":"dli_03_0022.html", "doc_type":"usermanual", - "p_code":"122", - "code":"277" + "p_code":"123", + "code":"276" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2498,8 +2489,8 @@ "title":"Datasource Connections", "uri":"dli_03_0110.html", "doc_type":"usermanual", - "p_code":"277", - "code":"278" + "p_code":"276", + "code":"277" }, { "desc":"You need to create a VPC peering connection to enable network connectivity. Take MRS as an example. If DLI and MRS clusters are in the same VPC, and the security group is", @@ -2507,8 +2498,8 @@ "title":"Why Do I Need to Create a VPC Peering Connection for an Enhanced Datasource Connection?", "uri":"dli_03_0128.html", "doc_type":"usermanual", - "p_code":"278", - "code":"279" + "p_code":"277", + "code":"278" }, { "desc":"An enhanced datasource connection failed to pass the network connectivity test. Datasource connection cannot be bound to a queue. The following error information is displ", @@ -2516,8 +2507,8 @@ "title":"Failed to Bind a Queue to an Enhanced Datasource Connection", "uri":"dli_03_0237.html", "doc_type":"usermanual", - "p_code":"278", - "code":"280" + "p_code":"277", + "code":"279" }, { "desc":"The outbound rule had been configured for the security group of the queue associated with the enhanced datasource connection. The datasource authentication used a passwor", @@ -2525,8 +2516,8 @@ "title":"DLI Failed to Connect to GaussDB(DWS) Through an Enhanced Datasource Connection", "uri":"dli_03_0238.html", "doc_type":"usermanual", - "p_code":"278", - "code":"281" + "p_code":"277", + "code":"280" }, { "desc":"A datasource connection is created and bound to a queue. 
The connectivity test fails and the following error information is displayed:failed to connect to specified addre", @@ -2534,8 +2525,8 @@ "title":"How Do I Do if the Datasource Connection Is Created But the Network Connectivity Test Fails?", "uri":"dli_03_0179.html", "doc_type":"usermanual", - "p_code":"278", - "code":"282" + "p_code":"277", + "code":"281" }, { "desc":"Configuring the Connection Between a DLI Queue and a Data Source in a Private NetworkIf your DLI job needs to connect to a data source, for example, MRS, RDS, CSS, Kafka,", @@ -2543,8 +2534,8 @@ "title":"How Do I Configure the Network Between a DLI Queue and a Data Source?", "uri":"dli_03_0186.html", "doc_type":"usermanual", - "p_code":"278", - "code":"283" + "p_code":"277", + "code":"282" }, { "desc":"The possible causes and solutions are as follows:If you have created a queue, do not bind it to a datasource connection immediately. Wait for 5 to 10 minutes. After the c", @@ -2552,8 +2543,8 @@ "title":"What Can I Do If a Datasource Connection Is Stuck in Creating State When I Try to Bind a Queue to It?", "uri":"dli_03_0257.html", "doc_type":"usermanual", - "p_code":"278", - "code":"284" + "p_code":"277", + "code":"283" }, { "desc":"DLI enhanced datasource connection uses VPC peering to directly connect the VPC networks of the desired data sources for point-to-point data exchanges.", @@ -2561,8 +2552,8 @@ "title":"How Do I Connect DLI to Data Sources?", "uri":"dli_03_0259.html", "doc_type":"usermanual", - "p_code":"278", - "code":"285" + "p_code":"277", + "code":"284" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2570,8 +2561,8 @@ "title":"Cross-Source Analysis", "uri":"dli_03_0112.html", "doc_type":"usermanual", - "p_code":"277", - "code":"286" + "p_code":"276", + "code":"285" }, { "desc":"To perform query on data stored on services rather than DLI, perform the following steps:Assume that the data to be queried is stored on multiple services (for example, O", @@ -2579,8 +2570,8 @@ "title":"How Can I Perform Query on Data Stored on Services Rather Than DLI?", "uri":"dli_03_0011.html", "doc_type":"usermanual", - "p_code":"286", - "code":"287" + "p_code":"285", + "code":"286" }, { "desc":"Connect VPCs in different regions.Create an enhanced datasource connection on DLI and bind it to a queue.Add a DLI route.", @@ -2588,8 +2579,8 @@ "title":"How Can I Access Data Across Regions?", "uri":"dli_03_0085.html", "doc_type":"usermanual", - "p_code":"286", - "code":"288" + "p_code":"285", + "code":"287" }, { "desc":"When data is inserted into DLI, set the ID field to NULL.", @@ -2597,8 +2588,8 @@ "title":"How Do I Set the Auto-increment Primary Key or Other Fields That Are Automatically Filled in the RDS Table When Creating a DLI and Associating It with the RDS Table?", "uri":"dli_03_0028.html", "doc_type":"usermanual", - "p_code":"286", - "code":"289" + "p_code":"285", + "code":"288" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2606,8 +2597,8 @@ "title":"Datasource Connection O&M", "uri":"dli_03_0256.html", "doc_type":"usermanual", - "p_code":"277", - "code":"290" + "p_code":"276", + "code":"289" }, { "desc":"Possible CausesThe network connectivity is abnormal. Check whether the security group is correctly selected and whether the VPC is correctly configured.The network connec", @@ -2615,8 +2606,8 @@ "title":"Why Is the Error Message \"communication link failure\" Displayed When I Use a Newly Activated Datasource Connection?", "uri":"dli_03_0047.html", "doc_type":"usermanual", - "p_code":"290", - "code":"291" + "p_code":"289", + "code":"290" }, { "desc":"The cluster host information is not added to the datasource connection. As a result, the KRB authentication fails, the connection times out, and no error is recorded in l", @@ -2624,8 +2615,8 @@ "title":"Connection Times Out During MRS HBase Datasource Connection, and No Error Is Recorded in Logs", "uri":"dli_03_0080.html", "doc_type":"usermanual", - "p_code":"290", - "code":"292" + "p_code":"289", + "code":"291" }, { "desc":"When you create a VPC peering connection for the datasource connection, the following error information is displayed:Before you create a datasource connection, check whet", @@ -2633,8 +2624,8 @@ "title":"Why Can't I Find the Subnet When Creating a DLI Datasource Connection?", "uri":"dli_03_0111.html", "doc_type":"usermanual", - "p_code":"290", - "code":"293" + "p_code":"289", + "code":"292" }, { "desc":"A datasource RDS table was created in the DataArts Studio, and the insert overwrite statement was executed to write data into RDS. DLI.0999: BatchUpdateException: Incorre", @@ -2642,8 +2633,8 @@ "title":"Error Message \"Incorrect string value\" Is Displayed When insert overwrite Is Executed on a Datasource RDS Table", "uri":"dli_03_0239.html", "doc_type":"usermanual", - "p_code":"290", - "code":"294" + "p_code":"289", + "code":"293" }, { "desc":"The system failed to create a datasource RDS table, and null pointer error was reported.The following table creation statement was used:The RDS database is in a PostGre c", @@ -2651,8 +2642,8 @@ "title":"Null Pointer Error Is Displayed When the System Creates a Datasource RDS Table", "uri":"dli_03_0250.html", "doc_type":"usermanual", - "p_code":"290", - "code":"295" + "p_code":"289", + "code":"294" }, { "desc":"The system failed to execute insert overwrite on the datasource GaussDB(DWS) table, and org.postgresql.util.PSQLException: ERROR: tuple concurrently updated was displayed", @@ -2660,8 +2651,8 @@ "title":"Error Message \"org.postgresql.util.PSQLException: ERROR: tuple concurrently updated\" Is Displayed When the System Executes insert overwrite on a Datasource GaussDB(DWS) Table", "uri":"dli_03_0251.html", "doc_type":"usermanual", - "p_code":"290", - "code":"296" + "p_code":"289", + "code":"295" }, { "desc":"A datasource table was used to import data to a CloudTable HBase table. This HBase table contains a column family and a rowkey for 100 million simulating data records. 
Th", @@ -2669,8 +2660,8 @@ "title":"RegionTooBusyException Is Reported When Data Is Imported to a CloudTable HBase Table Through a Datasource Table", "uri":"dli_03_0252.html", "doc_type":"usermanual", - "p_code":"290", - "code":"297" + "p_code":"289", + "code":"296" }, { "desc":"A table was created on GaussDB(DWS) and then a datasource connection was created on DLI to read and write data. An error message was displayed during data writing, indica", @@ -2678,8 +2669,8 @@ "title":"A Null Value Is Written Into a Non-Null Field When a DLI Datasource Connection Is Used to Connect to a GaussDB(DWS) Table", "uri":"dli_03_0253.html", "doc_type":"usermanual", - "p_code":"290", - "code":"298" + "p_code":"289", + "code":"297" }, { "desc":"A datasource GaussDB(DWS) table and the datasource connection were created in DLI, and the schema of the source table in GaussDB(DWS) were updated. During the job executi", @@ -2687,8 +2678,8 @@ "title":"An Insert Operation Failed After the Schema of the GaussDB(DWS) Source Table Is Updated", "uri":"dli_03_0254.html", "doc_type":"usermanual", - "p_code":"290", - "code":"299" + "p_code":"289", + "code":"298" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2696,8 +2687,8 @@ "title":"APIs", "uri":"dli_03_0056.html", "doc_type":"usermanual", - "p_code":"122", - "code":"300" + "p_code":"123", + "code":"299" }, { "desc":"In the REST API provided by DLI, the request header can be added to the request URI, for example, Content-Type.Content-Type indicates the request body type or format. The", @@ -2705,8 +2696,8 @@ "title":"Why Is Error \"unsupported media Type\" Reported When I Subimt a SQL Job?", "uri":"dli_03_0060.html", "doc_type":"usermanual", - "p_code":"300", - "code":"301" + "p_code":"299", + "code":"300" }, { "desc":"When different IAM users call an API under the same enterprise project in the same region, the project ID is the same.", @@ -2714,8 +2705,8 @@ "title":"Is the Project ID Fixed when Different IAM Users Call an API?", "uri":"dli_03_0125.html", "doc_type":"usermanual", - "p_code":"300", - "code":"302" + "p_code":"299", + "code":"301" }, { "desc":"When the API call for submitting a SQL job times out, and the following error information is displayed:There are currently no resources tracked in the state, so there is ", @@ -2723,8 +2714,8 @@ "title":"What Can I Do If an Error Is Reported When the Execution of the API for Creating a SQL Job Times Out?", "uri":"dli_03_0178.html", "doc_type":"usermanual", - "p_code":"300", - "code":"303" + "p_code":"299", + "code":"302" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2732,8 +2723,8 @@ "title":"SDKs", "uri":"dli_03_0058.html", "doc_type":"usermanual", - "p_code":"122", - "code":"304" + "p_code":"123", + "code":"303" }, { "desc":"When you query the SQL job results using SDK, the system checks the job status when the job is submitted. The timeout interval set in the system is 300s. 
If the job is no", @@ -2741,8 +2732,8 @@ "title":"How Do I Set the Timeout Duration for Querying SQL Job Results Using SDK?", "uri":"dli_03_0073.html", "doc_type":"usermanual", - "p_code":"304", - "code":"305" + "p_code":"303", + "code":"304" }, { "desc":"Run the ping command to check whether dli.xxx can be accessed.If dli.xxx can be accessed, check whether DNS resolution is correctly configured.If dli.xxx can be accessed,", @@ -2750,8 +2741,8 @@ "title":"How Do I Handle the dli.xxx,unable to resolve host address Error?", "uri":"dli_03_0255.html", "doc_type":"usermanual", - "p_code":"304", - "code":"306" + "p_code":"303", + "code":"305" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2760,6 +2751,6 @@ "uri":"dli_01_00006.html", "doc_type":"usermanual", "p_code":"", - "code":"307" + "code":"306" } ] \ No newline at end of file diff --git a/docs/dli/umn/dli_01_00006.html b/docs/dli/umn/dli_01_00006.html index 2406d648..cbbcb3e1 100644 --- a/docs/dli/umn/dli_01_00006.html +++ b/docs/dli/umn/dli_01_00006.html @@ -8,7 +8,13 @@ -

2024-04-28

+

2024-07-11

+ +

Modified the following section:

+ + + +

2024-04-28

Added the following section:

@@ -25,13 +31,13 @@

2023-11-01

Modified the following content:

- +

2023-10-08

Modified the following content:

- +

2023-09-07

diff --git a/docs/dli/umn/dli_01_0003.html b/docs/dli/umn/dli_01_0003.html index fe5fa05c..35c6b2bf 100644 --- a/docs/dli/umn/dli_01_0003.html +++ b/docs/dli/umn/dli_01_0003.html @@ -8,9 +8,9 @@

In cross-source development scenarios, there is a risk of password leakage if datasource authentication information is directly configured. You are advised to use the datasource authentication provided by DLI. For details, see Overview.
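As an illustration of this advice, the following sketch shows how a cross-source (datasource) table can reference a datasource authentication by name instead of carrying a plaintext password in the job. It is a minimal, hypothetical example: the authentication name rds_passwd_auth, the connection address, and the table names are placeholders, and the exact options for each data source type are described in the Data Lake Insight SQL Syntax Reference.

  -- Minimal sketch with assumed names: an RDS datasource table that reuses a
  -- password-type datasource authentication rather than a plaintext password.
  CREATE TABLE IF NOT EXISTS dli_rds_orders
  USING JDBC OPTIONS (
    'url' = 'jdbc:mysql://192.168.0.100:3306',
    'dbtable' = 'salesdb.orders',
    'passwdauth' = 'rds_passwd_auth',
    'encryption' = 'true'
  );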

-

Constraints

  • Datasource connections cannot be created for the default queue.
  • Flink jobs can directly access DIS, OBS, and SMN data sources without using datasource connections.
  • VPC Administrator permissions are required for enhanced connections to use VPCs, subnets, routes, VPC peering connections.
  • If you use an enhanced datasource connection, the CIDR block of the elastic resource pool or queue cannot overlap with that of the data source.
  • Only queues bound with datasource connections can access datasource tables.
  • Datasource tables do not support the preview function.
  • When checking the connectivity of datasource connections, the constraints on IP addresses are as follows:
    • The IP address must be valid, which consists of four decimal numbers separated by periods (.). The value ranges from 0 to 255.
    • During the test, you can add a port after the IP address and separate them with colons (:). The port can contain a maximum of five digits. The value ranges from 0 to 65535.

      For example, 192.168.xx.xx or 192.168.xx.xx:8181.

      +

      Constraints

      • Datasource connections cannot be created for the default queue.
      • Flink jobs can directly access DIS, OBS, and SMN data sources without using datasource connections.
      • VPC Administrator permissions are required for enhanced connections to use VPCs, subnets, routes, and VPC peering connections.
      • If you use an enhanced datasource connection, the CIDR block of the elastic resource pool or queue cannot overlap with that of the data source.
      • Only queues bound with datasource connections can access datasource tables.
      • Datasource tables do not support the preview function.
      • When checking the connectivity of datasource connections, the notes and constraints on IP addresses are:
        • The IP address must be valid and consist of four decimal numbers separated by periods (.), with each number ranging from 0 to 255.
        • During the test, you can add a port after the IP address, separated by a colon (:). The port can contain a maximum of five digits, ranging from 0 to 65535.

          For example, 192.168.xx.xx or 192.168.xx.xx:8181.

        -
      • When checking the connectivity of datasource connections, the constraints on domain names are as follows:
        • The domain name can contain 1 to 255 characters. Only letters, digits, underscores (_), and hyphens (-) are allowed.
        • The top-level domain name must contain at least two letters, for example, .com, .net, and .cn.
        • During the test, you can add a port after the domain name and separate them with colons (:). The port can contain a maximum of five digits. The value ranges from 0 to 65535.

          For example, example.com:8080.

          +
        • When checking the connectivity of datasource connections, the notes and constraints on domain names are:
          • The domain name can contain 1 to 255 characters. Only letters, digits, underscores (_), and hyphens (-) are allowed.
          • The top-level domain name must contain at least two letters, for example, .com, .net, and .cn.
          • During the test, you can add a port after the domain name, separated by a colon (:). The port can contain a maximum of five digits, ranging from 0 to 65535.

            For example, example.com:8080.

      diff --git a/docs/dli/umn/dli_01_0005.html b/docs/dli/umn/dli_01_0005.html index b077a7b4..6f1a0004 100644 --- a/docs/dli/umn/dli_01_0005.html +++ b/docs/dli/umn/dli_01_0005.html @@ -52,9 +52,9 @@

Creating a Table

Before creating a table, ensure that a database has been created.

-
  1. You can create a table on either the Databases and Tables page or the SQL Editor page.

    Datasource connection tables, such as View tables, HBase (MRS) tables, OpenTSDB (MRS) tables, GaussDB(DWS) tables, RDS tables, and CSS tables, cannot be created. You can use SQL to create views and datasource connection tables. For details, see sections Creating a View and Creating a Datasource Connection Table in the Data Lake Insight SQL Syntax Reference.

    +
    1. You can create a table on either the Databases and Tables page or the SQL Editor page.

      Datasource connection tables, such as view tables, HBase (MRS) tables, OpenTSDB (MRS) tables, GaussDB(DWS) tables, RDS tables, and CSS tables, cannot be created this way. Use SQL to create views and datasource connection tables instead; a brief sketch follows this list. For details, see sections Creating a View and Creating a Datasource Connection Table in the Data Lake Insight SQL Syntax Reference.

      -
      • To create a table on the Data Management page:
        1. On the left of the management console, choose Data Management > Databases and Tables.
        2. On the Databases and Tables page, select the database for which you want to create a table. In the Operation column, click More > Create Table to create a table in the current database.
        +
        • To create a table on the Data Management page:
          1. On the left of the management console, choose Data Management > Databases and Tables.
          2. On the Databases and Tables page, select the database for which you want to create a table. In the Operation column, click More > Create Table to create a table in the current database.
        • To create a table on the SQL Editor page:
          1. On the left of the management console, click SQL Editor.
          2. In the navigation pane of the displayed SQL Editor page, click Databases. You can create a table in either of the following ways:
            • Click a database name. In the Tables area, click on the right to create a table in the current database.
            • Click on the right of the database and choose Create Table from the shortcut menu to create a table in the current database.
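            For reference, views (and other datasource connection tables) are created with SQL statements rather than on these pages. The following minimal sketch shows a view definition; the database, table, and column names (db_demo, tbl_orders, order_id, amount) are hypothetical placeholders, and the authoritative syntax is in Creating a View in the Data Lake Insight SQL Syntax Reference.

            -- Create a view over an existing table (all object names are hypothetical).
            CREATE VIEW db_demo.v_large_orders AS
              SELECT order_id, amount
              FROM db_demo.tbl_orders
              WHERE amount > 100;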
        @@ -108,7 +108,7 @@

        Type

        Data type of a column. This parameter corresponds to Column Name.

        -
        • string: The data is of the string type.
        • int: Each integer is stored on four bytes.
        • date: The value ranges from 0000-01-01 to 9999-12-31.
        • double: Each number is stored on eight bytes.
        • boolean: Each value is stored on one byte.
        • decimal: The valid bits are positive integers between 1 to 38, including 1 and 38. The decimal digits are integers less than 10.
        • smallint/short: The number is stored on two bytes.
        • bigint/long: The number is stored on eight bytes.
        • timestamp: The data indicates a date and time. The value can be accurate to six decimal points.
        • float: Each number is stored on four bytes.
        • tinyint: Each number is stored on one byte. Only OBS tables support this data type.
        +
        • string: The data is of the string type.
        • int: Each integer is stored on four bytes.
        • date: The value ranges from 0000-01-01 to 9999-12-31.
        • double: Each number is stored on eight bytes.
        • boolean: Each value is stored on one byte.
        • decimal: The precision is a positive integer between 1 and 38, inclusive. The number of digits to the right of the decimal point is an integer less than 10, for example, decimal(10,2).
        • smallint/short: The number is stored on two bytes.
        • bigint/long: The number is stored on eight bytes.
        • timestamp: The data indicates a date and time. The value can be accurate to six decimal points.
        • float: Each number is stored on four bytes.
        • tinyint: Each number is stored on one byte. Only OBS tables support this data type.

        string

        @@ -144,7 +144,7 @@

        Data Format

        DLI supports the following data formats:

        -
        • Parquet: DLI can read non-compressed data or data that is compressed using Snappy and gzip.
        • CSV: DLI can read non-compressed data or data that is compressed using gzip.
        • ORC: DLI can read non-compressed data or data that is compressed using Snappy.
        • JSON: DLI can read non-compressed data or data that is compressed using gzip.
        • Avro: DLI can read uncompressed Avro data.
        +
        • Parquet: DLI can read non-compressed data or data that is compressed using Snappy and gzip.
        • CSV: DLI can read non-compressed data or data that is compressed using gzip.
        • ORC: DLI can read non-compressed data or data that is compressed using Snappy.
        • JSON: DLI can read non-compressed data or data that is compressed using gzip.
        • Avro: DLI can read uncompressed Avro data.

        CSV

        @@ -177,7 +177,7 @@

        User-defined Quotation Character

        -

        This parameter is valid only when Data Format is set to CSV and you select User-defined Quotation Character.

        +

        This parameter is valid only when Data Format is set to CSV and you select User-defined Quotation Character.

        The following quotation characters are supported:

        • Single quotation mark (')
        • Double quotation marks (")
        • Others: Enter a user-defined quotation character.
        @@ -213,7 +213,7 @@
  2. -

  3. Click OK.

    After a table is created, you can view and select the table for use on the Data Management page or SQL Editor page.

    +

  4. Click OK.

    After a table is created, you can view and select the table for use on the Data Management page or SQL Editor page.

  5. (Optional) After a DLI table is created, you can decide whether to directly import data to the table.
diff --git a/docs/dli/umn/dli_01_0006.html b/docs/dli/umn/dli_01_0006.html index b1e3aecf..0973a4bb 100644 --- a/docs/dli/umn/dli_01_0006.html +++ b/docs/dli/umn/dli_01_0006.html @@ -5,9 +5,9 @@

For example, to connect DLI to the MRS, RDS, CSS, Kafka, or GaussDB(DWS) data source, you need to enable the network between DLI and the VPC of the data source.

Create an enhanced datasource connection on the console.

-

Constraints

  • Datasource connections cannot be created for the default queue.
  • Flink jobs can directly access DIS, OBS, and SMN data sources without using datasource connections.
  • VPC Administrator permissions are required for enhanced connections to use VPCs, subnets, routes, VPC peering connections.
  • If you use an enhanced datasource connection, the CIDR block of the elastic resource pool or queue cannot overlap with that of the data source.
  • Only queues bound with datasource connections can access datasource tables.
  • Datasource tables do not support the preview function.
  • When checking the connectivity of datasource connections, the constraints on IP addresses are as follows:
    • The IP address must be valid, which consists of four decimal numbers separated by periods (.). The value ranges from 0 to 255.
    • During the test, you can add a port after the IP address and separate them with colons (:). The port can contain a maximum of five digits. The value ranges from 0 to 65535.

      For example, 192.168.xx.xx or 192.168.xx.xx:8181.

      +

      Constraints

      • Datasource connections cannot be created for the default queue.
      • Flink jobs can directly access DIS, OBS, and SMN data sources without using datasource connections.
      • VPC Administrator permissions are required for enhanced connections to use VPCs, subnets, routes, and VPC peering connections.
      • If you use an enhanced datasource connection, the CIDR block of the elastic resource pool or queue cannot overlap with that of the data source.
      • Only queues bound with datasource connections can access datasource tables.
      • Datasource tables do not support the preview function.
      • When checking the connectivity of datasource connections, the notes and constraints on IP addresses are:
        • The IP address must be valid and consist of four decimal numbers separated by periods (.), with each number ranging from 0 to 255.
        • During the test, you can add a port after the IP address, separated by a colon (:). The port can contain a maximum of five digits, ranging from 0 to 65535.

          For example, 192.168.xx.xx or 192.168.xx.xx:8181.

        -
      • When checking the connectivity of datasource connections, the constraints on domain names are as follows:
        • The domain name can contain 1 to 255 characters. Only letters, digits, underscores (_), and hyphens (-) are allowed.
        • The top-level domain name must contain at least two letters, for example, .com, .net, and .cn.
        • During the test, you can add a port after the domain name and separate them with colons (:). The port can contain a maximum of five digits. The value ranges from 0 to 65535.

          For example, example.com:8080.

          +
        • When checking the connectivity of datasource connections, the notes and constraints on domain names are:
          • The domain name can contain 1 to 255 characters. Only letters, digits, underscores (_), and hyphens (-) are allowed.
          • The top-level domain name must contain at least two letters, for example, .com, .net, and .cn.
          • During the test, you can add a port after the domain name, separated by a colon (:). The port can contain a maximum of five digits, ranging from 0 to 65535.

            For example, example.com:8080.

      @@ -137,7 +137,6 @@
    • In the navigation pane of the DLI management console, choose Resources > Queue Management.
    • Locate the queue bound with the enhanced datasource connection, click More in the Operation column, and select Test Address Connectivity.
    • Enter the data source connection address and port number to test the network connectivity.

      Format: IP address:Port number

      Before testing the connection, ensure that the security group of the external data source has allowed access from the CIDR block of the queue.

      -

diff --git a/docs/dli/umn/dli_01_0012.html b/docs/dli/umn/dli_01_0012.html index b6ccd14f..88f5d69f 100644 --- a/docs/dli/umn/dli_01_0012.html +++ b/docs/dli/umn/dli_01_0012.html @@ -16,7 +16,7 @@ - diff --git a/docs/dli/umn/dli_01_0013.html b/docs/dli/umn/dli_01_0013.html index 62cd1b36..c2d3396b 100644 --- a/docs/dli/umn/dli_01_0013.html +++ b/docs/dli/umn/dli_01_0013.html @@ -1,6 +1,6 @@ -

Modifying Host Information

+

Modifying Host Information in an Elastic Resource Pool

Scenario

Host information is the mapping between host IP addresses and domain names. After you configure host information, jobs can only use the configured domain names to access corresponding hosts. After a datasource connection is created, you can modify the host information.

When accessing the HBase cluster of MRS, you need to configure the host name (domain name) and IP address of the instance.

diff --git a/docs/dli/umn/dli_01_0228.html b/docs/dli/umn/dli_01_0228.html index 1d48ede9..f75ecdbe 100644 --- a/docs/dli/umn/dli_01_0228.html +++ b/docs/dli/umn/dli_01_0228.html @@ -5,7 +5,7 @@

Difference Between DLI Tables and OBS Tables

  • Data stored in DLI tables is applicable to delay-sensitive services, such as interactive queries.
  • Data stored in OBS tables is applicable to delay-insensitive services, such as historical data statistics and analysis.
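
  The distinction also shows up in the table-creation SQL. The sketch below is illustrative only (the database name, table names, columns, and OBS path are hypothetical placeholders; the exact options are described in the Data Lake Insight SQL Syntax Reference). A DLI (MANAGED) table stores its data in DLI, while an OBS (EXTERNAL) table only references files under an OBS path.

  -- DLI (MANAGED) table: data is stored in DLI.
  CREATE TABLE db_demo.dli_orders (order_id STRING, amount DOUBLE);

  -- OBS (EXTERNAL) table: data stays in OBS under the specified path.
  CREATE TABLE db_demo.obs_orders (order_id STRING, amount DOUBLE)
  USING csv
  OPTIONS (path 'obs://demo-bucket/orders/');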
-

Constraints

  • Database
    • default is the database built in DLI. You cannot create a database named default.
    • DLI supports a maximum of 50 databases.
    +

    Notes and Constraints

    • Database
      • default is the database built in DLI. You cannot create a database named default.
      • DLI supports a maximum of 50 databases.
    • Table
      • DLI supports a maximum of 5,000 tables.
      • DLI supports the following table types:
        • MANAGED: Data is stored in a DLI table.
        • EXTERNAL: Data is stored in an OBS table.
        • View: A view can only be created using SQL statements.
        • Datasource table: The table type is also EXTERNAL.
      • You cannot specify a storage path when creating a DLI table.
    • Data import
      • Only OBS data can be imported to DLI or OBS.
      • You can import data in CSV, Parquet, ORC, JSON, or Avro format from OBS to tables created on DLI.
      • To import data in CSV format to a partitioned table, place the partition column in the last column of the data source.
      • The encoding format of imported data can only be UTF-8.
      @@ -104,6 +104,7 @@

      Last Accessed

      Last time when an operation was performed on the table.

      +

      The last access time of a table refers only to the last time it was updated, not the time it was read (SELECT operation).

      Operation

      diff --git a/docs/dli/umn/dli_01_0318.html b/docs/dli/umn/dli_01_0318.html index 1d7930df..2d5289e3 100644 --- a/docs/dli/umn/dli_01_0318.html +++ b/docs/dli/umn/dli_01_0318.html @@ -3,465 +3,535 @@

      DLI Operations That Can Be Recorded by CTS

      With CTS, you can record operations associated with DLI for later query, audit, and backtracking.

      -
      Table 1 DLI operations that can be recorded by CTS

      Operation

      +
      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/docs/dli/umn/dli_01_0320.html b/docs/dli/umn/dli_01_0320.html index 8cddb3ff..066b8c93 100644 --- a/docs/dli/umn/dli_01_0320.html +++ b/docs/dli/umn/dli_01_0320.html @@ -218,7 +218,6 @@
    • Select a queue from the queue list in the upper left corner of the SQL job editing window. For details about how to create a queue, see Creating a Queue.
    • In the upper right corner of the SQL job editing window, select a database, for example, qw, from the Databases drop-down list.
    • Create a table, for example, qw. For details about how to create a database and table, see Creating a Database or a Table.
    • In the SQL job editing window, enter the following SQL statement:
    • Table 1 DLI operations that can be recorded by CTS

      Operation

      Resource Type

      +

      Resource Type

      Trace Name

      Creating a database

      +

      Creating a database

      database

      +

      database

      createDatabase

      Deleting a database

      +

      Deleting a database

      database

      +

      database

      deleteDatabase

      Modifying the Database Owner

      +

      Changing the database owner

      database

      +

      database

      alterDatabaseOwner

      Creating a table

      +

      Creating a table

      table

      +

      table

      createTable

      Deleting tables

      +

      Deleting tables

      table

      +

      table

      deleteTable

      Exporting table data

      +

      Exporting table data

      table

      +

      table

      exportData

      Importing table data

      +

      Importing table data

      table

      +

      table

      importData

      Modifying the owner of a table

      +

      Modifying the owner of a table

      table

      +

      table

      alterTableOwner

      Creating a queue

      +

      Creating a queue

      queue

      +

      queue

      createQueue

      Deleting a queue

      +

      Deleting a queue

      queue

      +

      queue

      dropQueue

      Granting permissions to a queue

      +

      Granting permissions to a queue

      queue

      +

      queue

      shareQueue

      Modifying a Queue CIDR Block

      +

      Modifying the CIDR block of a queue

      queue

      +

      queue

      replaceQueue

      Restarting a queue

      +

      Restarting a queue

      queue

      +

      queue

      queueActions

      Scaling out/in a queue

      +

      Scaling out/in a queue

      queue

      +

      queue

      queueActions

      Submitting a job (SQL)

      +

      Submitting a job (SQL)

      queue

      +

      queue

      submitJob

      Canceling a job (SQL)

      +

      Canceling a job (SQL)

      queue

      +

      queue

      cancelJob

      Granting DLI the permission to access OBS buckets

      +

      Granting DLI the permission to access OBS buckets

      obs

      +

      obs

      obsAuthorize

      Checking the SQL syntax

      +

      Checking the SQL syntax

      job

      +

      job

      checkSQL

      Creating a job

      +

      Creating a job

      job

      +

      job

      createJob

      Updating a job

      +

      Updating a job

      job

      +

      job

      updateJob

      Deleting a job

      +

      Deleting a job

      job

      +

      job

      deleteJob

      Purchasing CUH packages

      +

      Creating a Flink OpenSource SQL job

      order

      +

      jobs

      +

      createStreamSqlJob

      +

      Updating a Flink OpenSource SQL job

      +

      jobs

      +

      updateStreamSqlJob

      +

      Deleting a Flink job

      +

      jobs

      +

      deleteStreamJobs

      +

      Stopping a Flink job

      +

      jobs

      +

      stopStreamJobs

      +

      Submitting a Flink job

      +

      jobs

      +

      submitStreamJobs

      +

      Creating a Flink Jar job

      +

      jobs

      +

      createStreamJarJob

      +

      Updating a Flink Jar job

      +

      jobs

      +

      updateStreamJarJob

      +

      Checking Flink jobs

      +

      jobs

      +

      checkStreamJob

      +

      Importing a savepoint

      +

      jobs

      +

      dealSavepoint

      +

      Purchasing CUH packages

      +

      order

      orderPackage

      Freezing resources

      +

      Freezing resources

      resource

      +

      resource

      freezeResource

      Unfreezing resources

      +

      Unfreezing resources

      resource

      +

      resource

      unfreezeResource

      Terminating resources

      +

      Terminating resources

      resource

      +

      resource

      deleteResource

      Clearing resources

      +

      Clearing resources

      resource

      +

      resource

      cleanResource

      Granting data permissions

      +

      Granting data permissions

      data

      +

      data

      dataAuthorize

      Granting permissions on other projects

      +

      Granting permissions on other projects

      data

      +

      data

      authorizeProjectData

      Exporting query results

      +

      Exporting query results

      data

      +

      data

      storeJobResult

      Saving a SQL template

      +

      Saving a SQL template

      sqlTemplate

      +

      sqlTemplate

      saveSQLTemplate

      Updating a SQL template

      +

      Updating a SQL template

      sqlTemplate

      +

      sqlTemplate

      updateSQLTemplate

      Deleting a SQL template

      +

      Deleting a SQL template

      sqlTemplate

      +

      sqlTemplate

      deleteSQLTemplate

      Creating a Flink template

      +

      Creating a Flink template

      flinkTemplate

      +

      template

      createStreamTemplate

      Updating a Flink template

      +

      Updating a Flink template

      flinkTemplate

      +

      template

      createStreamTemplate

      +

      updateStreamTemplate

      Deleting a Flink template

      +

      Checking Flink templates

      flinkTemplate

      +

      template

      +

      checkStreamTemplate

      +

      Deleting a Flink template

      +

      flinkTemplate

      deleteStreamTemplate

      Creating a data upload task

      +

      Creating a data upload task

      uploader

      +

      uploader

      createUploadJob

      Obtaining the authentication to perform a data upload task

      +

      Obtaining the authentication to perform a data upload task

      uploader

      +

      uploader

      getUploadAuthInfo

      Submitting a data upload task

      +

      Submitting a data upload task

      uploader

      +

      uploader

      commitUploadJob

      Creating a datasource authentication and uploading a certificate

      +

      Creating a datasource authentication and uploading a certificate

      authInfo

      +

      authInfo

      uploadAuthInfo

      Updating a datasource authentication

      +

      Updating a datasource authentication

      authInfo

      +

      authInfo

      updateAuthInfop

      Deleting a datasource authentication

      +

      Deleting a datasource authentication

      authInfo

      +

      authInfo

      deleteAuthInfo

      Updating the quota

      +

      Updating the quota

      quota

      +

      quota

      updateQuota

      Uploading a resource package

      +

      Uploading a resource package

      pkgResource

      +

      pkgResource

      uploadResources

      Deleting a resource package

      +

      Deleting a resource package

      pkgResource

      +

      pkgResource

      deleteResource

      Creating a basic datasource connection

      +

      Creating a basic datasource connection

      datasource

      +

      datasource

      createDatasourceConn

      Deleting a basic datasource connection

      +

      Deleting a basic datasource connection

      datasource

      +

      datasource

      deleteDatasourceConn

      Reactivating a basic datasource connection

      +

      Reactivating a basic datasource connection

      datasource

      +

      datasource

      reactivateDSConnection

      Creating an enhanced datasource connection

      +

      Creating an enhanced datasource connection

      datasource

      +

      datasource

      createConnection

      Deleting an enhanced datasource connection

      +

      Deleting an enhanced datasource connection

      datasource

      +

      datasource

      getConnection

      Binding a queue

      +

      Binding a queue

      datasource

      +

      datasource

      associateQueueToDatasourceConn

      Unbinding a queue

      +

      Unbinding a queue

      datasource

      +

      datasource

      disassociateQueueToDatasourceConn

      Modifying the host information

      +

      Modifying the host information

      datasource

      +

      datasource

      updateHostInfo

      Adding a route

      +

      Adding a route

      datasource

      +

      datasource

      addRoute

      Deleting a route

      +

      Deleting a route

      datasource

      +

      datasource

      deleteRoute

      Creating a topic

      +

      Creating a topic

      smn

      +

      smn

      createTopic

      Creating an agency

      +

      Creating an agency

      agency

      +

      agency

      createAgencyV2

      Creating a batch processing job

      +

      Creating a batch processing job

      batch

      +

      batch

      createBatch

      Canceling a batch processing job

      +

      Canceling a batch processing job

      batch

      +

      batch

      cancelBatch

      Creating a session

      +

      Creating a session

      session

      +

      session

      createSession

      Deleting a session

      +

      Deleting a session

      session

      +

      session

      deleteSession

      Creating a statement

      +

      Creating a statement

      statement

      +

      statement

      createStatement

      Canceling execution of a statement

      +

      Canceling execution of a statement

      statement

      +

      statement

      cancelStatement

      Creating a global variable

      +

      Creating a global variable

      globalVar

      +

      globalVar

      createGlobalVariable

      Deleting a global variable

      +

      Deleting a global variable

      globalVar

      +

      globalVar

      deleteGlobalVariable

      Modifying a global variable

      +

      Modifying a global variable

      globalVar

      +

      globalVar

      updateGlobalVariable

      SELECT * FROM qw.qw LIMIT 10;
       
      -

      Alternatively, you can double-click the table name qw. The query statement is automatically entered in the SQL job editing window.

    • On top of the editing window, click More > Verify Syntax to check whether the SQL statement is correct.
      1. If the verification fails, check the SQL statement syntax by referring to Data Lake Insight SQL Syntax Reference.
      2. If the syntax verification is successful, click Execute. Read and agree to the privacy agreement. Click OK to execute the SQL statement.
      3. After the execution is complete, you can view the execution result in the area under the SQL job editing window.
      diff --git a/docs/dli/umn/dli_01_0384.html b/docs/dli/umn/dli_01_0384.html index 27f7efa0..738016f5 100644 --- a/docs/dli/umn/dli_01_0384.html +++ b/docs/dli/umn/dli_01_0384.html @@ -5,7 +5,7 @@

      On the Overview page, click Create Job in the upper right corner of the Spark Jobs tab or click Create Job in the upper right corner of the Spark Jobs page. The Spark job editing page is displayed.

      On the Spark job editing page, a message is displayed, indicating that a temporary DLI data bucket will be created. The created bucket is used to store temporary data generated by DLI, such as job logs and job results. If you choose not to create the bucket, you cannot view job logs. If you agree to create it, the bucket is created with the default bucket name.

      If you do not need to create a DLI temporary data bucket and do not want to receive this message, select Do not show again and click Cancel.

      -

      Prerequisites

      • You have uploaded the dependencies to the corresponding OBS bucket on the Data Management > Package Management page. For details, see Creating a Package.
      • Before creating a Spark job to access other external data sources, such as OpenTSDB, HBase, Kafka, GaussDB(DWS), RDS, CSS, CloudTable, DCS Redis, and DDS, you need to create a datasource connection to enable the network between the job running queue and external data sources.
        • For details about the external data sources that can be accessed by Spark jobs, see Cross-Source Analysis Development Methods.
        • For details about how to create a datasource connection, see Enhanced Datasource Connections.

          On the Resources > Queue Management page, locate the queue you have created, and choose More > Test Address Connectivity in the Operation column to check whether the network connection between the queue and the data source is normal. For details, see Testing Address Connectivity.

          +

          Prerequisites

          • You have uploaded the dependencies to the corresponding OBS bucket on the Data Management > Package Management page. For details, see Creating a Package.
          • Before creating a Spark job to access other external data sources, such as OpenTSDB, HBase, Kafka, GaussDB(DWS), RDS, CSS, CloudTable, DCS Redis, and DDS, you need to create a datasource connection to enable the network between the job running queue and external data sources.
            • For details about the external data sources that can be accessed by Spark jobs, see Cross-Source Analysis Development Methods.
            • For details about how to create a datasource connection, see Enhanced Datasource Connections.

              On the Resources > Queue Management page, locate the queue you have created, click More in the Operation column, and select Test Address Connectivity to check if the network connection between the queue and the data source is normal. For details, see Testing Address Connectivity.

          diff --git a/docs/dli/umn/dli_01_0397.html b/docs/dli/umn/dli_01_0397.html index a6e22a68..df3b613a 100644 --- a/docs/dli/umn/dli_01_0397.html +++ b/docs/dli/umn/dli_01_0397.html @@ -2556,156 +2556,161 @@
    • +

      Flink 1.15 Dependencies

      Obtain information about the Flink 1.15 dependencies from the logs of a Flink job.

      +
      1. Check the logs of a Flink job.
        1. Log in to the DLI console. In the navigation pane on the left, choose Job Management > Flink Jobs.
        2. Click the name of the desired job. On the displayed page, click the Run Log tab.
        3. Check the latest run logs. For more logs, check the OBS bucket where the job logs are stored.
        +
      2. Search for dependency information in the logs.

        Search for Classpath: in the logs to check the dependencies.

        +
      +

      Flink 1.12 Dependencies

      -
      Table 4 Flink 1.12 dependencies

      Dependency

      +
      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -2714,120 +2719,120 @@

      Flink 1.10 Dependencies

      Only queues created after December 2020 can use the Flink 1.10 dependencies.

      -
      Table 4 Flink 1.12 dependencies

      Dependency

      bcpkix-jdk15on-1.60.jar

      +

      bcpkix-jdk15on-1.60.jar

      flink-json-1.12.2-ei-313001-dli-2022011002.jar

      +

      flink-json-1.12.2-ei-313001-dli-2022011002.jar

      libtensorflow-1.12.0.jar

      +

      libtensorflow-1.12.0.jar

      bcprov-jdk15on-1.60.jar

      +

      bcprov-jdk15on-1.60.jar

      flink-kubernetes_2.11-1.12.2-ei-313001-dli-2022011002.jar

      +

      flink-kubernetes_2.11-1.12.2-ei-313001-dli-2022011002.jar

      log4j-1.2-api-2.17.1.jar

      +

      log4j-1.2-api-2.17.1.jar

      clickhouse-jdbc-0.3.1-ei-313001-SNAPSHOT.jar

      +

      clickhouse-jdbc-0.3.1-ei-313001-SNAPSHOT.jar

      flink-metrics-prometheus_2.11-1.12.2-ei-313001-dli-2022011002.jar

      +

      flink-metrics-prometheus_2.11-1.12.2-ei-313001-dli-2022011002.jar

      log4j-api-2.17.1.jar

      +

      log4j-api-2.17.1.jar

      commons-codec-1.9.jar

      +

      commons-codec-1.9.jar

      flink-obs-hadoop-fs-2.0.0-20220226.034421-73.jar

      +

      flink-obs-hadoop-fs-2.0.0-20220226.034421-73.jar

      log4j-core-2.17.1.jar

      +

      log4j-core-2.17.1.jar

      commons-configuration-1.7.jar

      +

      commons-configuration-1.7.jar

      flink-s3-fs-hadoop-1.12.2.jar

      +

      flink-s3-fs-hadoop-1.12.2.jar

      log4j-slf4j-impl-2.17.1.jar

      +

      log4j-slf4j-impl-2.17.1.jar

      dataflow-fs-obs-2.0.0-20220226.034402-190.jar

      +

      dataflow-fs-obs-2.0.0-20220226.034402-190.jar

      flink-shaded-zookeeper-3.6.3-ei-313001-SNAPSHOT.jar

      +

      flink-shaded-zookeeper-3.6.3-ei-313001-SNAPSHOT.jar

      luxor-encrypt-2.0.0-20220405.072004-199.jar

      +

      luxor-encrypt-2.0.0-20220405.072004-199.jar

      deeplearning4j-core-0.9.1.jar

      +

      deeplearning4j-core-0.9.1.jar

      flink-sql-avro-1.12.2-ei-313001-dli-2022011002.jar

      +

      flink-sql-avro-1.12.2-ei-313001-dli-2022011002.jar

      luxor-fs3-2.0.0-20220405.072025-195.jar

      +

      luxor-fs3-2.0.0-20220405.072025-195.jar

      deeplearning4j-nlp-0.9.1.jar

      +

      deeplearning4j-nlp-0.9.1.jar

      flink-sql-avro-confluent-registry-1.12.2-ei-313001-dli-2022011002.jar

      +

      flink-sql-avro-confluent-registry-1.12.2-ei-313001-dli-2022011002.jar

      luxor-obs-fs3-2.0.0-20220405.072030-195.jar

      +

      luxor-obs-fs3-2.0.0-20220405.072030-195.jar

      deeplearning4j-nn-0.9.1.jar

      +

      deeplearning4j-nn-0.9.1.jar

      flink-table_2.11-1.12.2-ei-313001-dli-2022011002.jar

      +

      flink-table_2.11-1.12.2-ei-313001-dli-2022011002.jar

      manager-hadoop-security-crypter-8.1.3-313001-SNAPSHOT.jar

      +

      manager-hadoop-security-crypter-8.1.3-313001-SNAPSHOT.jar

      ejml-cdense-0.33.jar

      +

      ejml-cdense-0.33.jar

      flink-table-blink_2.11-1.12.2-ei-313001-dli-2022011002.jar

      +

      flink-table-blink_2.11-1.12.2-ei-313001-dli-2022011002.jar

      manager-wc2frm-8.1.3-313001-SNAPSHOT.jar

      +

      manager-wc2frm-8.1.3-313001-SNAPSHOT.jar

      ejml-core-0.33.jar

      +

      ejml-core-0.33.jar

      guava-18.0.jar

      +

      guava-18.0.jar

      mrs-obs-provider-3.1.1.49.jar

      +

      mrs-obs-provider-3.1.1.49.jar

      ejml-ddense-0.33.jar

      +

      ejml-ddense-0.33.jar

      guava-26.0-jre.jar

      +

      guava-26.0-jre.jar

      nd4j-api-0.9.1.jar

      +

      nd4j-api-0.9.1.jar

      ejml-dsparse-0.33.jar

      +

      ejml-dsparse-0.33.jar

      hadoop-hdfs-client-3.1.1-ei-302002.jar

      +

      hadoop-hdfs-client-3.1.1-ei-302002.jar

      nd4j-native-0.9.1.jar

      +

      nd4j-native-0.9.1.jar

      ejml-experimental-0.33.jar

      +

      ejml-experimental-0.33.jar

      hadoop-3.1.1-46.jar

      +

      hadoop-3.1.1-46.jar

      nd4j-native-api-0.9.1.jar

      +

      nd4j-native-api-0.9.1.jar

      ejml-fdense-0.33.jar

      +

      ejml-fdense-0.33.jar

      hadoop-plugins-8.1.3-313001-SNAPSHOT.jar

      +

      hadoop-plugins-8.1.3-313001-SNAPSHOT.jar

      nd4j-native-platform-0.9.1.jar

      +

      nd4j-native-platform-0.9.1.jar

      ejml-simple-0.33.jar

      +

      ejml-simple-0.33.jar

      httpasyncclient-4.1.2.jar

      +

      httpasyncclient-4.1.2.jar

      okhttp-3.14.8.jar

      +

      okhttp-3.14.8.jar

      ejml-zdense-0.33.jar

      +

      ejml-zdense-0.33.jar

      httpclient-4.5.3.jar

      +

      httpclient-4.5.3.jar

      okio-1.14.0.jar

      +

      okio-1.14.0.jar

      elsa-3.0.0-M7.jar

      +

      elsa-3.0.0-M7.jar

      httpcore-4.4.4.jar

      +

      httpcore-4.4.4.jar

      ranger-obs-client-0.1.1.jar

      +

      ranger-obs-client-0.1.1.jar

      flink-changelog-json-1.12.2-ei-313001-dli-2022011002.jar

      +

      flink-changelog-json-1.12.2-ei-313001-dli-2022011002.jar

      httpcore-nio-4.4.4.jar

      +

      httpcore-nio-4.4.4.jar

      secComponentApi-1.0.5.jar

      +

      secComponentApi-1.0.5.jar

      flink-csv-1.12.2-ei-313001-dli-2022011002.jar

      +

      flink-csv-1.12.2-ei-313001-dli-2022011002.jar

      java-xmlbuilder-1.1.jar

      +

      java-xmlbuilder-1.1.jar

      slf4j-api-1.7.26.jar

      +

      slf4j-api-1.7.26.jar

      flink-dist_2.11-1.12.2-ei-313001-dli-2022011002.jar

      +

      flink-dist_2.11-1.12.2-ei-313001-dli-2022011002.jar

      jna-4.1.0.jar

      +

      jna-4.1.0.jar

      tensorflow-1.12.0.jar

      +

      tensorflow-1.12.0.jar

      Table 5 Flink 1.10 dependencies

      Dependency

      +
      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -2835,120 +2840,120 @@

      Flink 1.7.2 Dependencies

      -
      Table 5 Flink 1.10 dependencies

      Dependency

      bcpkix-jdk15on-1.60.jar

      +

      bcpkix-jdk15on-1.60.jar

      esdk-obs-java-3.20.6.1.jar

      +

      esdk-obs-java-3.20.6.1.jar

      java-xmlbuilder-1.1.jar

      +

      java-xmlbuilder-1.1.jar

      bcprov-jdk15on-1.60.jar

      +

      bcprov-jdk15on-1.60.jar

      flink-cep_2.11-1.10.0.jar

      +

      flink-cep_2.11-1.10.0.jar

      jna-4.1.0.jar

      +

      jna-4.1.0.jar

      commons-codec-1.9.jar

      +

      commons-codec-1.9.jar

      flink-cep-scala_2.11-1.10.0.jar

      +

      flink-cep-scala_2.11-1.10.0.jar

      libtensorflow-1.12.0.jar

      +

      libtensorflow-1.12.0.jar

      commons-configuration-1.7.jar

      +

      commons-configuration-1.7.jar

      flink-dist_2.11-1.10.0.jar

      +

      flink-dist_2.11-1.10.0.jar

      log4j-over-slf4j-1.7.26.jar

      +

      log4j-over-slf4j-1.7.26.jar

      deeplearning4j-core-0.9.1.jar

      +

      deeplearning4j-core-0.9.1.jar

      flink-python_2.11-1.10.0.jar

      +

      flink-python_2.11-1.10.0.jar

      logback-classic-1.2.3.jar

      +

      logback-classic-1.2.3.jar

      deeplearning4j-nlp-0.9.1.jar

      +

      deeplearning4j-nlp-0.9.1.jar

      flink-queryable-state-runtime_2.11-1.10.0.jar

      +

      flink-queryable-state-runtime_2.11-1.10.0.jar

      logback-core-1.2.3.jar

      +

      logback-core-1.2.3.jar

      deeplearning4j-nn-0.9.1.jar

      +

      deeplearning4j-nn-0.9.1.jar

      flink-sql-client_2.11-1.10.0.jar

      +

      flink-sql-client_2.11-1.10.0.jar

      nd4j-api-0.9.1.jar

      +

      nd4j-api-0.9.1.jar

      ejml-cdense-0.33.jar

      +

      ejml-cdense-0.33.jar

      flink-state-processor-api_2.11-1.10.0.jar

      +

      flink-state-processor-api_2.11-1.10.0.jar

      nd4j-native-0.9.1.jar

      +

      nd4j-native-0.9.1.jar

      ejml-core-0.33.jar

      +

      ejml-core-0.33.jar

      flink-table_2.11-1.10.0.jar

      +

      flink-table_2.11-1.10.0.jar

      nd4j-native-api-0.9.1.jar

      +

      nd4j-native-api-0.9.1.jar

      ejml-ddense-0.33.jar

      +

      ejml-ddense-0.33.jar

      flink-table-blink_2.11-1.10.0.jar

      +

      flink-table-blink_2.11-1.10.0.jar

      nd4j-native-platform-0.9.1.jar

      +

      nd4j-native-platform-0.9.1.jar

      ejml-dsparse-0.33.jar

      +

      ejml-dsparse-0.33.jar

      guava-26.0-jre.jar

      +

      guava-26.0-jre.jar

      okhttp-3.14.8.jar

      +

      okhttp-3.14.8.jar

      ejml-experimental-0.33.jar

      +

      ejml-experimental-0.33.jar

      hadoop-3.1.1-41.jar

      +

      hadoop-3.1.1-41.jar

      okio-1.14.0.jar

      +

      okio-1.14.0.jar

      ejml-fdense-0.33.jar

      +

      ejml-fdense-0.33.jar

      httpasyncclient-4.1.2.jar

      +

      httpasyncclient-4.1.2.jar

      secComponentApi-1.0.5.jar

      +

      secComponentApi-1.0.5.jar

      ejml-simple-0.33.jar

      +

      ejml-simple-0.33.jar

      httpclient-4.5.3.jar

      +

      httpclient-4.5.3.jar

      slf4j-api-1.7.26.jar

      +

      slf4j-api-1.7.26.jar

      ejml-zdense-0.33.jar

      +

      ejml-zdense-0.33.jar

      httpcore-4.4.4.jar

      +

      httpcore-4.4.4.jar

      tensorflow-1.12.0.jar

      +

      tensorflow-1.12.0.jar

      elsa-3.0.0-M7.jar

      +

      elsa-3.0.0-M7.jar

      httpcore-nio-4.4.4.jar

      +

      httpcore-nio-4.4.4.jar

      -

      +

      -

      - - - - @@ -47,7 +47,7 @@ - diff --git a/docs/dli/umn/dli_01_0563.html b/docs/dli/umn/dli_01_0563.html index aac50a18..f0a8216f 100644 --- a/docs/dli/umn/dli_01_0563.html +++ b/docs/dli/umn/dli_01_0563.html @@ -5,7 +5,7 @@

      You can set Spark driver parameters to improve the scheduling efficiency of queues.

      This section describes how to set queue properties on the management console.

      -

      Constraints and Limitations

      • Only SQL queues of the Spark engine support configuring queue properties.
      • Setting queue properties is only supported after the queue has been created.
      • Currently, only queue properties related to the Spark driver can be set.
      • Queue properties cannot be set in batches.
      • For a queue in an elastic resource pool, if the minimum CUs of the queue is less than 16 CUs, both Max. Spark Driver Instances and Max. Prestart Spark Driver Instances set in the queue properties do not apply.
      +

      Notes and Constraints

      • Only SQL queues of the Spark engine support configuring queue properties.
      • Setting queue properties is only supported after the queue has been created.
      • Currently, only queue properties related to the Spark driver can be set.
      • Queue properties cannot be set in batches.
      • For a queue in an elastic resource pool, if the minimum CUs of the queue is less than 16 CUs, both Max. Spark Driver Instances and Max. Prestart Spark Driver Instances set in the queue properties do not apply.

      Procedure

      1. In the navigation pane of the DLI management console, choose Resources > Queue Management.
      2. In the Operation column of the queue, choose More > Set Property.
      3. Go to the queue property setting page and set property parameters. For details about the property parameters, see Table 1.
      Table 6 Flink 1.7.2 dependencies

      Dependency

      +
      - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/docs/dli/umn/dli_01_0407.html b/docs/dli/umn/dli_01_0407.html index e2afc426..57925345 100644 --- a/docs/dli/umn/dli_01_0407.html +++ b/docs/dli/umn/dli_01_0407.html @@ -5,7 +5,7 @@ -

      Constraints

      • A package can be deleted, but a package group cannot be deleted.
      • The following types of packages can be uploaded:
        • JAR: JAR file
        • PyFile: User Python file
        • File: User file
        • ModelFile: User AI model file
        +

        Notes and Constraints

        • A package can be deleted, but a package group cannot be deleted.
        • The following types of packages can be uploaded:
          • JAR: JAR file
          • PyFile: User Python file
          • File: User file
          • ModelFile: User AI model file

        Package Management Page

        diff --git a/docs/dli/umn/dli_01_0420.html b/docs/dli/umn/dli_01_0420.html index ef2561a9..2667210e 100644 --- a/docs/dli/umn/dli_01_0420.html +++ b/docs/dli/umn/dli_01_0420.html @@ -1,13 +1,9 @@

        Importing Data to a DLI Table

        -

        Importing Data Using OBS

        On the DLI management console, you can import data stored on OBS to DLI tables from Data Management > Databases and Tables > Table Management and SQL Editor pages. For details, see Importing Data to the Table.

        -
        -

        Importing Data Using CDM

        Use the Cloud Data Migration (CDM) service to import data from OBS to DLI. You need to create a CDM queue first.

        -

        -

        For details about how to create the queue, see "Migrating Data from OBS to DLI" in the Cloud Data Migration User Guide.

        -

        Pay attention to the following configurations:

        -
        • The VPC to which the DLI account belongs is the same as the VPC of the CDM queue.
        • You need to create two links, including a DLI link and an OBS link.
        • The format of the file to be transmitted can be CSV or JSON.
        +

        Importing Data Using OBS

        On the DLI management console, you can import data stored in OBS into DLI tables.

        +

        To import OBS data to a DLI table, either choose Data Management > Databases and Tables in the navigation pane on the left, locate a desired database, and click Tables in the Operation column, or choose SQL Editor in the navigation pane on the left.

        +

        For details, see Importing Data to the Table.
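
        If you work in the SQL editor, the import can also be expressed in SQL. The sketch below is a hedged illustration (the table names, columns, and underlying OBS path are hypothetical placeholders): it assumes an OBS (EXTERNAL) table has already been created over the source files and copies its rows into a DLI table. See the Data Lake Insight SQL Syntax Reference for the authoritative import statements and their options.

        -- Hypothetical names: obs_orders is an OBS table over the source files, dli_orders is a DLI table.
        INSERT INTO db_demo.dli_orders
        SELECT order_id, amount
        FROM db_demo.obs_orders;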

        Importing Data Using DIS

        Use Data Ingestion Service (DIS) to import data to DLI. You need to create a DIS stream first.

        diff --git a/docs/dli/umn/dli_01_0426.html b/docs/dli/umn/dli_01_0426.html index 0600ec2e..8be3272e 100644 --- a/docs/dli/umn/dli_01_0426.html +++ b/docs/dli/umn/dli_01_0426.html @@ -10,9 +10,11 @@
      • + - diff --git a/docs/dli/umn/dli_01_0454.html b/docs/dli/umn/dli_01_0454.html index ad3e3c99..c10532f0 100644 --- a/docs/dli/umn/dli_01_0454.html +++ b/docs/dli/umn/dli_01_0454.html @@ -17,7 +17,7 @@

        If the Kafka server listens on its port by host name, you need to add the mapping between the host name and IP address of the Kafka Broker node to the datasource connection. (For how a Kafka channel is then referenced in a job, see the Flink SQL sketch after this list.)

      • CloudTable as the data input and output channel

        To use CloudTable as the data input and output channel, create a cluster in CloudTable and obtain the cluster ID.

      • CSS as the output channel

        To use CSS as the data output channel, create a cluster in CSS and obtain the cluster's private network address. For details, see Getting Started in the Cloud Search Service User Guide.

        -
      • DCS as the output channel

        To use DCS as the output channel, create a Redis cache instance in DCS and obtain the address used for Flink jobs to connect to the Redis instance. For detailed operations, see Getting Started in the Distributed Cache Service User Guide.

        +
      • DCS as the output channel

        To use DCS as the output channel, create a Redis cache instance in DCS and obtain the address used for Flink jobs to connect to the Redis instance. For details, see "Buying a DCS Redis Instance" in Distributed Cache Service User Guide.
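
        As an illustration of how such a channel is referenced once the network is in place, the sketch below declares a Kafka source table in Flink OpenSource SQL. The topic, broker address, group ID, and columns are hypothetical placeholders; the broker must be reachable through the enhanced datasource connection (including any host name mappings mentioned above).

        -- Hypothetical Kafka source table for a Flink OpenSource SQL job.
        CREATE TABLE kafka_source (
          order_id STRING,
          amount   DOUBLE
        ) WITH (
          'connector' = 'kafka',
          'topic' = 'demo_topic',
          'properties.bootstrap.servers' = '192.168.0.10:9092',
          'properties.group.id' = 'demo_group',
          'scan.startup.mode' = 'latest-offset',
          'format' = 'json'
        );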

      diff --git a/docs/dli/umn/dli_01_0487.html b/docs/dli/umn/dli_01_0487.html index eb29b90b..779e7c28 100644 --- a/docs/dli/umn/dli_01_0487.html +++ b/docs/dli/umn/dli_01_0487.html @@ -1,18 +1,20 @@ -

      Elastic Queue Scaling

      +

      Elastic Scaling of Queues

      Prerequisites

      Elastic scaling can be performed for a newly created queue only after jobs have been run in it.

      -

      Precautions

      • Queues with 16 CUs do not support scale-out or scale-in.
      • Queues with 64 CUs do not support scale-in.
      • If Status of queue xxx is assigning, which is not available is displayed on the Elastic Scaling page, the queue can be scaled only after the queue resources are allocated.
      +

      Notes and Constraints

      • Queues with 16 CUs do not support scale-out or scale-in.
      • Queues with 64 CUs do not support scale-in.
      • If the message "Status of queue xxx is assigning, which is not available" is displayed on the Elastic Scaling page, the queue can be scaled only after its resources are allocated.
      • If there are not enough physical resources, a queue may not be able to scale out to the desired target size.
      • The system does not guarantee that a queue will be scaled in to the desired target size. Typically, the system checks resource usage before scaling in the queue to determine whether there is enough room for the scale-in. If the existing resources cannot be reduced by the minimum scaling step, the scale-in may fail or only partially complete.

        The scaling step may vary depending on the resource specifications, usually 16 CUs, 32 CUs, 48 CUs, 64 CUs, etc.

        +

        For example, if the queue size is 48 CUs and job execution uses 18 CUs, the remaining 30 CUs do not meet the requirement for scaling in by the minimum step of 32 CUs. If a scaling in task is executed, it will fail.

        +

      Scaling Out

      If the current queue specifications do not meet service requirements, you can increase the number of CUs to scale out the queue.

      -

      Scale-out is time-consuming. After you perform scale-out on the Elastic Scaling page of DLI, wait for about 10 minutes. The duration is related to the CU amount to add. After a period of time, refresh the Queue Management page and check whether values of Specifications and Actual CUs are the same to determine whether the scale-out is successful. Alternatively, on the Job Management page, check the status of the SCALE_QUEUE SQL job. If the job status is Scaling, the queue is being scaled out.

      +

      Scale-out is time-consuming. After you perform scale-out on the Elastic Scaling page of DLI, wait for about 10 minutes. The duration is related to the CU amount to add. After a period of time, refresh the Queue Management page and check whether values of Specifications and Actual CUs are the same to determine whether the scale-out is successful. Alternatively, on the Job Management page, check the status of the SCALE_QUEUE SQL job. If the job status is Scaling, the queue is being scaled out.

      The procedure is as follows:

      1. On the left of the DLI management console, click Resources > Queue Management.
      2. Select the queue to be scaled out and choose More > Elastic Scaling in the Operation column.
      3. On the displayed page, select Scale-out for Operation and set the scale-out amount.
      4. Click .

      Scaling In

      If the current queue specifications exceed your computing requirements, you can reduce the number of CUs to scale in the queue.

      -
      • Scale-in is time-consuming. After you perform scale-in on the Elastic Scaling page of DLI, wait for about 10 minutes. The duration is related to the CU amount to reduce. After a period of time, refresh the Queue Management page and check whether values of Specifications and Actual CUs are the same to determine whether the scale-in is successful. Alternatively, on the Job Management page, check the status of the SCALE_QUEUE SQL job. If the job status is Scaling, the queue is being scaled in.
      • The system may not fully scale in the queue to the target size. If the current queue is in use or the service volume of the queue is large, the scale-in may fail or only partial specifications may be reduced.
      • By default, the minimum number of CUs is 16. That is, when the queue specifications are 16 CUs, you cannot scale in the queue.
      +
      • Scale-in is time-consuming. After you perform scale-in on the Elastic Scaling page of DLI, wait for about 10 minutes. The duration is related to the CU amount to reduce. After a period of time, refresh the Queue Management page and check whether values of Specifications and Actual CUs are the same to determine whether the scale-in is successful. Alternatively, on the Job Management page, check the status of the SCALE_QUEUE SQL job. If the job status is Scaling, the queue is being scaled in.
      • By default, the minimum number of CUs is 16. That is, when the queue specifications are 16 CUs, you cannot scale in the queue.

      The procedure is as follows:

      1. On the left of the DLI management console, click Resources > Queue Management.
      2. Select the queue to be scaled out, click More in the Operation column, and select Elastic Scaling.
      3. On the displayed page, select Scale-in for Operation and set the scale-in amount.
      4. Click .
      diff --git a/docs/dli/umn/dli_01_0489.html b/docs/dli/umn/dli_01_0489.html index 133f1c4a..9bae064a 100644 --- a/docs/dli/umn/dli_01_0489.html +++ b/docs/dli/umn/dli_01_0489.html @@ -2,8 +2,8 @@

      Testing Address Connectivity

      You can use this function to test the connectivity between a DLI queue and a specified peer IP address in common scenarios, or between the queue and the peer IP address bound to a datasource connection in datasource connection scenarios. The operation is as follows:

      -
      1. On the Queue Management page, locate the row containing the target queue, click More in the Operation column, and select Test Address Connectivity.
      2. On the Test Address Connectivity page, enter the address to be tested. The domain name and IP address are supported, and the port number can be specified.
      3. Click Test.

        If the test address is reachable, a message is displayed on the page, indicating that the address is reachable.

        -

        If the test address is unreachable, the system displays a message indicating that the address is unreachable. Check the network configurations and try again. Network configurations include the VPC peering and the datasource connection. Check whether they have been activated.

        +
        1. On the Queue Management page, locate the row containing the target queue, click More in the Operation column, and select Test Address Connectivity.
        2. On the Test Address Connectivity page, enter the address to be tested. The domain name and IP address are supported, and the port number can be specified.
        3. Click Test.

          If the test address is reachable, a message is displayed on the page, indicating that the address is reachable.

          +

          If the test address is unreachable, the system displays a message indicating that the address is unreachable. Check the network configurations and retry. Network configurations include the VPC peering and the datasource connection. Check whether they have been activated.

      diff --git a/docs/dli/umn/dli_01_0504.html b/docs/dli/umn/dli_01_0504.html index 6c85a9c6..2f441848 100644 --- a/docs/dli/umn/dli_01_0504.html +++ b/docs/dli/umn/dli_01_0504.html @@ -21,7 +21,7 @@
      @@ -30,7 +30,7 @@ - @@ -39,8 +39,13 @@
Table 6 Flink 1.7.2 dependencies

Dependency

bcpkix-jdk15on-1.60.jar
esdk-obs-java-3.1.3.jar
httpcore-4.4.4.jar
bcprov-jdk15on-1.60.jar
flink-cep_2.11-1.7.0.jar
httpcore-nio-4.4.4.jar
commons-codec-1.9.jar
flink-cep-scala_2.11-1.7.0.jar
java-xmlbuilder-1.1.jar
commons-configuration-1.7.jar
flink-dist_2.11-1.7.0.jar
jna-4.1.0.jar
deeplearning4j-core-0.9.1.jar
flink-gelly_2.11-1.7.0.jar
libtensorflow-1.12.0.jar
deeplearning4j-nlp-0.9.1.jar
flink-gelly-scala_2.11-1.7.0.jar
log4j-over-slf4j-1.7.21.jar
deeplearning4j-nn-0.9.1.jar
flink-ml_2.11-1.7.0.jar
logback-classic-1.2.3.jar
ejml-cdense-0.33.jar
flink-python_2.11-1.7.0.jar
logback-core-1.2.3.jar
ejml-core-0.33.jar
flink-queryable-state-runtime_2.11-1.7.0.jar
nd4j-api-0.9.1.jar
ejml-ddense-0.33.jar
flink-shaded-curator-1.7.0.jar
nd4j-native-0.9.1.jar
ejml-dsparse-0.33.jar
flink-shaded-hadoop2-uber-1.7.0.jar
nd4j-native-api-0.9.1.jar
ejml-experimental-0.33.jar
flink-table_2.11-1.7.0.jar
nd4j-native-platform-0.9.1.jar
ejml-fdense-0.33.jar
guava-26.0-jre.jar
okhttp-3.14.8.jar
ejml-simple-0.33.jar
hadoop-3.1.1-41-20201014.085840-4.jar
okio-1.14.0.jar
ejml-zdense-0.33.jar
httpasyncclient-4.1.2.jar
slf4j-api-1.7.21.jar
elsa-3.0.0-M7.jar
httpclient-4.5.12.jar
tensorflow-1.12.0.jar

      16–64 CUs

      • High reliability and availability are not supported.
      • Queue properties and job priorities cannot be set.
• Interconnection with notebook instances is not supported.
      -

      For more constraints and limitations on elastic resource pools, see Constraints.

      +

      For more notes and constraints on elastic resource pools, see Notes and Constraints.

      This edition is suitable for testing scenarios with low resource consumption and low requirements for resource reliability and availability.

      64 CUs or higher

      For more constraints and limitations on elastic resource pools, see Constraints.

      +

      For more notes and constraints on elastic resource pools, see Notes and Constraints.

      This edition offers powerful computing capabilities, high availability, and flexible resource management. It is suitable for large-scale computing tasks and business scenarios with long-term resource planning needs.

      -

      Constraints

      • The region of an elastic resource pool cannot be changed.
      • Jobs of Flink 1.10 or later can run in elastic resource pools.
      • The network segment of an elastic resource pool cannot be changed after being set.
      • You can only view the scaling history of resource pools in the last 30 days.
      • Elastic resource pools cannot access the Internet.

        +

        Notes and Constraints

        • The region of an elastic resource pool cannot be changed.
        • Jobs of Flink 1.10 or later can run in elastic resource pools.
        • The network segment of an elastic resource pool cannot be changed after being set.
        • You can only view the scaling history of resource pools in the last 30 days.
        • Elastic resource pools cannot access the Internet.

          +
• The number of CUs in an elastic resource pool can change when you set the CUs, add or delete queues in the pool, or modify the scaling policies of its queues, or when the system automatically triggers scaling. However, in some cases, the system cannot guarantee that scaling will reach the target number of CUs as planned.
          • If there are not enough physical resources, an elastic resource pool may not be able to scale out to the desired target size.
          • The system does not guarantee that an elastic resource pool will be scaled in to the desired target size.

The system checks the resource usage before scaling in the elastic resource pool to determine whether there is enough space for scaling in. If the existing resources cannot be scaled in according to the minimum scaling step, the pool may be scaled in only partially or may fail to be scaled in.

            +

            The scaling step may vary depending on the resource specifications, usually 16 CUs, 32 CUs, 48 CUs, 64 CUs, etc.

            +

For example, suppose an elastic resource pool has a capacity of 192 CUs, the queues in the pool are using 68 CUs for running jobs, and the plan is to scale in to 64 CUs.

+

When executing the scale-in task, the system determines that 124 CUs are idle and scales in by the minimum step of 64 CUs. The remaining 60 idle CUs are less than one full step and cannot be scaled in further. Therefore, after the scale-in task is executed, the capacity of the elastic resource pool is 128 CUs.

            +

        Scenario

        Resources too fixed to meet a range of requirements.

        @@ -71,11 +76,11 @@

      Efficiency

      +

      Scale-out duration

      You need to set scaling tasks repeatedly to improve the resource utilization.

      +

      You will need to spend several minutes manually scaling out.

      Dynamic scaling can be done in seconds.

      +

      No manual intervention is required, as dynamic scale out can be done in seconds.

      Resource utilization

      diff --git a/docs/dli/umn/dli_01_0524.html b/docs/dli/umn/dli_01_0524.html index bc54155f..0b2db158 100644 --- a/docs/dli/umn/dli_01_0524.html +++ b/docs/dli/umn/dli_01_0524.html @@ -1,10 +1,7 @@

      Modifying Specifications

      -

      Scenario

      If CUs of a yearly/monthly elastic resource pool cannot meet your service requirements, you can modify the CUs. In this case, you will be charged based on the number of CUs exceeding that of the yearly/monthly elastic resource pool.

      -

      For example, you have purchased an elastic resource pool with 64 CUs, and you find that most time data processing needs 128 CUs. You can add 64 CUs to the elastic resource pool and be billed based on a CU/hour basis. To save more, you can scale up your elastic resource pool to 128 CUs and be billed on a yearly/monthly basis for the 128-CU package.

      -
      -

      Precautions

      Currently, only yearly/monthly elastic resource pools can be scaled.

      +

      Scenario

If the current specifications of your elastic resource pool do not meet your service needs, you can modify them using the Modify Specifications function.

      Scaling Up

      1. In the navigation pane on the left of the console, choose Resources > Resource Pool.
      2. Select the elastic resource pool you want and choose More > Modify Specifications in the Operation column.
3. In the Modify Specifications dialog box, set Operation to Scale-out and specify the number of CUs you want to add.
      4. Confirm the changes and click OK.
      5. Choose Job Management > SQL Jobs to view the status of the SCALE_POOL SQL job.

        If the job status is Scaling, the elastic resource pool is scaling up. Wait until the job status changes to Finished.

      diff --git a/docs/dli/umn/dli_01_0531.html b/docs/dli/umn/dli_01_0531.html index d9a30ffd..e3129681 100644 --- a/docs/dli/umn/dli_01_0531.html +++ b/docs/dli/umn/dli_01_0531.html @@ -14,7 +14,7 @@

      Step 1: Prepare a Data Source

      In this example, Kafka is the data source.

      For more information about Flink job data, see Preparing Flink Job Data.

      -

      Enable DIS to import Kafka data to DLI. For details, see "Buying a Kafka Instance" in the Distributed Message Service Kafka User Guide.

      +

      Enable DIS to import Kafka data to DLI. For details, see "Buying a Kafka Instance" in Distributed Message Service Kafka User Guide.

      1. Create the dependent Kafka resources.
        Before creating a Kafka instance, ensure the availability of resources, including a virtual private cloud (VPC), subnet, security group, and security group rules.
        • For details about how to create a VPC and subnet, see "Creating a VPC and Subnet" in Virtual Private Cloud User Guide. For details about how to create and use a subnet in an existing VPC, see "Create a Subnet for the VPC" in Virtual Private Cloud User Guide.
          • The created VPC and the Kafka instance you will create must be in the same region.
          • Retain the default settings unless otherwise specified.
        • For details about how to create a security group, see "Creating a Security Group" in the Virtual Private Cloud User Guide. For details about how to add rules to a security group, see "Creating a Subnet for the VPC" in the Virtual Private Cloud User Guide.
        @@ -29,7 +29,7 @@

        Step 2: Prepare a Data Output Channel

        To use RDS as the data output channel, create an RDS MySQL instance. For details, see "Getting Started with RDS for MySQL" in Getting Started with Relational Database Service.

        1. Log in to the RDS management console.
        2. Select a region in the upper left corner.
        3. Click Buy DB Instance in the upper right corner of the page and set related parameters. Retain the default values for other parameters.
          • Region: Select the region where DLI is located.
          • DB Instance Name: Enter rds-dliflink.
          • DB Engine: Select MySQL.
          • DB Engine Version: Select 8.0.
          • DB Instance Type: Select Primary/Standby.
          • Storage Type: Cloud SSD may be selected by default.
          • Primary AZ: Select a custom AZ.
          • Standby AZ: Select a custom AZ.
          • Instance Class: Select a class as needed and choose 2 vCPUs | 8 GB.
          • Storage Space (GB): Set it to 40.
          • VPC: Select the VPC and subnet created in 1.
          • Database Port: Enter 3306.
          • Security Group: Select the security group created in 1.
          • Administrator Password: **** (Keep the password secure. The system cannot retrieve your password.)
          • Confirm Password: ****
          • Parameter Template: Choose Default-MySQL-8.0.
          -
        4. Click Next and confirm the specifications.
        5. Click Submit. The RDS DB instance is created.
        6. Log in to the MySQL database and create table orders in database flink.
          Log in to the MySQL instance, click the flink database. On the displayed page, click SQL Window. Enter the following table creation statement in the SQL editing pane to create a table.
          CREATE TABLE `flink`.`orders` (
          +
        7. Click Next and confirm the specifications.
        8. Click Submit. The RDS DB instance is created.
        9. Log in to the MySQL database and create table orders in database flink.
          Log in to the MySQL instance, click the flink database. On the displayed page, click SQL Window. Enter the following table creation statement in the SQL editing pane to create a table.
          CREATE TABLE `flink`.`orders` (
           	`order_id` VARCHAR(32) NOT NULL,
           	`order_channel` VARCHAR(32) NULL,
           	`order_time` VARCHAR(32) NULL,
          @@ -124,7 +124,7 @@ insert into jdbcSink select * from kafkaSource;
        10. Connect to the Kafka cluster and send the following test data to the Kafka topics:
          {"order_id":"202103241000000001", "order_channel":"webShop", "order_time":"2021-03-24 10:00:00", "pay_amount":"100.00", "real_pay":"100.00", "pay_time":"2021-03-24 10:02:03", "user_id":"0001", "user_name":"Alice", "area_id":"330106"} 
           
           {"order_id":"202103241606060001", "order_channel":"appShop", "order_time":"2021-03-24 16:06:06", "pay_amount":"200.00", "real_pay":"180.00", "pay_time":"2021-03-24 16:10:06", "user_id":"0001", "user_name":"Alice", "area_id":"330106"}
          -
        11. Run the following SQL statement in the MySQL database to view data in the table:
          select * from order;
          +
        12. Run the following SQL statement in the MySQL database to view data in the table:
          select * from orders;
          The following is an example of the execution result copied from the MySQL database:
          202103241000000001,webShop,2021-03-24 10:00:00,100.0,100.0,2021-03-24 10:02:03,0001,Alice,330106
           202103241606060001,appShop,2021-03-24 16:06:06,200.0,180.0,2021-03-24 16:10:06,0001,Alice,330106
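The kafkaSource and jdbcSink table definitions referenced by the insert statement in this hunk are not shown here. A minimal sketch of what the Flink OpenSource SQL job could look like, assuming placeholder Kafka and RDS connection addresses and a hypothetical topic name (adapt them to your own instances), is given below; the column list mirrors the fields in the sample JSON records above.

CREATE TABLE kafkaSource (
  order_id string,
  order_channel string,
  order_time string,
  pay_amount double,
  real_pay double,
  pay_time string,
  user_id string,
  user_name string,
  area_id string
) WITH (
  'connector' = 'kafka',
  'topic' = 'testkafkatopic',                           -- hypothetical topic name
  'properties.bootstrap.servers' = 'KafkaAddress:9092', -- placeholder Kafka connection address
  'properties.group.id' = 'GroupId',
  'scan.startup.mode' = 'latest-offset',
  'format' = 'json'
);

CREATE TABLE jdbcSink (
  order_id string,
  order_channel string,
  order_time string,
  pay_amount double,
  real_pay double,
  pay_time string,
  user_id string,
  user_name string,
  area_id string
) WITH (
  'connector' = 'jdbc',
  'url' = 'jdbc:mysql://RdsAddress:3306/flink',         -- placeholder RDS private IP and port
  'table-name' = 'orders',
  'username' = 'RdsUsername',                           -- placeholder credentials
  'password' = 'RdsPassword'
);

insert into jdbcSink select * from kafkaSource;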
          diff --git a/docs/dli/umn/dli_01_0538.html b/docs/dli/umn/dli_01_0538.html index 8d96cd1c..95c382a3 100644 --- a/docs/dli/umn/dli_01_0538.html +++ b/docs/dli/umn/dli_01_0538.html @@ -14,7 +14,7 @@
        13. - diff --git a/docs/dli/umn/dli_01_0552.html b/docs/dli/umn/dli_01_0552.html index e8470cee..27082aaa 100644 --- a/docs/dli/umn/dli_01_0552.html +++ b/docs/dli/umn/dli_01_0552.html @@ -53,7 +53,7 @@

      Tag key

      You can specify the tag key in either of the following ways:

      -
      • Click the text box for tag key and select a predefined tag key from the drop-down list.

        To add a predefined tag, you need to create one on TMS and then select it from the Tag key drop-down list. You can click View predefined tags to go to the Predefined Tags page of the TMS console. Then, click Create Tag in the upper corner of the page to create a predefined tag.

        +
        • Click the text box for tag key and select a predefined tag key from the drop-down list.

          To add a predefined tag, you need to create one on TMS and then select it from the Tag key drop-down list. You can click View predefined tags to go to the Predefined Tags page of the TMS console. Then, click Create Tag in the upper corner of the page to create a predefined tag.

        • Enter a tag key in the text box.
          NOTE:

          A tag key can contain a maximum of 128 characters. Only letters, digits, spaces, and special characters (_.:=+-@) are allowed, but the value cannot start or end with a space or start with _sys_.

          diff --git a/docs/dli/umn/dli_01_0561.html b/docs/dli/umn/dli_01_0561.html index bfbee214..2bd52901 100644 --- a/docs/dli/umn/dli_01_0561.html +++ b/docs/dli/umn/dli_01_0561.html @@ -5,7 +5,7 @@
          • DEW is a comprehensive cloud-based encryption service that addresses data security, key security, and complex key management issues. You are advised to use DEW to store authentication information for data sources.
          • Datasource authentication is used to manage authentication information for accessing specified data sources. After datasource authentication is configured, you do not need to repeatedly configure data source authentication information in jobs, improving data source authentication security while enabling DLI to securely access data sources.

          This section describes how to use datasource authentication provided by DLI.

          -

          Constraints

          • Only Spark SQL and Flink OpenSource SQL 1.12 jobs support datasource authentication.
          • DLI supports four types of datasource authentication. Select an authentication type specific to each data source.
            • CSS: applies to 6.5.4 or later CSS clusters with the security mode enabled.
            • Kerberos: applies to MRS security clusters with Kerberos authentication enabled.
            • Kafka_SSL: applies to Kafka with SSL enabled.
            • Password: applies to GaussDB(DWS), RDS, DDS, and DCS.
            +

            Notes and Constraints

            • Only Spark SQL and Flink OpenSource SQL 1.12 jobs support datasource authentication.
            • DLI supports four types of datasource authentication. Select an authentication type specific to each data source.
              • CSS: applies to 6.5.4 or later CSS clusters with the security mode enabled.
              • Kerberos: applies to MRS security clusters with Kerberos authentication enabled.
              • Kafka_SSL: applies to Kafka with SSL enabled.
              • Password: applies to GaussDB(DWS), RDS, DDS, and DCS.

            Datasource Authentication Types

            DLI supports four types of datasource authentication. Select an authentication type specific to each data source.

            @@ -18,7 +18,7 @@

      Data Source

      Constraints

      +

      Notes and Constraints

      Data Source

      Constraints

      +

      Notes and Constraints

      Table 1 Queue properties

      Property

      diff --git a/docs/dli/umn/dli_01_0624.html b/docs/dli/umn/dli_01_0624.html new file mode 100644 index 00000000..8dc87e3b --- /dev/null +++ b/docs/dli/umn/dli_01_0624.html @@ -0,0 +1,71 @@ + + +

      Establishing a Network Connection Between DLI and Resources in a Shared VPC

      +

      VPC Sharing Overview

VPC sharing allows you to share VPC resources created in one account with other accounts using Resource Access Manager (RAM). For example, account A can share its VPC and subnets with account B. After accepting the share, account B can view the shared VPC and subnets and use them to create resources.

      +
      +

      DLI Use Cases

      An enterprise IT management account creates a VPC and subnets and shares them with other service accounts to facilitate centralized configuration of VPC security policies and orderly resource management.

      +

      Service accounts use the shared VPC and subnets to create resources and want to use DLI to submit jobs and access resources in the shared VPC. To do this, they need to establish a network connection between DLI and the resources in the shared VPC.

      +

      For example, account A is the enterprise IT management account and the owner of VPC resources. It creates the VPC and subnets and shares them with service account B.

      +

      Account B is a service account that uses the shared VPC and subnets to create resources and uses DLI to access them.

      +
      +

      Prerequisites

      Account A, as the resource owner, has created a VPC and subnets and designated account B as the principal.

      +
      +

      Establishing a Network Connection Between DLI and Resources in a Shared VPC

      1. Account A creates an enhanced datasource connection.

        1. Log in to the DLI management console using account A.
        2. In the navigation pane on the left, choose Datasource Connections.
        3. On the displayed Enhanced tab, click Create.

          Set parameters based on Table 1.

Table 1 Parameters for creating an enhanced datasource connection

• Connection Name: Name of the datasource connection to be created.
• Resource Pool: You do not need to set this parameter in this scenario.
• VPC: The VPC shared by account A with account B.
• Subnet: The subnet shared by account A with account B.
• Host Information: You do not need to set this parameter in this scenario.
• Tags: Tags used to identify cloud resources. A tag includes a tag key and a tag value.
        4. Click OK.
        +

      2. Account A grants account B access to the enhanced datasource connection created in 1.

        1. In the enhanced datasource connection list, locate the row containing the newly created one, click More in the Operation column, and select Manage Permission from the drop-down list.
        2. In the displayed Permissions dialog box, select Grant Permission for Set Permission, enter the ID of the project account B belongs to in Project ID, and click OK.
        +

      3. Account B binds a DLI elastic resource pool to the shared enhanced datasource connection.

        1. Log in to the DLI management console using account B.
        2. In the navigation pane on the left, choose Datasource Connections.
        3. On the displayed Enhanced tab, locate the row containing the enhanced datasource connection shared by account A, click More in the Operation column, and select Bind Resource Pool from the drop-down list.
        4. In the displayed Bind Resource Pool dialog box, select the created elastic resource pool for Resource Pool and click OK.

          If there is no elastic resource pool available, create one by referring to Creating an Elastic Resource Pool.

          +
        +

      4. Account B tests the network connectivity between the elastic resource pool and resources in the VPC.

        If there are resources in the shared VPC, ensure that the security group the resources belong to has allowed access to the elastic resource pool's CIDR block.

        +
        +
        1. Obtain the private IP address and port number of the data source in the shared VPC.

Take the RDS data source as an example. On the Instances page, click the target DB instance. In the Connection Information pane on the displayed page, view the private IP address and the Database Port of the RDS DB instance.

          +
        2. In the navigation pane of the DLI management console, choose Resources > Queue Management.
        3. Locate the queue under the elastic resource pool bound with the enhanced datasource connection, click More in the Operation column, and select Test Address Connectivity.
        4. Enter the data source connection address and port number to test the network connectivity.

          If the address is reachable, it means that account B has established a network connection between the DLI resource and the resources in the shared VPC. Account B can then submit jobs to the elastic resource pool's queue and access the resources in the shared VPC.

          +
        +

      +
      +
      +
      + +
      + diff --git a/docs/dli/umn/dli_03_0002.html b/docs/dli/umn/dli_03_0002.html deleted file mode 100644 index e2523026..00000000 --- a/docs/dli/umn/dli_03_0002.html +++ /dev/null @@ -1,11 +0,0 @@ - - -

      What Is DLI?

      -

      Data Lake Insight (DLI) is a serverless data processing and analysis service fully compatible with Apache Spark and Apache Flink ecosystems. It frees you from managing any server. DLI supports standard SQL and is compatible with Spark and Flink SQL. It also supports multiple access modes, and is compatible with mainstream data formats. DLI supports SQL statements and Spark applications for heterogeneous data sources, including CloudTable, RDS, GaussDB(DWS), CSS, OBS, custom databases on ECSs, and offline databases.

      -
      -
      - -
      - diff --git a/docs/dli/umn/dli_03_0017.html b/docs/dli/umn/dli_03_0017.html index 75b87126..83dc65b1 100644 --- a/docs/dli/umn/dli_03_0017.html +++ b/docs/dli/umn/dli_03_0017.html @@ -1,23 +1,21 @@

      How Do I Set the AK/SK for a Queue to Operate an OBS Table?

      -

      Hard-coded or plaintext AK and SK pose significant security risks. To ensure security, encrypt your AK and SK, store them in configuration files or environment variables, and decrypt them when needed.

      -
      -
      • If the AK and SK are obtained, set the parameters as follows:
        • Create SparkContext using code
          val sc: SparkContext = new SparkContext()
          +

          Setting Up a Spark Jar Job to Obtain the AK/SK

          • To obtain the AK/SK, set the parameters as follows:
            • Create a SparkContext using code.
              val sc: SparkContext = new SparkContext()
               sc.hadoopConfiguration.set("fs.obs.access.key", ak)
               sc.hadoopConfiguration.set("fs.obs.secret.key", sk)
              -
            • Create SparkSession using code
              val sparkSession: SparkSession = SparkSession
              +
            • Create a SparkSession using code.
              val sparkSession: SparkSession = SparkSession
                     .builder()
                     .config("spark.hadoop.fs.obs.access.key", ak)
                     .config("spark.hadoop.fs.obs.secret.key", sk)
                     .enableHiveSupport()
                     .getOrCreate()
            -
          • If ak, sk, and securitytoken are obtained, the temporary AK/SK and security token must be used at the same time during authentication. The setting is as follows:
            • Create SparkContext using code
              val sc: SparkContext = new SparkContext()
              +
            • To obtain the AK/SK and security token and use them together for authentication, set the parameters as follows:
              • Create a SparkContext using code.
                val sc: SparkContext = new SparkContext()
                 sc.hadoopConfiguration.set("fs.obs.access.key", ak)
                 sc.hadoopConfiguration.set("fs.obs.secret.key", sk)
                 sc.hadoopConfiguration.set("fs.obs.session.token", sts)
                -
              • Create SparkSession using code
                val sparkSession: SparkSession = SparkSession
                +
              • Create a SparkSession using code.
                val sparkSession: SparkSession = SparkSession
                       .builder()
                       .config("spark.hadoop.fs.obs.access.key", ak)
                       .config("spark.hadoop.fs.obs.secret.key", sk)
                @@ -27,6 +25,7 @@ sc.hadoopConfiguration.set("fs.obs.session.token", sts)
          +
      -

      Structure of the data storage directory in OBS: obs://obs-sink/car_infos/day=xx/part-x-x.

      After the data is generated, the OBS partition table can be established for subsequent batch processing through the following SQL statements:

      @@ -48,11 +47,9 @@ stored as parquet location 'obs://obs-sink/car-infos';
      -
    • Restore partition information from the associated OBS path.
      alter table car_infos recover partitions;
       
      -
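The full CREATE TABLE statement is elided in this hunk (only the stored as parquet location clause is visible). A minimal sketch of such an OBS partition table definition, with hypothetical columns chosen purely for illustration, could look like this:

CREATE TABLE car_infos (
  car_id STRING,     -- hypothetical columns; use the fields your Flink job actually writes
  car_owner STRING,
  car_price DOUBLE
)
PARTITIONED BY (day STRING)   -- matches the day=xx directories in the OBS path
STORED AS parquet
LOCATION 'obs://obs-sink/car-infos';

-- Restore partition information from the associated OBS path.
ALTER TABLE car_infos RECOVER PARTITIONS;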
    • diff --git a/docs/dli/umn/dli_03_0089.html b/docs/dli/umn/dli_03_0089.html index ab9cf67f..1165b642 100644 --- a/docs/dli/umn/dli_03_0089.html +++ b/docs/dli/umn/dli_03_0089.html @@ -30,7 +30,6 @@ sk = "{{mySk}}" );
      -

      Structure of the data storage directory in OBS: obs://obs-sink/car_infos/day=xx/part-x-x.

      After the data is generated, the OBS partition table can be established for subsequent batch processing through the following SQL statements:

      @@ -50,11 +49,9 @@ stored as parquet location 'obs://obs-sink/car-infos';
-
  • Restore partition information from the associated OBS path.
    alter table car_infos recover partitions;
     
    -
  • diff --git a/docs/dli/umn/dli_03_0093.html b/docs/dli/umn/dli_03_0093.html index 4a8132e4..41bd0d9f 100644 --- a/docs/dli/umn/dli_03_0093.html +++ b/docs/dli/umn/dli_03_0093.html @@ -1,11 +1,38 @@ -

    How Do I Eliminate Data Skew by Configuring AE Parameters?

    -

    Scenario

    If the execution of an SQL statement takes a long time, you need to access the Spark UI to check the execution status.

    -

    If data skew occurs, the running time of a stage exceeds 20 minutes and only one task is running.

    -
    Figure 1 Data skew example
    +

    How Do I Do When Data Skew Occurs During the Execution of a SQL Job?

    +

    What Is Data Skew?

    Data skew is a common issue during the execution of SQL jobs. When data is unevenly distributed, some compute nodes process significantly more data than others, which can impact the efficiency of the entire computation process.

    +

    For example, if you notice that a SQL query is taking a long time to execute, you can check its status in SparkUI. See Figure 1. If you see a stage that has been running for over 20 minutes with only one task remaining, it is likely due to data skew.

    +
    Figure 1 Data skew example
    -

    Procedure

    1. Log in to the DLI management console. Choose Job Management > SQL Jobs in the navigation pane. On the displayed page, locate the job you want to modify and click Edit in the Operation column to switch to the SQL Editor page.
    2. On the SQL editor page, click Set Property and add the following Spark parameters through the Settings pane:

      +

      Common Data Skew Scenarios

      • Group By aggregation skew

        During the execution of Group By aggregation, if some grouping keys have significantly more data than others, the larger groups will consume more compute resources and time during the aggregation process, resulting in slower processing speeds and data skew.

        +
      • JOIN operation skew

        During table JOIN operations, if the keys involved in the JOIN are unevenly distributed in one of the tables, a large amount of data will be concentrated in a few tasks while others have already completed, causing data skew.

        +
      +
      +

      Solution for Group By Data Skew

      Select a subset of data and run select count(*) as sum,Key from tbl group by Key order by sum desc to identify which keys are causing data skew.

      +

      Then, for the skewed keys, you can handle them separately by adding a salt to split them into multiple tasks for individual statistics, and finally combine the results of the separate statistics.

      +

      For example, consider the following SQL query where Key01 is identified as the skewed key causing a single task to process a large amount of data. The following steps can be taken to handle it:

      +
      SELECT
      +  a.Key,
      +  SUM(a.sum) AS Cnt
      +FROM
      +  (
      +    SELECT
      +      Key,
      +      count(*) AS sum
      +    FROM
      +      tbl
      +    GROUP BY
      +      Key,
      +      CASE
      +        WHEN KEY = 'Key01' THEN floor(random () * 200)
      +        ELSE 0
      +      END
      +  ) a
      +GROUP BY
      +  a.Key;
      +
      +

      Solution for JOIN Data Skew

      1. Log in to the DLI management console. Choose Job Management > SQL Jobs in the navigation pane. On the displayed page, locate the job you want to modify and click Edit in the Operation column to switch to the SQL Editor page.
      2. On the SQL editor page, click Set Property and add the following Spark parameters through the Settings pane:

The strings before the colons (:) are the configuration parameters, and the strings following the colons are the values.

        spark.sql.enableToString:false
         spark.sql.adaptive.join.enabled:true
        diff --git a/docs/dli/umn/dli_03_0100.html b/docs/dli/umn/dli_03_0100.html
        index 55859cd2..84f70503 100644
        --- a/docs/dli/umn/dli_03_0100.html
        +++ b/docs/dli/umn/dli_03_0100.html
        @@ -2,7 +2,7 @@
         
         

        How Do I Manage Fine-Grained DLI Permissions?

        DLI has a comprehensive permission control mechanism and supports fine-grained authentication through Identity and Access Management (IAM). You can create policies in IAM to manage DLI permissions.

        -

        IAM allows you to create IAM users for your employees using your cloud account, and assign permissions to control their access to specific types of resources. For example, some software developers in your enterprise need to use DLI resources but must not delete them or perform any high-risk operations. To achieve this result, you can create IAM users for the software developers and grant them only the permissions required for using DLI resources.

        +

        With IAM, you can use your account to create IAM users for your employees, and assign permissions to the users to control their access to specific resource types. For example, some software developers in your enterprise need to use DLI resources but must not delete them or perform any high-risk operations. To achieve this result, you can create IAM users for the software developers and grant them only the permissions required for using DLI resources.

        For a new user, you need to log in for the system to record the metadata before using DLI.

        IAM is free to use, and you only need to pay for the resources in your account.

        diff --git a/docs/dli/umn/dli_03_0119.html b/docs/dli/umn/dli_03_0119.html index 5e0d016c..dcbf069f 100644 --- a/docs/dli/umn/dli_03_0119.html +++ b/docs/dli/umn/dli_03_0119.html @@ -1,6 +1,6 @@ -

        Why Does the Submission Fail Due to Flink JAR File Conflict?

        +

        Why Does a Flink Jar Package Conflict Result in Submission Failure?

        Symptom

        The dependency of your Flink job conflicts with a built-in dependency of the DLI Flink platform. As a result, the job submission fails.

        Solution

        Delete your JAR file that is the same as an existing one of the DLI Flink platform.

        diff --git a/docs/dli/umn/dli_03_0126.html b/docs/dli/umn/dli_03_0126.html index 67ee6d71..45e91663 100644 --- a/docs/dli/umn/dli_03_0126.html +++ b/docs/dli/umn/dli_03_0126.html @@ -2,7 +2,7 @@

        How Do I Manage Tens of Thousands of Jobs Running on DLI?

You are advised to perform the following operations to run a large number of DLI jobs:

        -
        • Group the DLI jobs by type, and run each group on a queue.
• Alternatively, create IAM users to execute different types of jobs.
        +
        • Group the DLI jobs by type, and run each group on a queue.
• Alternatively, create IAM users to execute different types of jobs.

        Possible Causes

        When you create a migration job to DLI on the CDM console, you set Resource Queue to a DLI queue for general purpose. It should be a queue for SQL.

        Solution

1. On the DLI management console, click Queue Management in the navigation pane on the left. On the Queue Management page, check whether there are SQL queues.
          • If there are, go to 3.
          • If there are no SQL queues, go to 2 to buy an SQL queue.
          -
        2. Click Buy Queue to create a queue. Set Type to For SQL, set other parameters required, and click Buy.
        3. Go back to the CDM console and create a data migration job. Set Resource Queue to the created DLI SQL queue.
        4. Submit the migration job and view the job execution logs.
        +
      3. Choose Resources > Resource Pool. On the displayed page, locate the purchased elastic resource pool, and click Add Queue in the Operation column. Set Type to For SQL, set other parameters, and submit the creation request.
      4. Go back to the CDM console and create a data migration job. Set Resource Queue to the created DLI SQL queue.
      5. Submit the migration job and view the job execution logs.
    diff --git a/docs/dli/umn/dli_03_0179.html b/docs/dli/umn/dli_03_0179.html index c5904560..e486c053 100644 --- a/docs/dli/umn/dli_03_0179.html +++ b/docs/dli/umn/dli_03_0179.html @@ -6,7 +6,7 @@

    Check Whether a Port Number Is Added to the End of the Domain Name or IP Address

    The port number is required for the connectivity test.

    The following example tests the connectivity between a queue and a specified RDS DB instance. The RDS DB instance uses port 3306.

    @@ -21,8 +21,8 @@

    Solution: Modify the CIDR block of the queue or create another queue.

Planning the CIDR blocks for your queues helps you avoid this problem.

    -

    Check Whether the VPC Administrator Permission Is Granted to DLI

    View the connection logs to check whether there is the required permission.

    -

    Solution: Grant DLI the VPC Administrator permission and cancel the IAM ReadOnlyAccess authorization.

    +

    Check Whether the DLI Datasource Connections Agency Access Permission Is Granted to DLI

    You can determine if a connection failure is due to insufficient permissions by checking the connection logs.

    +

    Solution: Add the DLI Datasource Connections Agency Access authorization on the Global Configuration > Service Authorization page.

    Check Whether the Destination Security Group Allows Access from the CIDR Block of the Queue

    To connect to Kafka, GaussDB(DWS), and RDS instances, add security group rules for the DLI CIDR block to the security group where the instances belong. For example, to connect a queue to RDS, perform the following operations:
    1. Log in to the DLI console, choose Resources > Queue Management in the navigation pane on the left. On the displayed page, select the target queue, and click to expand the row containing the target queue to view its CIDR block.
    2. On the Instance Management page of the RDS console, click the instance name. In the Connection Information area, locate Database Port to obtain the port number of the RDS DB instance.
    3. In the Connection Information area locate the Security Group and click the group name to switch to the security group management page. Select the Inbound Rules tab and click Add Rule. Set the priority to 1, protocol to TCP, port to the database port number, and source to the CIDR block of the DLI queue. Click OK.
    diff --git a/docs/dli/umn/dli_03_0184.html b/docs/dli/umn/dli_03_0184.html index 3e6e94ce..75afb009 100644 --- a/docs/dli/umn/dli_03_0184.html +++ b/docs/dli/umn/dli_03_0184.html @@ -6,7 +6,7 @@

    Possible Causes

    If a table exists but cannot be queried, there is a high probability that the current user does not have the permission to query or operate the table.

    Solution

    Contact the user who creates the table and obtain the required permissions. To assign permissions, perform the following steps:

    -
1. Log in to the DLI management console as the user who created the table. Choose Data Management > Databases and Tables from the navigation pane on the left.
    2. Click the database name. The table management page is displayed. In the Operation column of the target table, click Permissions. The table permission management page is displayed.
    3. Click Set Permission. In the displayed dialog box, set Authorization Object to User, set Username to the name of the user that requires the permission, and select the required permissions. For example, Select Table and Insert permissions.
    4. Click OK.
    5. Log in to the DLI console as the user that has been granted permission and check whether the table can be queried.
    +
1. Log in to the DLI management console as the user who created the table. Choose Data Management > Databases and Tables from the navigation pane on the left.
    2. Click the database name. The table management page is displayed. In the Operation column of the target table, click Permissions. The table permission management page is displayed.
    3. Click Set Permission. In the displayed dialog box, set Authorization Object to User, set Username to the name of the user that requires the permission, and select the required permissions. For example, Select Table and Insert permissions.
    4. Click OK.
    5. Log in to the DLI console as the user that has been granted permission and check whether the table can be queried.
    diff --git a/docs/dli/umn/dli_03_0196.html b/docs/dli/umn/dli_03_0196.html index 9471e4f2..550d30cf 100644 --- a/docs/dli/umn/dli_03_0196.html +++ b/docs/dli/umn/dli_03_0196.html @@ -28,7 +28,7 @@
Shuffle data skew is caused by an unbalanced number of key values in a join.
    1. Perform group by and count on a join to collect statistics on the number of key values of each join. The following is an example:

      Join table lefttbl and table righttbl. num in the lefttbl table is the key value of the join. You can perform group by and count on lefttbl.num.

      SELECT * FROM lefttbl a LEFT join righttbl b on a.num = b.int2;
       SELECT count(1) as count,num from lefttbl  group by lefttbl.num ORDER BY count desc;
      -
    2. Use concat(cast(round(rand() * 999999999) as string) to generate a random number for each key value.
    3. If the skew is serious and random numbers cannot be generated, see How Do I Eliminate Data Skew by Configuring AE Parameters?
    +
• Use concat(num, cast(round(rand() * 999999999) as string)) to append a random number to each join key value (see the sketch after this list).
  • If the skew is serious and random numbers cannot be generated, see How Do I Do When Data Skew Occurs During the Execution of a SQL Job?
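A minimal sketch of this salting approach for the lefttbl/righttbl example, assuming righttbl is the smaller side and a salt range of 0 to 99 (both assumptions for illustration; sequence() and LATERAL VIEW explode() require a Spark version that provides them):

SELECT a.*, b.*
FROM (
  -- Salt the skewed side: append a random suffix so one hot key spreads across 100 tasks.
  SELECT *,
         concat(cast(num as string), '_', cast(floor(rand() * 100) as string)) AS salted_key
  FROM lefttbl
) a
JOIN (
  -- Expand the smaller side: replicate every key once per possible suffix so the salted keys still match.
  SELECT r.*,
         concat(cast(r.int2 as string), '_', cast(s.suffix as string)) AS salted_key
  FROM righttbl r
  LATERAL VIEW explode(sequence(0, 99)) s AS suffix
) b
ON a.salted_key = b.salted_key;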
  • diff --git a/docs/dli/umn/dli_03_0211.html b/docs/dli/umn/dli_03_0211.html index 811df367..7a531cff 100644 --- a/docs/dli/umn/dli_03_0211.html +++ b/docs/dli/umn/dli_03_0211.html @@ -11,7 +11,7 @@ - diff --git a/docs/dli/umn/dli_03_0221.html b/docs/dli/umn/dli_03_0221.html index 46cfdd36..6cdbe06e 100644 --- a/docs/dli/umn/dli_03_0221.html +++ b/docs/dli/umn/dli_03_0221.html @@ -5,8 +5,6 @@

    Solution

    The user does not have the permission to query the table.

    -

    In the navigation pane on the left of the DLI console page, choose Data Management > Databases and Tables, search for the desired database table, view the permission configuration, and grant the table query permission to the user who requires it.

    +

    In the navigation pane on the left of the DLI console page, choose Data Management > Databases and Tables, search for the desired database table, view the permission configuration, and grant the table query permission to the user who requires it.

    diff --git a/docs/dli/umn/dli_07_0005.html b/docs/dli/umn/dli_07_0005.html index a96dfde7..51e1ba77 100644 --- a/docs/dli/umn/dli_07_0005.html +++ b/docs/dli/umn/dli_07_0005.html @@ -1,20 +1,25 @@ -

    Constraints and Limitations

    +

    Notes and Constraints

    On Jobs

    • Only the latest 100 jobs are displayed on DLI's SparkUI.
    • A maximum of 1,000 job results can be displayed on the console. To view more or all jobs, export the job data to OBS.
    • To export job run logs, you must have the permission to access OBS buckets. You need to configure a DLI job bucket on the Global Configuration > Project page in advance.
    • The View Log button is not available for synchronization jobs and jobs running on the default queue.
    • Only Spark jobs support custom images.
    • An elastic resource pool supports a maximum of 32,000 CUs.
    -

    For details about job constraints, see Job Management.

    +

    For more notes and constraints on jobs, see Job Management.

    On Queues

    • A queue named default is preset in DLI for you to experience. Resources are allocated on demand.
    • Queue types:
      • For SQL: Spark SQL jobs can be submitted to SQL queues.
      • For general purpose: The queue is used to run Spark programs, Flink SQL jobs, and Flink Jar jobs.

      The queue type cannot be changed. If you want to use another queue type, purchase a new queue.

    • The region of a queue cannot be changed.
    • Queues with 16 CUs do not support scale-out or scale-in.
    • Queues with 64 CUs do not support scale-in.
    • A newly created queue can be scaled in or out only after a job is executed on the queue.
    • DLI queues cannot access the Internet.

    -

    For more constraints on using a DLI queue, see Queue Overview.

    +

    For more notes and constraints on using a DLI queue, see Queue Overview.

    On Elastic Resource Pools

    • The region of an elastic resource pool cannot be changed.
    • Jobs of Flink 1.10 or later can run in elastic resource pools.
    • The network segment of an elastic resource pool cannot be changed after being set.
    • You can only view the scaling history of resource pools in the last 30 days.
    • Elastic resource pools cannot access the Internet.

      +
• The number of CUs in an elastic resource pool can change when you set the CUs, add or delete queues in the pool, or modify the scaling policies of its queues, or when the system automatically triggers scaling. However, in some cases, the system cannot guarantee that scaling will reach the target number of CUs as planned.
      • If there are not enough physical resources, an elastic resource pool may not be able to scale out to the desired target size.
      • The system does not guarantee that an elastic resource pool will be scaled in to the desired target size.

The system checks the resource usage before scaling in the elastic resource pool to determine whether there is enough space for scaling in. If the existing resources cannot be scaled in according to the minimum scaling step, the pool may be scaled in only partially or may fail to be scaled in.

        +

        The scaling step may vary depending on the resource specifications, usually 16 CUs, 32 CUs, 48 CUs, 64 CUs, etc.

        +

For example, suppose an elastic resource pool has a capacity of 192 CUs, the queues in the pool are using 68 CUs for running jobs, and the plan is to scale in to 64 CUs.

+

When executing the scale-in task, the system determines that 124 CUs are idle and scales in by the minimum step of 64 CUs. The remaining 60 idle CUs are less than one full step and cannot be scaled in further. Therefore, after the scale-in task is executed, the capacity of the elastic resource pool is 128 CUs.

      -

      For more constraints on elastic resource pools, see Elastic Resource Pool Overview.

      +
    +

    For more notes and constraints on elastic resource pools, see Elastic Resource Pool Overview.

    On Resources

    • Database
      • default is the database built in DLI. You cannot create a database named default.
      • DLI supports a maximum of 50 databases.
    • Table
      • DLI supports a maximum of 5,000 tables.
      • DLI supports the following table types:
        • MANAGED: Data is stored in a DLI table.
        • EXTERNAL: Data is stored in an OBS table.
        • View: A view can only be created using SQL statements.
        • Datasource table: The table type is also EXTERNAL.
        @@ -24,24 +29,24 @@
      • Package
        • A package can be deleted, but a package group cannot be deleted.
        • The following types of packages can be uploaded:
          • JAR: JAR file
          • PyFile: User Python file
          • File: User file
          • ModelFile: User AI model file
      -

      For details about constraints on resources, see Data Management.

      +

      For more notes and constraints on resources, see Data Management.

    -

    On Enhanced Datasource Connections

    • Datasource connections cannot be created for the default queue.
    • Flink jobs can directly access DIS, OBS, and SMN data sources without using datasource connections.
• VPC Administrator permissions are required for enhanced connections to use VPCs, subnets, routes, and VPC peering connections.
    • If you use an enhanced datasource connection, the CIDR block of the elastic resource pool or queue cannot overlap with that of the data source.
    • Only queues bound with datasource connections can access datasource tables.
    • Datasource tables do not support the preview function.
    • When checking the connectivity of datasource connections, the constraints on IP addresses are as follows:
      • The IP address must be valid, which consists of four decimal numbers separated by periods (.). The value ranges from 0 to 255.
      • During the test, you can add a port after the IP address and separate them with colons (:). The port can contain a maximum of five digits. The value ranges from 0 to 65535.

        For example, 192.168.xx.xx or 192.168.xx.xx:8181.

        +

        On Enhanced Datasource Connections

        • Datasource connections cannot be created for the default queue.
        • Flink jobs can directly access DIS, OBS, and SMN data sources without using datasource connections.
• VPC Administrator permissions are required for enhanced connections to use VPCs, subnets, routes, and VPC peering connections.
        • If you use an enhanced datasource connection, the CIDR block of the elastic resource pool or queue cannot overlap with that of the data source.
        • Only queues bound with datasource connections can access datasource tables.
        • Datasource tables do not support the preview function.
        • When checking the connectivity of datasource connections, the notes and constraints on IP addresses are:
          • The IP address must be valid, which consists of four decimal numbers separated by periods (.). The value ranges from 0 to 255.
          • During the test, you can add a port after the IP address and separate them with colons (:). The port can contain a maximum of five digits. The value ranges from 0 to 65535.

            For example, 192.168.xx.xx or 192.168.xx.xx:8181.

          -
        • When checking the connectivity of datasource connections, the constraints on domain names are as follows:
          • The domain name can contain 1 to 255 characters. Only letters, digits, underscores (_), and hyphens (-) are allowed.
          • The top-level domain name must contain at least two letters, for example, .com, .net, and .cn.
          • During the test, you can add a port after the domain name and separate them with colons (:). The port can contain a maximum of five digits. The value ranges from 0 to 65535.

            For example, example.com:8080.

            +
          • When checking the connectivity of datasource connections, the notes and constraints on domain names are:
            • The domain name can contain 1 to 255 characters. Only letters, digits, underscores (_), and hyphens (-) are allowed.
            • The top-level domain name must contain at least two letters, for example, .com, .net, and .cn.
            • During the test, you can add a port after the domain name and separate them with colons (:). The port can contain a maximum of five digits. The value ranges from 0 to 65535.

              For example, example.com:8080.

          -

          For more constraints on enhanced datasource connections, see Enhanced Datasource Connection Overview.

          +

          For more notes and constraints on enhanced datasource connections, see Enhanced Datasource Connection Overview.

        On Datasource Authentication

        • Only Spark SQL and Flink OpenSource SQL 1.12 jobs support datasource authentication.
        • DLI supports four types of datasource authentication. Select an authentication type specific to each data source.
          • CSS: applies to 6.5.4 or later CSS clusters with the security mode enabled.
          • Kerberos: applies to MRS security clusters with Kerberos authentication enabled.
          • Kafka_SSL: applies to Kafka with SSL enabled.
          • Password: applies to GaussDB(DWS), RDS, DDS, and DCS.
        -

        For more constraints on datasource authentication, see Datasource Authentication Introduction.

        +

        For more notes and constraints on datasource authentication, see Datasource Authentication Introduction.

        On SQL Syntax

        • Constraints on the SQL syntax:
          • You are not allowed to specify a storage path when creating a DLI table using SQL statements.
        • Constraints on the size of SQL statements:
          • Each SQL statement should contain less than 500,000 characters.
          • The size of each SQL statement must be less than 1 MB.
        -

        Other

        • For details about quota constraints, see Quotas.
        • Recommended browsers for logging in to DLI:
          • Google Chrome 43.0 or later
          • Mozilla Firefox 38.0 or later
          • Internet Explorer 9.0 or later
          +

          Other

          • For quota notes and constraints, see Quotas.
          • Recommended browsers for logging in to DLI:
            • Google Chrome 43.0 or later
            • Mozilla Firefox 38.0 or later
            • Internet Explorer 9.0 or later