diff --git a/docs/dli/umn/ALL_META.TXT.json b/docs/dli/umn/ALL_META.TXT.json index 8f4d0d36..e09a1d5b 100644 --- a/docs/dli/umn/ALL_META.TXT.json +++ b/docs/dli/umn/ALL_META.TXT.json @@ -87,7 +87,7 @@ "code":"5", "des":"Only the latest 100 jobs are displayed on DLI's SparkUI.A maximum of 1,000 job results can be displayed on the console. To view more or all jobs, export the job data to O", "doc_type":"usermanual", - "kw":"Constraints and Limitations,Service Overview,User Guide", + "kw":"Notes and Constraints,Service Overview,User Guide", "search_title":"", "metedata":[ { @@ -97,7 +97,7 @@ "IsBot":"Yes" } ], - "title":"Constraints and Limitations", + "title":"Notes and Constraints", "githuburl":"" }, { @@ -172,10 +172,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Getting Started", @@ -273,9 +270,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -314,10 +310,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Job Management", @@ -373,10 +366,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Flink Job Management", @@ -414,9 +404,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -435,9 +424,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -596,10 +584,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Spark Job Management", @@ -677,10 +662,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Queue Management", @@ -718,9 +700,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -759,9 +740,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -798,9 +778,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -814,7 +793,7 @@ "code":"41", "des":"Elastic scaling can be performed for a newly created queue only when there were jobs running in this queue.Queues with 16 CUs do not support scale-out or scale-in.Queues ", "doc_type":"usermanual", - "kw":"Elastic Queue Scaling,Queue Management,User Guide", + "kw":"Elastic Scaling of Queues,Queue Management,User Guide", "search_title":"", "metedata":[ { @@ -824,7 +803,7 @@ "IsBot":"Yes" } ], - "title":"Elastic Queue Scaling", + "title":"Elastic Scaling of Queues", "githuburl":"" }, { @@ -935,10 +914,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Elastic Resource Pool", @@ -956,10 +932,7 @@ "metedata":[ { 
"prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Before You Start", @@ -1037,10 +1010,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Regular Operations", @@ -1171,7 +1141,7 @@ "node_id":"dli_01_0524.xml", "product_code":"dli", "code":"59", - "des":"If CUs of a yearly/monthly elastic resource pool cannot meet your service requirements, you can modify the CUs. In this case, you will be charged based on the number of C", + "des":"If the current specifications of your elastic resource pool do not meet your service needs, you can modify them using the change specifications function.In the navigation", "doc_type":"usermanual", "kw":"Modifying Specifications,Regular Operations,User Guide", "search_title":"", @@ -1256,10 +1226,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Data Management", @@ -1277,10 +1244,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Databases and Tables", @@ -1518,10 +1482,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Package Management", @@ -1599,9 +1560,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1620,9 +1580,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1661,10 +1620,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Job Templates", @@ -1682,9 +1638,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1723,9 +1678,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -1744,10 +1698,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Appendix", @@ -1785,9 +1736,9 @@ "metedata":[ { "prodname":"dli", + "IsMulti":"Yes", "opensource":"true", "documenttype":"usermanual", - "IsMulti":"No", "IsBot":"Yes" } ], @@ -1858,34 +1809,31 @@ "githuburl":"" }, { - "uri":"dli_01_0553.html", - "node_id":"dli_01_0553.xml", + "uri":"dli_01_0624.html", + "node_id":"dli_01_0624.xml", "product_code":"dli", "code":"93", - "des":"Delete an enhanced datasource connection that is no longer used on the console.Log in to the DLI management console.In the left navigation pane, choose Datasource Connect", + "des":"VPC sharing allows sharing VPC resources created in one account with other accounts using Resource Access Manager (RAM). 
For example, account A can share its VPC and subn", "doc_type":"usermanual", - "kw":"Deleting an Enhanced Datasource Connection,Enhanced Datasource Connections,User Guide", + "kw":"Establishing a Network Connection Between DLI and Resources in a Shared VPC,Enhanced Datasource Conn", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], - "title":"Deleting an Enhanced Datasource Connection", + "title":"Establishing a Network Connection Between DLI and Resources in a Shared VPC", "githuburl":"" }, { - "uri":"dli_01_0013.html", - "node_id":"dli_01_0013.xml", + "uri":"dli_01_0553.html", + "node_id":"dli_01_0553.xml", "product_code":"dli", "code":"94", - "des":"Host information is the mapping between host IP addresses and domain names. After you configure host information, jobs can only use the configured domain names to access ", + "des":"Delete an enhanced datasource connection that is no longer used on the console.Log in to the DLI management console.In the left navigation pane, choose Datasource Connect", "doc_type":"usermanual", - "kw":"Modifying Host Information,Enhanced Datasource Connections,User Guide", + "kw":"Deleting an Enhanced Datasource Connection,Enhanced Datasource Connections,User Guide", "search_title":"", "metedata":[ { @@ -1896,14 +1844,35 @@ "IsBot":"Yes" } ], - "title":"Modifying Host Information", + "title":"Deleting an Enhanced Datasource Connection", + "githuburl":"" + }, + { + "uri":"dli_01_0013.html", + "node_id":"dli_01_0013.xml", + "product_code":"dli", + "code":"95", + "des":"Host information is the mapping between host IP addresses and domain names. After you configure host information, jobs can only use the configured domain names to access ", + "doc_type":"usermanual", + "kw":"Modifying Host Information in an Elastic Resource Pool,Enhanced Datasource Connections,User Guide", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "IsMulti":"Yes", + "opensource":"true", + "documenttype":"usermanual", + "IsBot":"Yes" + } + ], + "title":"Modifying Host Information in an Elastic Resource Pool", "githuburl":"" }, { "uri":"dli_01_0514.html", "node_id":"dli_01_0514.xml", "product_code":"dli", - "code":"95", + "code":"96", "des":"The CIDR block of the DLI queue that is bound with a datasource connection cannot overlap with that of the data source.The default queue cannot be bound with a connection", "doc_type":"usermanual", "kw":"Binding and Unbinding a Queue,Enhanced Datasource Connections,User Guide", @@ -1924,7 +1893,7 @@ "uri":"dli_01_0014.html", "node_id":"dli_01_0014.xml", "product_code":"dli", - "code":"96", + "code":"97", "des":"A route is configured with the destination, next hop type, and next hop to determine where the network traffic is directed. 
Routes are classified into system routes and c", "doc_type":"usermanual", "kw":"Adding a Route,Enhanced Datasource Connections,User Guide", @@ -1945,7 +1914,7 @@ "uri":"dli_01_0556.html", "node_id":"dli_01_0556.xml", "product_code":"dli", - "code":"97", + "code":"98", "des":"Delete a route that is no longer used.A custom route table cannot be deleted if it is associated with a subnet.Log in to the DLI management console.In the left navigation", "doc_type":"usermanual", "kw":"Deleting a Route,Enhanced Datasource Connections,User Guide", @@ -1953,9 +1922,9 @@ "metedata":[ { "prodname":"dli", + "IsMulti":"Yes", "opensource":"true", "documenttype":"usermanual", - "IsMulti":"No", "IsBot":"Yes" } ], @@ -1966,7 +1935,7 @@ "uri":"dli_01_0018.html", "node_id":"dli_01_0018.xml", "product_code":"dli", - "code":"98", + "code":"99", "des":"Enhanced connections support user authorization by project. After authorization, users in the project have the permission to perform operations on the enhanced connection", "doc_type":"usermanual", "kw":"Enhanced Connection Permission Management,Enhanced Datasource Connections,User Guide", @@ -1987,7 +1956,7 @@ "uri":"dli_01_0019.html", "node_id":"dli_01_0019.xml", "product_code":"dli", - "code":"99", + "code":"100", "des":"A tag is a key-value pair customized by users and used to identify cloud resources. It helps users to classify and search for cloud resources. A tag consists of a tag key", "doc_type":"usermanual", "kw":"Enhanced Datasource Connection Tag Management,Enhanced Datasource Connections,User Guide", @@ -2008,7 +1977,7 @@ "uri":"dli_01_0422.html", "node_id":"dli_01_0422.xml", "product_code":"dli", - "code":"100", + "code":"101", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Datasource Authentication", @@ -2016,9 +1985,9 @@ "metedata":[ { "prodname":"dli", + "IsMulti":"Yes", "opensource":"true", "documenttype":"usermanual", - "IsMulti":"No", "IsBot":"Yes" } ], @@ -2029,7 +1998,7 @@ "uri":"dli_01_0561.html", "node_id":"dli_01_0561.xml", "product_code":"dli", - "code":"101", + "code":"102", "des":"When analyzing across multiple sources, it is not recommended to configure authentication information directly in a job as it can lead to password leakage. Instead, you a", "doc_type":"usermanual", "kw":"Overview,Datasource Authentication,User Guide", @@ -2050,7 +2019,7 @@ "uri":"dli_01_0427.html", "node_id":"dli_01_0427.xml", "product_code":"dli", - "code":"102", + "code":"103", "des":"Create a CSS datasource authentication on the DLI console to store the authentication information of the CSS security cluster to DLI. This will allow you to access to the", "doc_type":"usermanual", "kw":"Creating a CSS Datasource Authentication,Datasource Authentication,User Guide", @@ -2071,7 +2040,7 @@ "uri":"dli_01_0558.html", "node_id":"dli_01_0558.xml", "product_code":"dli", - "code":"103", + "code":"104", "des":"Create a Kerberos datasource authentication on the DLI console to store the authentication information of the data source to DLI. 
This will allow you to access to the dat", "doc_type":"usermanual", "kw":"Creating a Kerberos Datasource Authentication,Datasource Authentication,User Guide", @@ -2092,7 +2061,7 @@ "uri":"dli_01_0560.html", "node_id":"dli_01_0560.xml", "product_code":"dli", - "code":"104", + "code":"105", "des":"Create a Kafka_SSL datasource authentication on the DLI console to store the Kafka authentication information to DLI. This will allow you to access to Kafka instances wit", "doc_type":"usermanual", "kw":"Creating a Kafka_SSL Datasource Authentication,Datasource Authentication,User Guide", @@ -2113,7 +2082,7 @@ "uri":"dli_01_0559.html", "node_id":"dli_01_0559.xml", "product_code":"dli", - "code":"105", + "code":"106", "des":"Create a password datasource authentication on the DLI console to store passwords of the GaussDB(DWS), RDS, DCS, and DDS data sources to DLI. This will allow you to acces", "doc_type":"usermanual", "kw":"Creating a Password Datasource Authentication,Datasource Authentication,User Guide", @@ -2134,7 +2103,7 @@ "uri":"dli_01_0480.html", "node_id":"dli_01_0480.xml", "product_code":"dli", - "code":"106", + "code":"107", "des":"Grant permissions on a datasource authentication to users so multiple user jobs can use the datasource authentication without affecting each other.The administrator and t", "doc_type":"usermanual", "kw":"Datasource Authentication Permission Management,Datasource Authentication,User Guide", @@ -2142,9 +2111,9 @@ "metedata":[ { "prodname":"dli", + "IsMulti":"Yes", "opensource":"true", "documenttype":"usermanual", - "IsMulti":"No", "IsBot":"Yes" } ], @@ -2155,7 +2124,7 @@ "uri":"dli_01_0485.html", "node_id":"dli_01_0485.xml", "product_code":"dli", - "code":"107", + "code":"108", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Global Configuration", @@ -2163,10 +2132,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Global Configuration", @@ -2176,7 +2142,7 @@ "uri":"dli_01_0476.html", "node_id":"dli_01_0476.xml", "product_code":"dli", - "code":"108", + "code":"109", "des":"DLI allows you to set variables that are frequently used during job development as global variables on the DLI management console. This avoids repeated definitions during", "doc_type":"usermanual", "kw":"Global Variables,Global Configuration,User Guide", @@ -2196,7 +2162,7 @@ "uri":"dli_01_0533.html", "node_id":"dli_01_0533.xml", "product_code":"dli", - "code":"109", + "code":"110", "des":"You can grant permissions on a global variable to users.The administrator and the global variable owner have all permissions. You do not need to set permissions for them,", "doc_type":"usermanual", "kw":"Permission Management for Global Variables,Global Configuration,User Guide", @@ -2216,7 +2182,7 @@ "uri":"dli_01_0408.html", "node_id":"dli_01_0408.xml", "product_code":"dli", - "code":"110", + "code":"111", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Permissions Management", @@ -2234,7 +2200,7 @@ "uri":"dli_01_0440.html", "node_id":"dli_01_0440.xml", "product_code":"dli", - "code":"111", + "code":"112", "des":"DLI has a comprehensive permission control mechanism and supports fine-grained authentication through Identity and Access Management (IAM). You can create policies in IAM", "doc_type":"usermanual", "kw":"Overview,Permissions Management,User Guide", @@ -2254,7 +2220,7 @@ "uri":"dli_01_0418.html", "node_id":"dli_01_0418.xml", "product_code":"dli", - "code":"112", + "code":"113", "des":"You can use Identity and Access Management (IAM) to implement fine-grained permissions control on DLI resources. For details, see Overview.If your cloud account does not ", "doc_type":"usermanual", "kw":"Creating an IAM User and Granting Permissions,Permissions Management,User Guide", @@ -2274,7 +2240,7 @@ "uri":"dli_01_0451.html", "node_id":"dli_01_0451.xml", "product_code":"dli", - "code":"113", + "code":"114", "des":"Custom policies can be created as a supplement to the system policies of DLI. You can add actions to custom policies. For the actions supported for custom policies, see \"", "doc_type":"usermanual", "kw":"Creating a Custom Policy,Permissions Management,User Guide", @@ -2294,7 +2260,7 @@ "uri":"dli_01_0417.html", "node_id":"dli_01_0417.xml", "product_code":"dli", - "code":"114", + "code":"115", "des":"A resource is an object that exists within a service. You can select DLI resources by specifying their paths.", "doc_type":"usermanual", "kw":"DLI Resources,Permissions Management,User Guide", @@ -2314,7 +2280,7 @@ "uri":"dli_01_0475.html", "node_id":"dli_01_0475.xml", "product_code":"dli", - "code":"115", + "code":"116", "des":"Request conditions are useful in determining when a custom policy takes effect. A request condition consists of a condition key and operator. Condition keys are either gl", "doc_type":"usermanual", "kw":"DLI Request Conditions,Permissions Management,User Guide", @@ -2334,7 +2300,7 @@ "uri":"dli_01_0441.html", "node_id":"dli_01_0441.xml", "product_code":"dli", - "code":"116", + "code":"117", "des":"Table 1 lists the common operations supported by each system policy of DLI. Choose proper system policies according to this table. For details about the SQL statement per", "doc_type":"usermanual", "kw":"Common Operations Supported by DLI System Policy,Permissions Management,User Guide", @@ -2354,7 +2320,7 @@ "uri":"dli_01_0513.html", "node_id":"dli_01_0513.xml", "product_code":"dli", - "code":"117", + "code":"118", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Other Common Operations", @@ -2362,10 +2328,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Other Common Operations", @@ -2375,17 +2338,16 @@ "uri":"dli_01_0420.html", "node_id":"dli_01_0420.xml", "product_code":"dli", - "code":"118", - "des":"On the DLI management console, you can import data stored on OBS to DLI tables from Data Management > Databases and Tables > Table Management and SQL Editor pages. 
For de", + "code":"119", + "des":"On the DLI management console, you can import data stored in OBS into DLI tables.To import OBS data to a DLI table, either choose Data Management > Databases and Tables i", "doc_type":"usermanual", "kw":"Importing Data to a DLI Table,Other Common Operations,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -2396,7 +2358,7 @@ "uri":"dli_01_0445.html", "node_id":"dli_01_0445.xml", "product_code":"dli", - "code":"119", + "code":"120", "des":"This section describes metrics reported by DLI to Cloud Eye as well as their namespaces and dimensions. You can use the management console or APIs provided by Cloud Eye t", "doc_type":"usermanual", "kw":"Viewing Monitoring Metrics,Other Common Operations,User Guide", @@ -2416,7 +2378,7 @@ "uri":"dli_01_0318.html", "node_id":"dli_01_0318.xml", "product_code":"dli", - "code":"120", + "code":"121", "des":"With CTS, you can record operations associated with DLI for later query, audit, and backtrack operations.", "doc_type":"usermanual", "kw":"DLI Operations That Can Be Recorded by CTS,Other Common Operations,User Guide", @@ -2436,7 +2398,7 @@ "uri":"dli_01_0550.html", "node_id":"dli_01_0550.xml", "product_code":"dli", - "code":"121", + "code":"122", "des":"A quota limits the quantity of a resource available to users, thereby preventing spikes in the usage of the resource.You can also request for an increased quota if your e", "doc_type":"usermanual", "kw":"Quota Management,Other Common Operations,User Guide", @@ -2456,7 +2418,7 @@ "uri":"dli_01_0539.html", "node_id":"dli_01_0539.xml", "product_code":"dli", - "code":"122", + "code":"123", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"FAQ", @@ -2464,10 +2426,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"FAQ", @@ -2477,7 +2436,7 @@ "uri":"dli_03_0037.html", "node_id":"dli_03_0037.xml", "product_code":"dli", - "code":"123", + "code":"124", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Flink Jobs", @@ -2485,10 +2444,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Flink Jobs", @@ -2498,7 +2454,7 @@ "uri":"dli_03_0137.html", "node_id":"dli_03_0137.xml", "product_code":"dli", - "code":"124", + "code":"125", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Usage", @@ -2506,10 +2462,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Usage", @@ -2519,7 +2472,7 @@ "uri":"dli_03_0083.html", "node_id":"dli_03_0083.xml", "product_code":"dli", - "code":"125", + "code":"126", "des":"DLI Flink jobs support the following data formats:Avro, Avro_merge, BLOB, CSV, EMAIL, JSON, ORC, Parquet, and XML.DLI Flink jobs support data from the following data sour", "doc_type":"usermanual", "kw":"What Data Formats and Data Sources Are Supported by DLI Flink Jobs?,Usage,User Guide", @@ -2527,10 +2480,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Data Formats and Data Sources Are Supported by DLI Flink Jobs?", @@ -2540,7 +2490,7 @@ "uri":"dli_03_0139.html", "node_id":"dli_03_0139.xml", "product_code":"dli", - "code":"126", + "code":"127", "des":"A sub-user can view queues but cannot view Flink jobs. You can authorize the sub-user using DLI or IAM.Authorization on DLILog in to the DLI console using a tenant accoun", "doc_type":"usermanual", "kw":"How Do I Authorize a Subuser to View Flink Jobs?,Usage,User Guide", @@ -2548,10 +2498,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Authorize a Subuser to View Flink Jobs?", @@ -2561,7 +2508,7 @@ "uri":"dli_03_0090.html", "node_id":"dli_03_0090.xml", "product_code":"dli", - "code":"127", + "code":"128", "des":"DLI Flink jobs are highly available. You can enable the automatic restart function to automatically restart your jobs after short-time faults of peripheral services are r", "doc_type":"usermanual", "kw":"How Do I Set Auto Restart upon Exception for a Flink Job?,Usage,User Guide", @@ -2569,10 +2516,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Set Auto Restart upon Exception for a Flink Job?", @@ -2582,7 +2526,7 @@ "uri":"dli_03_0099.html", "node_id":"dli_03_0099.xml", "product_code":"dli", - "code":"128", + "code":"129", "des":"When you create a Flink SQL job or Flink Jar job, you can select Save Job Log on the job editing page to save job running logs to OBS.To set the OBS bucket for storing th", "doc_type":"usermanual", "kw":"How Do I Save Flink Job Logs?,Usage,User Guide", @@ -2590,10 +2534,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Save Flink Job Logs?", @@ -2603,7 +2544,7 @@ "uri":"dli_03_0043.html", "node_id":"dli_03_0043.xml", "product_code":"dli", - "code":"129", + "code":"130", "des":"DLI can output Flink job results to DIS. You can view the results in DIS. 
For details, see \"Obtaining Data from DIS\" in Data Ingestion Service User Guide.DLI can output F", "doc_type":"usermanual", "kw":"How Can I Check Flink Job Results?,Usage,User Guide", @@ -2611,10 +2552,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Can I Check Flink Job Results?", @@ -2624,7 +2562,7 @@ "uri":"dli_03_0160.html", "node_id":"dli_03_0160.xml", "product_code":"dli", - "code":"130", + "code":"131", "des":"Choose Job Management > Flink Jobs. In the Operation column of the target job, choose More > Permissions. When a new user is authorized, No such user. userName:xxxx. is d", "doc_type":"usermanual", "kw":"Why Is Error \"No such user. userName:xxxx.\" Reported on the Flink Job Management Page When I Grant P", @@ -2632,10 +2570,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"No such user. userName:xxxx.\" Reported on the Flink Job Management Page When I Grant Permission to a User?", @@ -2645,7 +2580,7 @@ "uri":"dli_03_0180.html", "node_id":"dli_03_0180.xml", "product_code":"dli", - "code":"131", + "code":"132", "des":"Checkpoint was enabled when a Flink job is created, and the OBS bucket for storing checkpoints was specified. After a Flink job is manually stopped, no message is display", "doc_type":"usermanual", "kw":"How Do I Know Which Checkpoint the Flink Job I Stopped Will Be Restored to When I Start the Job Agai", @@ -2653,10 +2588,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Know Which Checkpoint the Flink Job I Stopped Will Be Restored to When I Start the Job Again?", @@ -2666,7 +2598,7 @@ "uri":"dli_03_0036.html", "node_id":"dli_03_0036.xml", "product_code":"dli", - "code":"132", + "code":"133", "des":"When you set running parameters of a DLI Flink job, you can enable Alarm Generation upon Job Exception to receive alarms when the job runs abnormally or is in arrears.If ", "doc_type":"usermanual", "kw":"Why Is a Message Displayed Indicating That the SMN Topic Does Not Exist When I Use the SMN Topic in ", @@ -2674,10 +2606,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is a Message Displayed Indicating That the SMN Topic Does Not Exist When I Use the SMN Topic in DLI?", @@ -2687,7 +2616,7 @@ "uri":"dli_03_0131.html", "node_id":"dli_03_0131.xml", "product_code":"dli", - "code":"133", + "code":"134", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Flink SQL", @@ -2695,10 +2624,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Flink SQL", @@ -2708,7 +2634,7 @@ "uri":"dli_03_0130.html", "node_id":"dli_03_0130.xml", "product_code":"dli", - "code":"134", + "code":"135", "des":"The consumption capability of a Flink SQL job depends on the data source transmission, queue size, and job parameter settings. 
The peak consumption is 10 Mbit/s.", "doc_type":"usermanual", "kw":"How Much Data Can Be Processed in a Day by a Flink SQL Job?,Flink SQL,User Guide", @@ -2716,10 +2642,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Much Data Can Be Processed in a Day by a Flink SQL Job?", @@ -2729,7 +2652,7 @@ "uri":"dli_03_0061.html", "node_id":"dli_03_0061.xml", "product_code":"dli", - "code":"135", + "code":"136", "des":"The temp stream in Flink SQL is similar to a subquery. It is a logical stream used to simplify the SQL logic and does not generate data storage. Therefore, there is no ne", "doc_type":"usermanual", "kw":"Does Data in the Temporary Stream of Flink SQL Need to Be Cleared Periodically? How Do I Clear the D", @@ -2737,10 +2660,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Does Data in the Temporary Stream of Flink SQL Need to Be Cleared Periodically? How Do I Clear the Data?", @@ -2750,18 +2670,15 @@ "uri":"dli_03_0138.html", "node_id":"dli_03_0138.xml", "product_code":"dli", - "code":"136", - "des":"SymptomWhen you create a Flink SQL job and configure the parameters, you select an OBS bucket you have created. The system displays a message indicating that the OBS buck", + "code":"137", + "des":"When you create a Flink SQL job and configure the parameters, you select an OBS bucket you have created. The system displays a message indicating that the OBS bucket is n", "doc_type":"usermanual", "kw":"Why Is a Message Displayed Indicating That the OBS Bucket Is Not Authorized When I Select an OBS Buc", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is a Message Displayed Indicating That the OBS Bucket Is Not Authorized When I Select an OBS Bucket for a Flink SQL Job?", @@ -2771,7 +2688,7 @@ "uri":"dli_03_0089.html", "node_id":"dli_03_0089.xml", "product_code":"dli", - "code":"137", + "code":"138", "des":"When using a Flink SQL job, you need to create an OBS partition table for subsequent batch processing.In the following example, the day field is used as the partition fie", "doc_type":"usermanual", "kw":"How Do I Create an OBS Partitioned Table for a Flink SQL Job?,Flink SQL,User Guide", @@ -2779,10 +2696,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Create an OBS Partitioned Table for a Flink SQL Job?", @@ -2792,7 +2706,7 @@ "uri":"dli_03_0075.html", "node_id":"dli_03_0075.xml", "product_code":"dli", - "code":"138", + "code":"139", "des":"In this example, the day field is used as the partition field with the parquet encoding format (only the parquet format is supported currently) to dump car_info data to O", "doc_type":"usermanual", "kw":"How Do I Dump Data to OBS and Create an OBS Partitioned Table?,Flink SQL,User Guide", @@ -2800,10 +2714,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Dump Data to OBS and Create an OBS Partitioned Table?", @@ -2813,7 +2724,7 @@ "uri":"dli_03_0167.html", "node_id":"dli_03_0167.xml", "product_code":"dli", - 
"code":"139", + "code":"140", "des":"When I run the creation statement with an EL expression in the table name in a Flink SQL job, the following error message is displayed:DLI.0005: AnalysisException: t_user", "doc_type":"usermanual", "kw":"Why Is Error Message \"DLI.0005\" Displayed When I Use an EL Expression to Create a Table in a Flink S", @@ -2821,10 +2732,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error Message \"DLI.0005\" Displayed When I Use an EL Expression to Create a Table in a Flink SQL Job?", @@ -2834,7 +2742,7 @@ "uri":"dli_03_0168.html", "node_id":"dli_03_0168.xml", "product_code":"dli", - "code":"140", + "code":"141", "des":"After data is written to OBS through the Flink job output stream, data cannot be queried from the DLI table created in the OBS file path.For example, use the following Fl", "doc_type":"usermanual", "kw":"Why Is No Data Queried in the DLI Table Created Using the OBS File Path When Data Is Written to OBS ", @@ -2842,10 +2750,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is No Data Queried in the DLI Table Created Using the OBS File Path When Data Is Written to OBS by a Flink Job Output Stream?", @@ -2855,7 +2760,7 @@ "uri":"dli_03_0174.html", "node_id":"dli_03_0174.xml", "product_code":"dli", - "code":"141", + "code":"142", "des":"After a Flink SQL job is submitted on DLI, the job fails to be executed. The following error information is displayed in the job log:connect to DIS failed java.lang.Illeg", "doc_type":"usermanual", "kw":"Why Does a Flink SQL Job Fails to Be Executed, and Is \"connect to DIS failed java.lang.IllegalArgume", @@ -2863,10 +2768,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does a Flink SQL Job Fails to Be Executed, and Is \"connect to DIS failed java.lang.IllegalArgumentException: Access key cannot be null\" Displayed in the Log?", @@ -2876,7 +2778,7 @@ "uri":"dli_03_0176.html", "node_id":"dli_03_0176.xml", "product_code":"dli", - "code":"142", + "code":"143", "des":"Semantic verification for a Flink SQL job (reading DIS data) fails. The following information is displayed when the job fails:Get dis channel xxxinfo failed. 
error info: ", "doc_type":"usermanual", "kw":"Why Is Error \"Not authorized\" Reported When a Flink SQL Job Reads DIS Data?,Flink SQL,User Guide", @@ -2884,10 +2786,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"Not authorized\" Reported When a Flink SQL Job Reads DIS Data?", @@ -2897,7 +2796,7 @@ "uri":"dli_03_0232.html", "node_id":"dli_03_0232.xml", "product_code":"dli", - "code":"143", + "code":"144", "des":"After a Flink SQL job consumed Kafka and sent data to the Elasticsearch cluster, the job was successfully executed, but no data is available.Possible causes are as follow", "doc_type":"usermanual", "kw":"Data Writing Fails After a Flink SQL Job Consumed Kafka and Sank Data to the Elasticsearch Cluster,F", @@ -2905,10 +2804,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Data Writing Fails After a Flink SQL Job Consumed Kafka and Sank Data to the Elasticsearch Cluster", @@ -2918,7 +2814,7 @@ "uri":"dli_03_0132.html", "node_id":"dli_03_0132.xml", "product_code":"dli", - "code":"144", + "code":"145", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Flink Jar Jobs", @@ -2926,55 +2822,28 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Flink Jar Jobs", "githuburl":"" }, - { - "uri":"dli_03_0038.html", - "node_id":"dli_03_0038.xml", - "product_code":"dli", - "code":"145", - "des":"The procedure is as follows:Add the following code to the JAR file code of the Flink Jar job:// Configure the pom file on which the StreamExecutionEnvironment depends.\nSt", - "doc_type":"usermanual", - "kw":"How Do I Configure Checkpoints for Flink Jar Jobs and Save the Checkpoints to OBS?,Flink Jar Jobs,Us", - "search_title":"", - "metedata":[ - { - "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" - } - ], - "title":"How Do I Configure Checkpoints for Flink Jar Jobs and Save the Checkpoints to OBS?", - "githuburl":"" - }, { "uri":"dli_03_0044.html", "node_id":"dli_03_0044.xml", "product_code":"dli", "code":"146", - "des":"Configuration files can be uploaded for user-defined jobs (JAR).Upload the configuration file to DLI through Package Management.In the Other Dependencies area of the Flin", + "des":"You can upload configuration files for custom jobs (Jar).Upload the configuration file to DLI through Package Management.In the Other Dependencies area of the Flink Jar j", "doc_type":"usermanual", - "kw":"Does a Flink JAR Job Support Configuration File Upload? How Do I Upload a Configuration File?,Flink ", + "kw":"Can I Upload Configuration Files for Flink Jar Jobs?,Flink Jar Jobs,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], - "title":"Does a Flink JAR Job Support Configuration File Upload? 
How Do I Upload a Configuration File?", + "title":"Can I Upload Configuration Files for Flink Jar Jobs?", "githuburl":"" }, { @@ -2984,18 +2853,15 @@ "code":"147", "des":"The dependency of your Flink job conflicts with a built-in dependency of the DLI Flink platform. As a result, the job submission fails.Delete your JAR file that is the sa", "doc_type":"usermanual", - "kw":"Why Does the Submission Fail Due to Flink JAR File Conflict?,Flink Jar Jobs,User Guide", + "kw":"Why Does a Flink Jar Package Conflict Result in Submission Failure?,Flink Jar Jobs,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], - "title":"Why Does the Submission Fail Due to Flink JAR File Conflict?", + "title":"Why Does a Flink Jar Package Conflict Result in Submission Failure?", "githuburl":"" }, { @@ -3010,10 +2876,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does a Flink Jar Job Fail to Access GaussDB(DWS) and a Message Is Displayed Indicating Too Many Client Connections?", @@ -3031,10 +2894,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error Message \"Authentication failed\" Displayed During Flink Jar Job Running?", @@ -3052,10 +2912,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error Invalid OBS Bucket Name Reported After a Flink Job Submission Failed?", @@ -3073,10 +2930,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does the Flink Submission Fail Due to Hadoop JAR File Conflict?", @@ -3094,10 +2948,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Connect a Flink jar Job to SASL_SSL?", @@ -3115,10 +2966,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Performance Tuning", @@ -3136,10 +2984,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Optimize Performance of a Flink Job?", @@ -3157,10 +3002,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Write Data to Different Elasticsearch Clusters in a Flink Job?", @@ -3178,10 +3020,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Prevent Data Loss After Flink Job Restart?", @@ -3199,10 +3038,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"O&M Guide", @@ -3220,10 +3056,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - 
"IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Locate a Flink Job Submission Error?", @@ -3241,10 +3074,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Locate a Flink Job Running Error?", @@ -3262,10 +3092,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Can I Check if a Flink Job Can Be Restored From a Checkpoint After Restarting It?", @@ -3283,10 +3110,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does DIS Stream Not Exist During Job Semantic Check?", @@ -3304,10 +3128,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is the OBS Bucket Selected for Job Not Authorized?", @@ -3325,10 +3146,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Are Logs Not Written to the OBS Bucket After a DLI Flink Job Fails to Be Submitted for Running?", @@ -3346,10 +3164,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Information Displayed on the FlinkUI/Spark UI Page Incomplete?", @@ -3367,10 +3182,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is the Flink Job Abnormal Due to Heartbeat Timeout Between JobManager and TaskManager?", @@ -3388,10 +3200,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"Timeout expired while fetching topic metadata\" Repeatedly Reported in Flink JobManager Logs?", @@ -3409,10 +3218,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Problems Related to SQL Jobs", @@ -3430,10 +3236,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Usage", @@ -3451,10 +3254,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"SQL Jobs", @@ -3472,10 +3272,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Job Development", @@ -3493,10 +3290,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Merge Small Files?", @@ -3514,10 +3308,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Specify an OBS Path When Creating an OBS Table?", @@ -3535,10 +3326,7 @@ 
"metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Create a Table Using JSON Data in an OBS Bucket?", @@ -3556,10 +3344,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Set Local Variables in SQL Statements?", @@ -3577,10 +3362,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Can I Use the count Function to Perform Aggregation?", @@ -3598,10 +3380,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Synchronize DLI Table Data from One Region to Another?", @@ -3619,10 +3398,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Insert Table Data into Specific Fields of a Table Using a SQL Job?", @@ -3640,10 +3416,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Job O&M Errors", @@ -3661,10 +3434,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"path obs://xxx already exists\" Reported When Data Is Exported to OBS?", @@ -3682,10 +3452,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"SQL_ANALYSIS_ERROR: Reference 't.id' is ambiguous, could be: t.id, t.id.;\" Displayed When Two Tables Are Joined?", @@ -3703,10 +3470,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"The current account does not have permission to perform this operation,the current account was restricted. Restricted for no budget.\" Reported when a SQL Statement Is Executed?", @@ -3724,10 +3488,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"There should be at least one partition pruning predicate on partitioned table XX.YYY\" Reported When a Query Statement Is Executed?", @@ -3745,10 +3506,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"IllegalArgumentException: Buffer size too small. 
size\" Reported When Data Is Loaded to an OBS Foreign Table?", @@ -3766,10 +3524,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"DLI.0002 FileNotFoundException\" Reported During SQL Job Running?", @@ -3787,10 +3542,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is a Schema Parsing Error Reported When I Create a Hive Table Using CTAS?", @@ -3808,10 +3560,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"org.apache.hadoop.fs.obs.OBSIOException\" Reported When I Run DLI SQL Scripts on DataArts Studio?", @@ -3829,10 +3578,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"UQUERY_CONNECTOR_0001:Invoke DLI service api failed\" Reported in the Job Log When I Use CDM to Migrate Data to DLI?", @@ -3850,10 +3596,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"File not Found\" Reported When I Access a SQL Job?", @@ -3871,10 +3614,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"DLI.0003: AccessControlException XXX\" Reported When I Access a SQL Job?", @@ -3892,10 +3632,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"DLI.0001: org.apache.hadoop.security.AccessControlException: verifyBucketExists on {{bucket name}}: status [403]\" Reported When I Access a SQL Job?", @@ -3913,10 +3650,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"The current account does not have permission to perform this operation,the current account was restricted. Restricted for no budget\" Reported During SQL Statement Execution? 
Restricted for no budget.", @@ -3934,10 +3668,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"O&M Guide", @@ -3955,10 +3686,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Troubleshoot Slow SQL Jobs?", @@ -3976,10 +3704,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I View DLI SQL Logs?", @@ -3997,10 +3722,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I View SQL Execution Records?", @@ -4011,20 +3733,17 @@ "node_id":"dli_03_0093.xml", "product_code":"dli", "code":"196", - "des":"If the execution of an SQL statement takes a long time, you need to access the Spark UI to check the execution status.If data skew occurs, the running time of a stage exc", + "des":"Data skew is a common issue during the execution of SQL jobs. When data is unevenly distributed, some compute nodes process significantly more data than others, which can", "doc_type":"usermanual", - "kw":"How Do I Eliminate Data Skew by Configuring AE Parameters?,O&M Guide,User Guide", + "kw":"How Do I Do When Data Skew Occurs During the Execution of a SQL Job?,O&M Guide,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], - "title":"How Do I Eliminate Data Skew by Configuring AE Parameters?", + "title":"How Do I Do When Data Skew Occurs During the Execution of a SQL Job?", "githuburl":"" }, { @@ -4039,10 +3758,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Can I Do If a Table Cannot Be Queried on the DLI Console?", @@ -4060,10 +3776,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"The Compression Ratio of OBS Tables Is Too High", @@ -4081,10 +3794,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Can I Avoid Garbled Characters Caused by Inconsistent Character Codes?", @@ -4102,10 +3812,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Do I Need to Grant Table Permissions to a User and Project After I Delete a Table and Create One with the Same Name?", @@ -4123,10 +3830,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Can't I Query Table Data After Data Is Imported to a DLI Partitioned Table Because the File to Be Imported Does Not Contain Data in the Partitioning Column?", @@ -4144,10 +3848,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Fix the Data Error Caused 
by CRLF Characters in a Field of the OBS File Used to Create an External OBS Table?", @@ -4165,10 +3866,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does a SQL Job That Has Join Operations Stay in the Running State?", @@ -4186,10 +3884,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"The on Clause Is Not Added When Tables Are Joined. Cartesian Product Query Causes High Resource Usage of the Queue, and the Job Fails to Be Executed", @@ -4207,10 +3902,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Can't I Query Data After I Manually Add Data to the Partition Directory of an OBS Table?", @@ -4228,10 +3920,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is All Data Overwritten When insert overwrite Is Used to Overwrite Partitioned Table?", @@ -4249,10 +3938,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is a SQL Job Stuck in the Submitting State?", @@ -4270,10 +3956,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is the create_date Field in the RDS Table Is a Timestamp in the DLI query result?", @@ -4291,10 +3974,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Can I Do If datasize Cannot Be Changed After the Table Name Is Changed in a Finished SQL Job?", @@ -4312,10 +3992,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is the Data Volume Changes When Data Is Imported from DLI to OBS?", @@ -4333,10 +4010,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Problems Related to Spark Jobs", @@ -4354,10 +4028,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Usage", @@ -4375,10 +4046,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Spark Jobs", @@ -4396,10 +4064,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Job Development", @@ -4417,10 +4082,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Use Spark to Write Data into a DLI Table?", @@ -4431,17 +4093,14 @@ "node_id":"dli_03_0017.xml", "product_code":"dli", "code":"216", - "des":"Hard-coded or plaintext AK and SK pose significant security risks. 
To ensure security, encrypt your AK and SK, store them in configuration files or environment variables,", + "des":"To obtain the AK/SK, set the parameters as follows:Create a SparkContext using code.val sc: SparkContext = new SparkContext()\nsc.hadoopConfiguration.set(\"fs.obs.access.ke", "doc_type":"usermanual", "kw":"How Do I Set the AK/SK for a Queue to Operate an OBS Table?,Job Development,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Set the AK/SK for a Queue to Operate an OBS Table?", @@ -4459,10 +4118,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I View the Resource Usage of DLI Spark Jobs?", @@ -4480,10 +4136,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Use Python Scripts to Access the MySQL Database If the pymysql Module Is Missing from the Spark Job Results Stored in MySQL?", @@ -4501,10 +4154,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Run a Complex PySpark Program in DLI?", @@ -4522,10 +4172,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Does a Spark Job Access a MySQL Database?", @@ -4543,10 +4190,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Use JDBC to Set the spark.sql.shuffle.partitions Parameter to Improve the Task Concurrency?", @@ -4564,10 +4208,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Read Uploaded Files for a Spark Jar Job?", @@ -4585,10 +4226,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Job O&M Errors", @@ -4606,10 +4244,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Are Errors \"ResponseCode: 403\" and \"ResponseStatus: Forbidden\" Reported When a Spark Job Accesses OBS Data?", @@ -4627,10 +4262,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"verifyBucketExists on XXXX: status [403]\" Reported When I Use a Spark Job to Access an OBS Bucket to Which I Have Access Permission?", @@ -4648,10 +4280,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is a Job Running Timeout Reported When a Spark Job Runs a Large Amount of Data?", @@ -4669,10 +4298,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does the Job 
Fail to Be Executed and the Log Shows That the File Directory Is Abnormal When I Use a Spark Job to Access Files in SFTP?", @@ -4690,10 +4316,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does the Job Fail to Be Executed Due to Insufficient Database and Table Permissions?", @@ -4711,10 +4334,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"O&M Guide", @@ -4732,10 +4352,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Can't I Find the Specified Python Environment After Adding the Python Package?", @@ -4753,10 +4370,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is a Spark Jar Job Stuck in the Submitting State?", @@ -4774,10 +4388,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Product Consultation", @@ -4795,41 +4406,17 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Usage", "githuburl":"" }, - { - "uri":"dli_03_0002.html", - "node_id":"dli_03_0002.xml", - "product_code":"dli", - "code":"234", - "des":"Data Lake Insight (DLI) is a serverless data processing and analysis service fully compatible with Apache Spark and Apache Flink ecosystems. It frees you from managing an", - "doc_type":"usermanual", - "kw":"What Is DLI?,Usage,User Guide", - "search_title":"", - "metedata":[ - { - "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" - } - ], - "title":"What Is DLI?", - "githuburl":"" - }, { "uri":"dli_03_0025.html", "node_id":"dli_03_0025.xml", "product_code":"dli", - "code":"235", + "code":"234", "des":"DLI supports the following data formats:ParquetCSVORCJsonAvro", "doc_type":"usermanual", "kw":"Which Data Formats Does DLI Support?,Usage,User Guide", @@ -4837,10 +4424,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Which Data Formats Does DLI Support?", @@ -4850,7 +4434,7 @@ "uri":"dli_03_0115.html", "node_id":"dli_03_0115.xml", "product_code":"dli", - "code":"236", + "code":"235", "des":"The Spark component of DLI is a fully managed service. You can only use the DLI Spark through its APIs. The Spark component of MRS is built on the VM in an MRS cluster. 
", "doc_type":"usermanual", "kw":"What Are the Differences Between MRS Spark and DLI Spark?,Usage,User Guide", @@ -4858,10 +4442,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Are the Differences Between MRS Spark and DLI Spark?", @@ -4871,7 +4452,7 @@ "uri":"dli_03_0029.html", "node_id":"dli_03_0029.xml", "product_code":"dli", - "code":"237", + "code":"236", "des":"DLI data can be stored in either of the following:OBS: Data used by SQL jobs, Spark jobs, and Flink jobs can be stored in OBS, reducing storage costs.DLI: The column-base", "doc_type":"usermanual", "kw":"Where Can DLI Data Be Stored?,Usage,User Guide", @@ -4879,10 +4460,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Where Can DLI Data Be Stored?", @@ -4892,7 +4470,7 @@ "uri":"dli_03_0117.html", "node_id":"dli_03_0117.xml", "product_code":"dli", - "code":"238", + "code":"237", "des":"DLI tables store data within the DLI service, and you do not need to know the data storage path.OBS tables store data in your OBS buckets, and you need to manage the sour", "doc_type":"usermanual", "kw":"What Are the Differences Between DLI Tables and OBS Tables?,Usage,User Guide", @@ -4900,10 +4478,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Are the Differences Between DLI Tables and OBS Tables?", @@ -4913,7 +4488,7 @@ "uri":"dli_03_0010.html", "node_id":"dli_03_0010.xml", "product_code":"dli", - "code":"239", + "code":"238", "des":"Currently, DLI supports analysis only on the data uploaded to the cloud. In scenarios where regular (for example, on a per day basis) one-off analysis on incremental data", "doc_type":"usermanual", "kw":"How Can I Use DLI If Data Is Not Uploaded to OBS?,Usage,User Guide", @@ -4921,10 +4496,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Can I Use DLI If Data Is Not Uploaded to OBS?", @@ -4934,7 +4506,7 @@ "uri":"dli_03_0129.html", "node_id":"dli_03_0129.xml", "product_code":"dli", - "code":"240", + "code":"239", "des":"Data in the OBS bucket shared by IAM users under the same account can be imported. You cannot import data in the OBS bucket shared with other IAM account.", "doc_type":"usermanual", "kw":"Can I Import OBS Bucket Data Shared by Other Tenants into DLI?,Usage,User Guide", @@ -4942,10 +4514,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Can I Import OBS Bucket Data Shared by Other Tenants into DLI?", @@ -4955,7 +4524,7 @@ "uri":"dli_03_0264.html", "node_id":"dli_03_0264.xml", "product_code":"dli", - "code":"241", + "code":"240", "des":"Log in to the management console.Click in the upper left corner and select a region and a project.Click the My Quota icon in the upper right corner of the page.The Serv", "doc_type":"usermanual", "kw":"Why Is Error \"Failed to create the database. 
{\"error_code\":\"DLI.1028\";\"error_msg\":\"Already reached t", @@ -4963,10 +4532,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"Failed to create the database. {\"error_code\":\"DLI.1028\";\"error_msg\":\"Already reached the maximum quota of databases:XXX\".\" Reported?", @@ -4976,7 +4542,7 @@ "uri":"dli_03_0263.html", "node_id":"dli_03_0263.xml", "product_code":"dli", - "code":"242", + "code":"241", "des":"No, a global variable can only be used by the user who created it. Global variables can be used to simplify complex parameters. For example, long and difficult variables ", "doc_type":"usermanual", "kw":"Can a Member Account Use Global Variables Created by Other Member Accounts?,Usage,User Guide", @@ -4984,10 +4550,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Can a Member Account Use Global Variables Created by Other Member Accounts?", @@ -4997,7 +4560,7 @@ "uri":"dli_03_0222.html", "node_id":"dli_03_0222.xml", "product_code":"dli", - "code":"243", + "code":"242", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Job Management", @@ -5005,10 +4568,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Job Management", @@ -5018,7 +4578,7 @@ "uri":"dli_03_0126.html", "node_id":"dli_03_0126.xml", "product_code":"dli", - "code":"244", + "code":"243", "des":"If you are suggested to perform following operations to run a large number of DLI jobs:Group the DLI jobs by type, and run each group on a queue.Alternatively, create IAM", "doc_type":"usermanual", "kw":"How Do I Manage Tens of Thousands of Jobs Running on DLI?,Job Management,User Guide", @@ -5026,10 +4586,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Manage Tens of Thousands of Jobs Running on DLI?", @@ -5039,7 +4596,7 @@ "uri":"dli_03_0162.html", "node_id":"dli_03_0162.xml", "product_code":"dli", - "code":"245", + "code":"244", "des":"The field names of tables that have been created cannot be changed.You can create a table, define new table fields, and migrate data from the old table to the new one.", "doc_type":"usermanual", "kw":"How Do I Change the Name of a Field in a Created Table?,Job Management,User Guide", @@ -5047,10 +4604,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Change the Name of a Field in a Created Table?", @@ -5060,7 +4614,7 @@ "uri":"dli_03_0261.html", "node_id":"dli_03_0261.xml", "product_code":"dli", - "code":"246", + "code":"245", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Privacy and Security", @@ -5068,10 +4622,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Privacy and Security", @@ -5081,7 +4632,7 @@ "uri":"dli_03_0260.html", "node_id":"dli_03_0260.xml", "product_code":"dli", - "code":"247", + "code":"246", "des":"No. The spark.acls.enable configuration item is not used in DLI. The Apache Spark command injection vulnerability (CVE-2022-33891) does not exist in DLI.", "doc_type":"usermanual", "kw":"Does DLI Have the Apache Spark Command Injection Vulnerability (CVE-2022-33891)?,Privacy and Securit", @@ -5089,10 +4640,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Does DLI Have the Apache Spark Command Injection Vulnerability (CVE-2022-33891)?", @@ -5102,7 +4650,7 @@ "uri":"dli_03_0053.html", "node_id":"dli_03_0053.xml", "product_code":"dli", - "code":"248", + "code":"247", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Quota", @@ -5110,10 +4658,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Quota", @@ -5123,7 +4668,7 @@ "uri":"dli_03_0031.html", "node_id":"dli_03_0031.xml", "product_code":"dli", - "code":"249", + "code":"248", "des":"Log in to the management console.Click in the upper left corner and select Region and Project.Click (the My Quotas icon) in the upper right corner.The Service Quota pag", "doc_type":"usermanual", "kw":"How Do I View My Quotas?,Quota,User Guide", @@ -5131,10 +4676,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I View My Quotas?", @@ -5144,7 +4686,7 @@ "uri":"dli_03_0032.html", "node_id":"dli_03_0032.xml", "product_code":"dli", - "code":"250", + "code":"249", "des":"The system does not support online quota adjustment. To increase a resource quota, dial the hotline or send an email to the customer service. We will process your applica", "doc_type":"usermanual", "kw":"How Do I Increase a Quota?,Quota,User Guide", @@ -5152,10 +4694,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Increase a Quota?", @@ -5165,7 +4704,7 @@ "uri":"dli_03_0054.html", "node_id":"dli_03_0054.xml", "product_code":"dli", - "code":"251", + "code":"250", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Permission", @@ -5173,10 +4712,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Permission", @@ -5186,7 +4722,7 @@ "uri":"dli_03_0223.html", "node_id":"dli_03_0223.xml", "product_code":"dli", - "code":"252", + "code":"251", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Usage", @@ -5194,10 +4730,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Usage", @@ -5207,7 +4740,7 @@ "uri":"dli_03_0100.html", "node_id":"dli_03_0100.xml", "product_code":"dli", - "code":"253", + "code":"252", "des":"DLI has a comprehensive permission control mechanism and supports fine-grained authentication through Identity and Access Management (IAM). You can create policies in IAM", "doc_type":"usermanual", "kw":"How Do I Manage Fine-Grained DLI Permissions?,Usage,User Guide", @@ -5215,10 +4748,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Manage Fine-Grained DLI Permissions?", @@ -5228,7 +4758,7 @@ "uri":"dli_03_0008.html", "node_id":"dli_03_0008.xml", "product_code":"dli", - "code":"254", + "code":"253", "des":"You cannot perform permission-related operations on the partition column of a partitioned table.However, when you grant the permission of any non-partition column in a pa", "doc_type":"usermanual", "kw":"What Is Column Permission Granting of a DLI Partition Table?,Usage,User Guide", @@ -5236,10 +4766,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Is Column Permission Granting of a DLI Partition Table?", @@ -5249,7 +4776,7 @@ "uri":"dli_03_0226.html", "node_id":"dli_03_0226.xml", "product_code":"dli", - "code":"255", + "code":"254", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"O&M Guide", @@ -5257,10 +4784,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"O&M Guide", @@ -5270,7 +4794,7 @@ "uri":"dli_03_0140.html", "node_id":"dli_03_0140.xml", "product_code":"dli", - "code":"256", + "code":"255", "des":"When you submit a job, a message is displayed indicating that the job fails to be submitted due to insufficient permission caused by arrears. 
In this case, you need to ch", "doc_type":"usermanual", "kw":"Why Does My Account Have Insufficient Permissions Due to Arrears?,O&M Guide,User Guide", @@ -5278,10 +4802,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does My Account Have Insufficient Permissions Due to Arrears?", @@ -5291,7 +4812,7 @@ "uri":"dli_03_0195.html", "node_id":"dli_03_0195.xml", "product_code":"dli", - "code":"257", + "code":"256", "des":"When a user updates an existing program package, the following error information is displayed:\"error_code\"*DLI.0003\",\"error_msg\":\"Permission denied for resource 'resourc", "doc_type":"usermanual", "kw":"Why Does the System Display a Message Indicating Insufficient Permissions When I Update a Program Pa", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Does the System Display a Message Indicating Insufficient Permissions When I Update a Program Package?", @@ -5312,7 +4830,7 @@ "uri":"dli_03_0227.html", "node_id":"dli_03_0227.xml", "product_code":"dli", - "code":"258", + "code":"257", "des":"When the SQL query statement is executed, the system displays a message indicating that the user does not have the permission to query resources.Error information: DLI.00", "doc_type":"usermanual", "kw":"Why Is Error \"DLI.0003: Permission denied for resource...\" Reported When I Run a SQL Statement?,O&M ", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"DLI.0003: Permission denied for resource...\" Reported When I Run a SQL Statement?", @@ -5333,7 +4848,7 @@ "uri":"dli_03_0228.html", "node_id":"dli_03_0228.xml", "product_code":"dli", - "code":"259", + "code":"258", "des":"The table permission has been granted and verified. 
However, after a period of time, an error is reported indicating that the table query fails.There are two possible rea", "doc_type":"usermanual", "kw":"Why Can't I Query Table Data After I've Been Granted Table Permissions?,O&M Guide,User Guide", @@ -5341,10 +4856,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Can't I Query Table Data After I've Been Granted Table Permissions?", @@ -5354,7 +4866,7 @@ "uri":"dli_03_0057.html", "node_id":"dli_03_0057.xml", "product_code":"dli", - "code":"260", + "code":"259", "des":"If a table inherits database permissions, you do not need to regrant the inherited permissions to the table.When you grant permissions on a table on the console:If you se", "doc_type":"usermanual", "kw":"Will an Error Be Reported if the Inherited Permissions Are Regranted to a Table That Inherits Databa", @@ -5362,9 +4874,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -5375,7 +4886,7 @@ "uri":"dli_03_0067.html", "node_id":"dli_03_0067.xml", "product_code":"dli", - "code":"261", + "code":"260", "des":"User A created Table1.User B created View1 based on Table1.After the Select Table permission on Table1 is granted to user C, user C fails to query View1.User B does not h", "doc_type":"usermanual", "kw":"Why Can't I Query a View After I'm Granted the Select Table Permission on the View?,O&M Guide,User G", @@ -5383,9 +4894,8 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", "IsMulti":"No", + "documenttype":"usermanual", "IsBot":"Yes" } ], @@ -5396,7 +4906,7 @@ "uri":"dli_03_0049.html", "node_id":"dli_03_0049.xml", "product_code":"dli", - "code":"262", + "code":"261", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Queue", @@ -5404,10 +4914,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Queue", @@ -5417,7 +4924,7 @@ "uri":"dli_03_0229.html", "node_id":"dli_03_0229.xml", "product_code":"dli", - "code":"263", + "code":"262", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Usage", @@ -5425,10 +4932,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Usage", @@ -5438,7 +4942,7 @@ "uri":"dli_03_0109.html", "node_id":"dli_03_0109.xml", "product_code":"dli", - "code":"264", + "code":"263", "des":"Currently, you are not allowed to modify the description of a created queue. 
You can add the description when purchasing the queue.", "doc_type":"usermanual", "kw":"Can the Description of a DLI Queue Be Modified?,Usage,User Guide", @@ -5446,10 +4950,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Can the Description of a DLI Queue Be Modified?", @@ -5459,7 +4960,7 @@ "uri":"dli_03_0166.html", "node_id":"dli_03_0166.xml", "product_code":"dli", - "code":"265", + "code":"264", "des":"Deleting a queue does not cause table data loss in your database.", "doc_type":"usermanual", "kw":"Will Table Data in My Database Be Lost If I Delete a Queue?,Usage,User Guide", @@ -5467,10 +4968,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Will Table Data in My Database Be Lost If I Delete a Queue?", @@ -5480,7 +4978,7 @@ "uri":"dli_03_0170.html", "node_id":"dli_03_0170.xml", "product_code":"dli", - "code":"266", + "code":"265", "des":"You need to develop a mechanism to retry failed jobs. When a faulty queue is recovered, your application tries to submit the failed jobs to the queue again.", "doc_type":"usermanual", "kw":"How Does DLI Ensure the Reliability of Spark Jobs When a Queue Is Abnormal?,Usage,User Guide", @@ -5488,10 +4986,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Does DLI Ensure the Reliability of Spark Jobs When a Queue Is Abnormal?", @@ -5501,7 +4996,7 @@ "uri":"dli_03_0098.html", "node_id":"dli_03_0098.xml", "product_code":"dli", - "code":"267", + "code":"266", "des":"DLI allows you to subscribe to an SMN topic for failed jobs.Log in to the DLI console.In the navigation pane on the left, choose Queue Management.On the Queue Management ", "doc_type":"usermanual", "kw":"How Do I Monitor Queue Exceptions?,Usage,User Guide", @@ -5509,10 +5004,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Monitor Queue Exceptions?", @@ -5522,7 +5014,7 @@ "uri":"dli_03_0230.html", "node_id":"dli_03_0230.xml", "product_code":"dli", - "code":"268", + "code":"267", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"O&M Guide", @@ -5530,10 +5022,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"O&M Guide", @@ -5543,7 +5032,7 @@ "uri":"dli_03_0095.html", "node_id":"dli_03_0095.xml", "product_code":"dli", - "code":"269", + "code":"268", "des":"To check the running status of the DLI queue and determine whether to run more jobs on that queue, you need to check the queue load.Search for Cloud Eye on the console.In", "doc_type":"usermanual", "kw":"How Do I View DLI Queue Load?,O&M Guide,User Guide", @@ -5551,10 +5040,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I View DLI Queue Load?", @@ -5564,7 +5050,7 @@ "uri":"dli_03_0183.html", "node_id":"dli_03_0183.xml", "product_code":"dli", - "code":"270", + "code":"269", "des":"You need to check the large number of jobs in the Submitting and Running states on the queue.Use Cloud Eye to view jobs in different states on the queue. The procedure is", "doc_type":"usermanual", "kw":"How Do I Determine Whether There Are Too Many Jobs in the Current Queue?,O&M Guide,User Guide", @@ -5572,10 +5058,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Determine Whether There Are Too Many Jobs in the Current Queue?", @@ -5585,7 +5068,7 @@ "uri":"dli_03_0065.html", "node_id":"dli_03_0065.xml", "product_code":"dli", - "code":"271", + "code":"270", "des":"Currently, DLI provides two types of queues, For SQL and For general use. SQL queues are used to run SQL jobs. General-use queues are compatible with Spark queues of earl", "doc_type":"usermanual", "kw":"How Do I Switch an Earlier-Version Spark Queue to a General-Purpose Queue?,O&M Guide,User Guide", @@ -5593,10 +5076,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Switch an Earlier-Version Spark Queue to a General-Purpose Queue?", @@ -5606,7 +5086,7 @@ "uri":"dli_03_0193.html", "node_id":"dli_03_0193.xml", "product_code":"dli", - "code":"272", + "code":"271", "des":"DLI queues do not use resources or bandwidth when no job is running. 
In this case, the running status of DLI queues is not displayed on CES.", "doc_type":"usermanual", "kw":"Why Can't I View the Resource Running Status of DLI Queues on Cloud Eye?,O&M Guide,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Can't I View the Resource Running Status of DLI Queues on Cloud Eye?", @@ -5627,7 +5104,7 @@ "uri":"dli_03_0088.html", "node_id":"dli_03_0088.xml", "product_code":"dli", - "code":"273", + "code":"272", "des":"In DLI, 64 CU = 64 cores and 256 GB memory.In a Spark job, if the driver occupies 4 cores and 16 GB memory, the executor can occupy 60 cores and 240 GB memory.", "doc_type":"usermanual", "kw":"How Do I Allocate Queue Resources for Running Spark Jobs If I Have Purchased 64 CUs?,O&M Guide,User ", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Allocate Queue Resources for Running Spark Jobs If I Have Purchased 64 CUs?", @@ -5648,7 +5122,7 @@ "uri":"dli_03_0159.html", "node_id":"dli_03_0159.xml", "product_code":"dli", - "code":"274", + "code":"273", "des":"Queue plans create failed. The plan xxx target cu is out of quota is displayed when you create a scheduled scaling task.The CU quota of the current account is insufficien", "doc_type":"usermanual", "kw":"Why Is Error \"Queue plans create failed. The plan xxx target cu is out of quota\" Reported When I Sch", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"Queue plans create failed. The plan xxx target cu is out of quota\" Reported When I Schedule CU Changes?", @@ -5669,7 +5140,7 @@ "uri":"dli_03_0171.html", "node_id":"dli_03_0171.xml", "product_code":"dli", - "code":"275", + "code":"274", "des":"After a SQL job was submitted to the default queue, the job runs abnormally. The job log reported that the execution timed out. The exception logs are as follows:[ERROR] ", "doc_type":"usermanual", "kw":"Why Is a Timeout Exception Reported When a DLI SQL Statement Fails to Be Executed on the Default Que", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is a Timeout Exception Reported When a DLI SQL Statement Fails to Be Executed on the Default Queue?", @@ -5690,7 +5158,7 @@ "uri":"dli_03_0276.html", "node_id":"dli_03_0276.xml", "product_code":"dli", - "code":"276", + "code":"275", "des":"In daily big data analysis work, it is important to allocate and manage compute resources properly to provide a good job execution environment.You can allocate resources ", "doc_type":"usermanual", "kw":"How Can I Check the Actual and Used CUs for an Elastic Resource Pool as Well as the Required CUs for", "search_title":"", "metedata":[ { "prodname":"dli", @@ -5708,7 +5176,7 @@ "uri":"dli_03_0022.html", "node_id":"dli_03_0022.xml", "product_code":"dli", - "code":"277", + "code":"276", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Datasource Connections", @@ -5716,10 +5184,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Datasource Connections", @@ -5729,7 +5194,7 @@ "uri":"dli_03_0110.html", "node_id":"dli_03_0110.xml", "product_code":"dli", - "code":"278", + "code":"277", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Datasource Connections", @@ -5737,10 +5202,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Datasource Connections", @@ -5750,7 +5212,7 @@ "uri":"dli_03_0128.html", "node_id":"dli_03_0128.xml", "product_code":"dli", - "code":"279", + "code":"278", "des":"You need to create a VPC peering connection to enable network connectivity. Take MRS as an example. If DLI and MRS clusters are in the same VPC, and the security group is", "doc_type":"usermanual", "kw":"Why Do I Need to Create a VPC Peering Connection for an Enhanced Datasource Connection?,Datasource C", @@ -5758,10 +5220,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Do I Need to Create a VPC Peering Connection for an Enhanced Datasource Connection?", @@ -5771,7 +5230,7 @@ "uri":"dli_03_0237.html", "node_id":"dli_03_0237.xml", "product_code":"dli", - "code":"280", + "code":"279", "des":"An enhanced datasource connection failed to pass the network connectivity test. Datasource connection cannot be bound to a queue. The following error information is displ", "doc_type":"usermanual", "kw":"Failed to Bind a Queue to an Enhanced Datasource Connection,Datasource Connections,User Guide", @@ -5779,10 +5238,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Failed to Bind a Queue to an Enhanced Datasource Connection", @@ -5792,7 +5248,7 @@ "uri":"dli_03_0238.html", "node_id":"dli_03_0238.xml", "product_code":"dli", - "code":"281", + "code":"280", "des":"The outbound rule had been configured for the security group of the queue associated with the enhanced datasource connection. The datasource authentication used a passwor", "doc_type":"usermanual", "kw":"DLI Failed to Connect to GaussDB(DWS) Through an Enhanced Datasource Connection,Datasource Connectio", @@ -5800,10 +5256,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"DLI Failed to Connect to GaussDB(DWS) Through an Enhanced Datasource Connection", @@ -5813,7 +5266,7 @@ "uri":"dli_03_0179.html", "node_id":"dli_03_0179.xml", "product_code":"dli", - "code":"282", + "code":"281", "des":"A datasource connection is created and bound to a queue. 
The connectivity test fails and the following error information is displayed:failed to connect to specified addre", "doc_type":"usermanual", "kw":"What Do I Do If the Datasource Connection Is Created But the Network Connectivity Test Fails?,Datasou", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Do I Do If the Datasource Connection Is Created But the Network Connectivity Test Fails?", @@ -5834,7 +5284,7 @@ "uri":"dli_03_0186.html", "node_id":"dli_03_0186.xml", "product_code":"dli", - "code":"283", + "code":"282", "des":"Configuring the Connection Between a DLI Queue and a Data Source in a Private NetworkIf your DLI job needs to connect to a data source, for example, MRS, RDS, CSS, Kafka,", "doc_type":"usermanual", "kw":"How Do I Configure the Network Between a DLI Queue and a Data Source?,Datasource Connections,User Gu", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Configure the Network Between a DLI Queue and a Data Source?", @@ -5855,7 +5302,7 @@ "uri":"dli_03_0257.html", "node_id":"dli_03_0257.xml", "product_code":"dli", - "code":"284", + "code":"283", "des":"The possible causes and solutions are as follows:If you have created a queue, do not bind it to a datasource connection immediately. Wait for 5 to 10 minutes. After the c", "doc_type":"usermanual", "kw":"What Can I Do If a Datasource Connection Is Stuck in Creating State When I Try to Bind a Queue to It", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Can I Do If a Datasource Connection Is Stuck in Creating State When I Try to Bind a Queue to It?", @@ -5876,7 +5320,7 @@ "uri":"dli_03_0259.html", "node_id":"dli_03_0259.xml", "product_code":"dli", - "code":"285", + "code":"284", "des":"DLI enhanced datasource connection uses VPC peering to directly connect the VPC networks of the desired data sources for point-to-point data exchanges.", "doc_type":"usermanual", "kw":"How Do I Connect DLI to Data Sources?,Datasource Connections,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Connect DLI to Data Sources?", @@ -5897,7 +5338,7 @@ "uri":"dli_03_0112.html", "node_id":"dli_03_0112.xml", "product_code":"dli", - "code":"286", + "code":"285", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Cross-Source Analysis", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Cross-Source Analysis", @@ -5918,7 +5356,7 @@ "uri":"dli_03_0011.html", "node_id":"dli_03_0011.xml", "product_code":"dli", - "code":"287", + "code":"286", "des":"To perform a query on data stored on services rather than DLI, perform the following steps:Assume that the data to be queried is stored on multiple services (for example, O", "doc_type":"usermanual", "kw":"How Can I Perform a Query on Data Stored on Services Rather Than DLI?,Cross-Source Analysis,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Can I Perform a Query on Data Stored on Services Rather Than DLI?", @@ -5939,7 +5374,7 @@ "uri":"dli_03_0085.html", "node_id":"dli_03_0085.xml", "product_code":"dli", - "code":"288", + "code":"287", "des":"Connect VPCs in different regions.Create an enhanced datasource connection on DLI and bind it to a queue.Add a DLI route.", "doc_type":"usermanual", "kw":"How Can I Access Data Across Regions?,Cross-Source Analysis,User Guide", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Can I Access Data Across Regions?", @@ -5960,7 +5392,7 @@ "uri":"dli_03_0028.html", "node_id":"dli_03_0028.xml", "product_code":"dli", - "code":"289", + "code":"288", "des":"When data is inserted into DLI, set the ID field to NULL.", "doc_type":"usermanual", "kw":"How Do I Set the Auto-increment Primary Key or Other Fields That Are Automatically Filled in the RDS", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Set the Auto-increment Primary Key or Other Fields That Are Automatically Filled in the RDS Table When Creating a DLI Table and Associating It with the RDS Table?", @@ -5981,7 +5410,7 @@ "uri":"dli_03_0256.html", "node_id":"dli_03_0256.xml", "product_code":"dli", - "code":"290", + "code":"289", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Datasource Connection O&M", "search_title":"", "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Datasource Connection O&M", @@ -6002,7 +5428,7 @@ "uri":"dli_03_0047.html", "node_id":"dli_03_0047.xml", "product_code":"dli", - "code":"291", + "code":"290", "des":"Possible CausesThe network connectivity is abnormal. 
Check whether the security group is correctly selected and whether the VPC is correctly configured.The network connec", "doc_type":"usermanual", "kw":"Why Is the Error Message \"communication link failure\" Displayed When I Use a Newly Activated Datasou", @@ -6010,10 +5436,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is the Error Message \"communication link failure\" Displayed When I Use a Newly Activated Datasource Connection?", @@ -6023,7 +5446,7 @@ "uri":"dli_03_0080.html", "node_id":"dli_03_0080.xml", "product_code":"dli", - "code":"292", + "code":"291", "des":"The cluster host information is not added to the datasource connection. As a result, the KRB authentication fails, the connection times out, and no error is recorded in l", "doc_type":"usermanual", "kw":"Connection Times Out During MRS HBase Datasource Connection, and No Error Is Recorded in Logs,Dataso", @@ -6031,10 +5454,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Connection Times Out During MRS HBase Datasource Connection, and No Error Is Recorded in Logs", @@ -6044,7 +5464,7 @@ "uri":"dli_03_0111.html", "node_id":"dli_03_0111.xml", "product_code":"dli", - "code":"293", + "code":"292", "des":"When you create a VPC peering connection for the datasource connection, the following error information is displayed:Before you create a datasource connection, check whet", "doc_type":"usermanual", "kw":"Why Can't I Find the Subnet When Creating a DLI Datasource Connection?,Datasource Connection O&M,Use", @@ -6052,10 +5472,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Can't I Find the Subnet When Creating a DLI Datasource Connection?", @@ -6065,7 +5482,7 @@ "uri":"dli_03_0239.html", "node_id":"dli_03_0239.xml", "product_code":"dli", - "code":"294", + "code":"293", "des":"A datasource RDS table was created in the DataArts Studio, and the insert overwrite statement was executed to write data into RDS. 
DLI.0999: BatchUpdateException: Incorre", "doc_type":"usermanual", "kw":"Error Message \"Incorrect string value\" Is Displayed When insert overwrite Is Executed on a Datasourc", @@ -6073,10 +5490,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Error Message \"Incorrect string value\" Is Displayed When insert overwrite Is Executed on a Datasource RDS Table", @@ -6086,7 +5500,7 @@ "uri":"dli_03_0250.html", "node_id":"dli_03_0250.xml", "product_code":"dli", - "code":"295", + "code":"294", "des":"The system failed to create a datasource RDS table, and null pointer error was reported.The following table creation statement was used:The RDS database is in a PostGre c", "doc_type":"usermanual", "kw":"Null Pointer Error Is Displayed When the System Creates a Datasource RDS Table,Datasource Connection", @@ -6094,10 +5508,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Null Pointer Error Is Displayed When the System Creates a Datasource RDS Table", @@ -6107,7 +5518,7 @@ "uri":"dli_03_0251.html", "node_id":"dli_03_0251.xml", "product_code":"dli", - "code":"296", + "code":"295", "des":"The system failed to execute insert overwrite on the datasource GaussDB(DWS) table, and org.postgresql.util.PSQLException: ERROR: tuple concurrently updated was displayed", "doc_type":"usermanual", "kw":"Error Message \"org.postgresql.util.PSQLException: ERROR: tuple concurrently updated\" Is Displayed Wh", @@ -6115,10 +5526,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Error Message \"org.postgresql.util.PSQLException: ERROR: tuple concurrently updated\" Is Displayed When the System Executes insert overwrite on a Datasource GaussDB(DWS) Table", @@ -6128,7 +5536,7 @@ "uri":"dli_03_0252.html", "node_id":"dli_03_0252.xml", "product_code":"dli", - "code":"297", + "code":"296", "des":"A datasource table was used to import data to a CloudTable HBase table. This HBase table contains a column family and a rowkey for 100 million simulating data records. Th", "doc_type":"usermanual", "kw":"RegionTooBusyException Is Reported When Data Is Imported to a CloudTable HBase Table Through a Datas", @@ -6136,10 +5544,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"RegionTooBusyException Is Reported When Data Is Imported to a CloudTable HBase Table Through a Datasource Table", @@ -6149,7 +5554,7 @@ "uri":"dli_03_0253.html", "node_id":"dli_03_0253.xml", "product_code":"dli", - "code":"298", + "code":"297", "des":"A table was created on GaussDB(DWS) and then a datasource connection was created on DLI to read and write data. 
An error message was displayed during data writing, indica", "doc_type":"usermanual", "kw":"A Null Value Is Written Into a Non-Null Field When a DLI Datasource Connection Is Used to Connect to", @@ -6157,10 +5562,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"A Null Value Is Written Into a Non-Null Field When a DLI Datasource Connection Is Used to Connect to a GaussDB(DWS) Table", @@ -6170,7 +5572,7 @@ "uri":"dli_03_0254.html", "node_id":"dli_03_0254.xml", "product_code":"dli", - "code":"299", + "code":"298", "des":"A datasource GaussDB(DWS) table and the datasource connection were created in DLI, and the schema of the source table in GaussDB(DWS) were updated. During the job executi", "doc_type":"usermanual", "kw":"An Insert Operation Failed After the Schema of the GaussDB(DWS) Source Table Is Updated,Datasource C", @@ -6178,10 +5580,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"An Insert Operation Failed After the Schema of the GaussDB(DWS) Source Table Is Updated", @@ -6191,7 +5590,7 @@ "uri":"dli_03_0056.html", "node_id":"dli_03_0056.xml", "product_code":"dli", - "code":"300", + "code":"299", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"APIs", @@ -6199,10 +5598,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"APIs", @@ -6212,7 +5608,7 @@ "uri":"dli_03_0060.html", "node_id":"dli_03_0060.xml", "product_code":"dli", - "code":"301", + "code":"300", "des":"In the REST API provided by DLI, the request header can be added to the request URI, for example, Content-Type.Content-Type indicates the request body type or format. 
The", "doc_type":"usermanual", "kw":"Why Is Error \"unsupported media Type\" Reported When I Subimt a SQL Job?,APIs,User Guide", @@ -6220,10 +5616,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Why Is Error \"unsupported media Type\" Reported When I Subimt a SQL Job?", @@ -6233,7 +5626,7 @@ "uri":"dli_03_0125.html", "node_id":"dli_03_0125.xml", "product_code":"dli", - "code":"302", + "code":"301", "des":"When different IAM users call an API under the same enterprise project in the same region, the project ID is the same.", "doc_type":"usermanual", "kw":"Is the Project ID Fixed when Different IAM Users Call an API?,APIs,User Guide", @@ -6241,10 +5634,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"Is the Project ID Fixed when Different IAM Users Call an API?", @@ -6254,7 +5644,7 @@ "uri":"dli_03_0178.html", "node_id":"dli_03_0178.xml", "product_code":"dli", - "code":"303", + "code":"302", "des":"When the API call for submitting a SQL job times out, and the following error information is displayed:There are currently no resources tracked in the state, so there is ", "doc_type":"usermanual", "kw":"What Can I Do If an Error Is Reported When the Execution of the API for Creating a SQL Job Times Out", @@ -6262,10 +5652,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"What Can I Do If an Error Is Reported When the Execution of the API for Creating a SQL Job Times Out?", @@ -6275,7 +5662,7 @@ "uri":"dli_03_0058.html", "node_id":"dli_03_0058.xml", "product_code":"dli", - "code":"304", + "code":"303", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"SDKs", @@ -6283,10 +5670,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"SDKs", @@ -6296,7 +5680,7 @@ "uri":"dli_03_0073.html", "node_id":"dli_03_0073.xml", "product_code":"dli", - "code":"305", + "code":"304", "des":"When you query the SQL job results using SDK, the system checks the job status when the job is submitted. The timeout interval set in the system is 300s. 
If the job is no", "doc_type":"usermanual", "kw":"How Do I Set the Timeout Duration for Querying SQL Job Results Using SDK?,SDKs,User Guide", @@ -6304,10 +5688,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Set the Timeout Duration for Querying SQL Job Results Using SDK?", @@ -6317,7 +5698,7 @@ "uri":"dli_03_0255.html", "node_id":"dli_03_0255.xml", "product_code":"dli", - "code":"306", + "code":"305", "des":"Run the ping command to check whether dli.xxx can be accessed.If dli.xxx can be accessed, check whether DNS resolution is correctly configured.If dli.xxx can be accessed,", "doc_type":"usermanual", "kw":"How Do I Handle the dli.xxx,unable to resolve host address Error?,SDKs,User Guide", @@ -6325,10 +5706,7 @@ "metedata":[ { "prodname":"dli", - "opensource":"true", - "documenttype":"usermanual", - "IsMulti":"No", - "IsBot":"Yes" + "documenttype":"usermanual" } ], "title":"How Do I Handle the dli.xxx,unable to resolve host address Error?", @@ -6338,7 +5716,7 @@ "uri":"dli_01_00006.html", "node_id":"dli_01_00006.xml", "product_code":"dli", - "code":"307", + "code":"306", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"usermanual", "kw":"Change History,User Guide", diff --git a/docs/dli/umn/CLASS.TXT.json b/docs/dli/umn/CLASS.TXT.json index 082ba8e3..8f7006d1 100644 --- a/docs/dli/umn/CLASS.TXT.json +++ b/docs/dli/umn/CLASS.TXT.json @@ -38,7 +38,7 @@ { "desc":"Only the latest 100 jobs are displayed on DLI's SparkUI.A maximum of 1,000 job results can be displayed on the console. To view more or all jobs, export the job data to O", "product_code":"dli", - "title":"Constraints and Limitations", + "title":"Notes and Constraints", "uri":"dli_07_0005.html", "doc_type":"usermanual", "p_code":"1", @@ -362,7 +362,7 @@ { "desc":"Elastic scaling can be performed for a newly created queue only when there were jobs running in this queue.Queues with 16 CUs do not support scale-out or scale-in.Queues ", "product_code":"dli", - "title":"Elastic Queue Scaling", + "title":"Elastic Scaling of Queues", "uri":"dli_01_0487.html", "doc_type":"usermanual", "p_code":"34", @@ -522,7 +522,7 @@ "code":"58" }, { - "desc":"If CUs of a yearly/monthly elastic resource pool cannot meet your service requirements, you can modify the CUs. In this case, you will be charged based on the number of C", + "desc":"If the current specifications of your elastic resource pool do not meet your service needs, you can modify them using the change specifications function.In the navigation", "product_code":"dli", "title":"Modifying Specifications", "uri":"dli_01_0524.html", @@ -827,6 +827,15 @@ "p_code":"89", "code":"92" }, + { + "desc":"VPC sharing allows sharing VPC resources created in one account with other accounts using Resource Access Manager (RAM). 
For example, account A can share its VPC and subn", + "product_code":"dli", + "title":"Establishing a Network Connection Between DLI and Resources in a Shared VPC", + "uri":"dli_01_0624.html", + "doc_type":"usermanual", + "p_code":"89", + "code":"93" + }, { "desc":"Delete an enhanced datasource connection that is no longer used on the console.Log in to the DLI management console.In the left navigation pane, choose Datasource Connect", "product_code":"dli", @@ -834,16 +843,16 @@ "uri":"dli_01_0553.html", "doc_type":"usermanual", "p_code":"89", - "code":"93" + "code":"94" }, { "desc":"Host information is the mapping between host IP addresses and domain names. After you configure host information, jobs can only use the configured domain names to access ", "product_code":"dli", - "title":"Modifying Host Information", + "title":"Modifying Host Information in an Elastic Resource Pool", "uri":"dli_01_0013.html", "doc_type":"usermanual", "p_code":"89", - "code":"94" + "code":"95" }, { "desc":"The CIDR block of the DLI queue that is bound with a datasource connection cannot overlap with that of the data source.The default queue cannot be bound with a connection", @@ -852,7 +861,7 @@ "uri":"dli_01_0514.html", "doc_type":"usermanual", "p_code":"89", - "code":"95" + "code":"96" }, { "desc":"A route is configured with the destination, next hop type, and next hop to determine where the network traffic is directed. Routes are classified into system routes and c", @@ -861,7 +870,7 @@ "uri":"dli_01_0014.html", "doc_type":"usermanual", "p_code":"89", - "code":"96" + "code":"97" }, { "desc":"Delete a route that is no longer used.A custom route table cannot be deleted if it is associated with a subnet.Log in to the DLI management console.In the left navigation", @@ -870,7 +879,7 @@ "uri":"dli_01_0556.html", "doc_type":"usermanual", "p_code":"89", - "code":"97" + "code":"98" }, { "desc":"Enhanced connections support user authorization by project. After authorization, users in the project have the permission to perform operations on the enhanced connection", @@ -879,7 +888,7 @@ "uri":"dli_01_0018.html", "doc_type":"usermanual", "p_code":"89", - "code":"98" + "code":"99" }, { "desc":"A tag is a key-value pair customized by users and used to identify cloud resources. It helps users to classify and search for cloud resources. A tag consists of a tag key", @@ -888,7 +897,7 @@ "uri":"dli_01_0019.html", "doc_type":"usermanual", "p_code":"89", - "code":"99" + "code":"100" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -897,7 +906,7 @@ "uri":"dli_01_0422.html", "doc_type":"usermanual", "p_code":"", - "code":"100" + "code":"101" }, { "desc":"When analyzing across multiple sources, it is not recommended to configure authentication information directly in a job as it can lead to password leakage. Instead, you a", @@ -905,8 +914,8 @@ "title":"Overview", "uri":"dli_01_0561.html", "doc_type":"usermanual", - "p_code":"100", - "code":"101" + "p_code":"101", + "code":"102" }, { "desc":"Create a CSS datasource authentication on the DLI console to store the authentication information of the CSS security cluster to DLI. 
This will allow you to access to the", @@ -914,8 +923,8 @@ "title":"Creating a CSS Datasource Authentication", "uri":"dli_01_0427.html", "doc_type":"usermanual", - "p_code":"100", - "code":"102" + "p_code":"101", + "code":"103" }, { "desc":"Create a Kerberos datasource authentication on the DLI console to store the authentication information of the data source to DLI. This will allow you to access to the dat", @@ -923,8 +932,8 @@ "title":"Creating a Kerberos Datasource Authentication", "uri":"dli_01_0558.html", "doc_type":"usermanual", - "p_code":"100", - "code":"103" + "p_code":"101", + "code":"104" }, { "desc":"Create a Kafka_SSL datasource authentication on the DLI console to store the Kafka authentication information to DLI. This will allow you to access to Kafka instances wit", @@ -932,8 +941,8 @@ "title":"Creating a Kafka_SSL Datasource Authentication", "uri":"dli_01_0560.html", "doc_type":"usermanual", - "p_code":"100", - "code":"104" + "p_code":"101", + "code":"105" }, { "desc":"Create a password datasource authentication on the DLI console to store passwords of the GaussDB(DWS), RDS, DCS, and DDS data sources to DLI. This will allow you to acces", @@ -941,8 +950,8 @@ "title":"Creating a Password Datasource Authentication", "uri":"dli_01_0559.html", "doc_type":"usermanual", - "p_code":"100", - "code":"105" + "p_code":"101", + "code":"106" }, { "desc":"Grant permissions on a datasource authentication to users so multiple user jobs can use the datasource authentication without affecting each other.The administrator and t", @@ -950,8 +959,8 @@ "title":"Datasource Authentication Permission Management", "uri":"dli_01_0480.html", "doc_type":"usermanual", - "p_code":"100", - "code":"106" + "p_code":"101", + "code":"107" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -960,7 +969,7 @@ "uri":"dli_01_0485.html", "doc_type":"usermanual", "p_code":"", - "code":"107" + "code":"108" }, { "desc":"DLI allows you to set variables that are frequently used during job development as global variables on the DLI management console. This avoids repeated definitions during", @@ -968,8 +977,8 @@ "title":"Global Variables", "uri":"dli_01_0476.html", "doc_type":"usermanual", - "p_code":"107", - "code":"108" + "p_code":"108", + "code":"109" }, { "desc":"You can grant permissions on a global variable to users.The administrator and the global variable owner have all permissions. You do not need to set permissions for them,", @@ -977,8 +986,8 @@ "title":"Permission Management for Global Variables", "uri":"dli_01_0533.html", "doc_type":"usermanual", - "p_code":"107", - "code":"109" + "p_code":"108", + "code":"110" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -987,7 +996,7 @@ "uri":"dli_01_0408.html", "doc_type":"usermanual", "p_code":"", - "code":"110" + "code":"111" }, { "desc":"DLI has a comprehensive permission control mechanism and supports fine-grained authentication through Identity and Access Management (IAM). 
You can create policies in IAM", @@ -995,8 +1004,8 @@ "title":"Overview", "uri":"dli_01_0440.html", "doc_type":"usermanual", - "p_code":"110", - "code":"111" + "p_code":"111", + "code":"112" }, { "desc":"You can use Identity and Access Management (IAM) to implement fine-grained permissions control on DLI resources. For details, see Overview.If your cloud account does not ", @@ -1004,8 +1013,8 @@ "title":"Creating an IAM User and Granting Permissions", "uri":"dli_01_0418.html", "doc_type":"usermanual", - "p_code":"110", - "code":"112" + "p_code":"111", + "code":"113" }, { "desc":"Custom policies can be created as a supplement to the system policies of DLI. You can add actions to custom policies. For the actions supported for custom policies, see \"", @@ -1013,8 +1022,8 @@ "title":"Creating a Custom Policy", "uri":"dli_01_0451.html", "doc_type":"usermanual", - "p_code":"110", - "code":"113" + "p_code":"111", + "code":"114" }, { "desc":"A resource is an object that exists within a service. You can select DLI resources by specifying their paths.", @@ -1022,8 +1031,8 @@ "title":"DLI Resources", "uri":"dli_01_0417.html", "doc_type":"usermanual", - "p_code":"110", - "code":"114" + "p_code":"111", + "code":"115" }, { "desc":"Request conditions are useful in determining when a custom policy takes effect. A request condition consists of a condition key and operator. Condition keys are either gl", @@ -1031,8 +1040,8 @@ "title":"DLI Request Conditions", "uri":"dli_01_0475.html", "doc_type":"usermanual", - "p_code":"110", - "code":"115" + "p_code":"111", + "code":"116" }, { "desc":"Table 1 lists the common operations supported by each system policy of DLI. Choose proper system policies according to this table. For details about the SQL statement per", @@ -1040,8 +1049,8 @@ "title":"Common Operations Supported by DLI System Policy", "uri":"dli_01_0441.html", "doc_type":"usermanual", - "p_code":"110", - "code":"116" + "p_code":"111", + "code":"117" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1050,16 +1059,16 @@ "uri":"dli_01_0513.html", "doc_type":"usermanual", "p_code":"", - "code":"117" + "code":"118" }, { - "desc":"On the DLI management console, you can import data stored on OBS to DLI tables from Data Management > Databases and Tables > Table Management and SQL Editor pages. For de", + "desc":"On the DLI management console, you can import data stored in OBS into DLI tables.To import OBS data to a DLI table, either choose Data Management > Databases and Tables i", "product_code":"dli", "title":"Importing Data to a DLI Table", "uri":"dli_01_0420.html", "doc_type":"usermanual", - "p_code":"117", - "code":"118" + "p_code":"118", + "code":"119" }, { "desc":"This section describes metrics reported by DLI to Cloud Eye as well as their namespaces and dimensions. 
You can use the management console or APIs provided by Cloud Eye t", @@ -1067,8 +1076,8 @@ "title":"Viewing Monitoring Metrics", "uri":"dli_01_0445.html", "doc_type":"usermanual", - "p_code":"117", - "code":"119" + "p_code":"118", + "code":"120" }, { "desc":"With CTS, you can record operations associated with DLI for later query, audit, and backtrack operations.", @@ -1076,8 +1085,8 @@ "title":"DLI Operations That Can Be Recorded by CTS", "uri":"dli_01_0318.html", "doc_type":"usermanual", - "p_code":"117", - "code":"120" + "p_code":"118", + "code":"121" }, { "desc":"A quota limits the quantity of a resource available to users, thereby preventing spikes in the usage of the resource.You can also request for an increased quota if your e", @@ -1085,8 +1094,8 @@ "title":"Quota Management", "uri":"dli_01_0550.html", "doc_type":"usermanual", - "p_code":"117", - "code":"121" + "p_code":"118", + "code":"122" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1095,7 +1104,7 @@ "uri":"dli_01_0539.html", "doc_type":"usermanual", "p_code":"", - "code":"122" + "code":"123" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1103,8 +1112,8 @@ "title":"Flink Jobs", "uri":"dli_03_0037.html", "doc_type":"usermanual", - "p_code":"122", - "code":"123" + "p_code":"123", + "code":"124" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1112,8 +1121,8 @@ "title":"Usage", "uri":"dli_03_0137.html", "doc_type":"usermanual", - "p_code":"123", - "code":"124" + "p_code":"124", + "code":"125" }, { "desc":"DLI Flink jobs support the following data formats:Avro, Avro_merge, BLOB, CSV, EMAIL, JSON, ORC, Parquet, and XML.DLI Flink jobs support data from the following data sour", @@ -1121,8 +1130,8 @@ "title":"What Data Formats and Data Sources Are Supported by DLI Flink Jobs?", "uri":"dli_03_0083.html", "doc_type":"usermanual", - "p_code":"124", - "code":"125" + "p_code":"125", + "code":"126" }, { "desc":"A sub-user can view queues but cannot view Flink jobs. You can authorize the sub-user using DLI or IAM.Authorization on DLILog in to the DLI console using a tenant accoun", @@ -1130,8 +1139,8 @@ "title":"How Do I Authorize a Subuser to View Flink Jobs?", "uri":"dli_03_0139.html", "doc_type":"usermanual", - "p_code":"124", - "code":"126" + "p_code":"125", + "code":"127" }, { "desc":"DLI Flink jobs are highly available. 
You can enable the automatic restart function to automatically restart your jobs after short-time faults of peripheral services are r", @@ -1139,8 +1148,8 @@ "title":"How Do I Set Auto Restart upon Exception for a Flink Job?", "uri":"dli_03_0090.html", "doc_type":"usermanual", - "p_code":"124", - "code":"127" + "p_code":"125", + "code":"128" }, { "desc":"When you create a Flink SQL job or Flink Jar job, you can select Save Job Log on the job editing page to save job running logs to OBS.To set the OBS bucket for storing th", @@ -1148,8 +1157,8 @@ "title":"How Do I Save Flink Job Logs?", "uri":"dli_03_0099.html", "doc_type":"usermanual", - "p_code":"124", - "code":"128" + "p_code":"125", + "code":"129" }, { "desc":"DLI can output Flink job results to DIS. You can view the results in DIS. For details, see \"Obtaining Data from DIS\" in Data Ingestion Service User Guide.DLI can output F", @@ -1157,8 +1166,8 @@ "title":"How Can I Check Flink Job Results?", "uri":"dli_03_0043.html", "doc_type":"usermanual", - "p_code":"124", - "code":"129" + "p_code":"125", + "code":"130" }, { "desc":"Choose Job Management > Flink Jobs. In the Operation column of the target job, choose More > Permissions. When a new user is authorized, No such user. userName:xxxx. is d", @@ -1166,8 +1175,8 @@ "title":"Why Is Error \"No such user. userName:xxxx.\" Reported on the Flink Job Management Page When I Grant Permission to a User?", "uri":"dli_03_0160.html", "doc_type":"usermanual", - "p_code":"124", - "code":"130" + "p_code":"125", + "code":"131" }, { "desc":"Checkpoint was enabled when a Flink job is created, and the OBS bucket for storing checkpoints was specified. After a Flink job is manually stopped, no message is display", @@ -1175,8 +1184,8 @@ "title":"How Do I Know Which Checkpoint the Flink Job I Stopped Will Be Restored to When I Start the Job Again?", "uri":"dli_03_0180.html", "doc_type":"usermanual", - "p_code":"124", - "code":"131" + "p_code":"125", + "code":"132" }, { "desc":"When you set running parameters of a DLI Flink job, you can enable Alarm Generation upon Job Exception to receive alarms when the job runs abnormally or is in arrears.If ", @@ -1184,8 +1193,8 @@ "title":"Why Is a Message Displayed Indicating That the SMN Topic Does Not Exist When I Use the SMN Topic in DLI?", "uri":"dli_03_0036.html", "doc_type":"usermanual", - "p_code":"124", - "code":"132" + "p_code":"125", + "code":"133" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1193,8 +1202,8 @@ "title":"Flink SQL", "uri":"dli_03_0131.html", "doc_type":"usermanual", - "p_code":"123", - "code":"133" + "p_code":"124", + "code":"134" }, { "desc":"The consumption capability of a Flink SQL job depends on the data source transmission, queue size, and job parameter settings. The peak consumption is 10 Mbit/s.", @@ -1202,8 +1211,8 @@ "title":"How Much Data Can Be Processed in a Day by a Flink SQL Job?", "uri":"dli_03_0130.html", "doc_type":"usermanual", - "p_code":"133", - "code":"134" + "p_code":"134", + "code":"135" }, { "desc":"The temp stream in Flink SQL is similar to a subquery. It is a logical stream used to simplify the SQL logic and does not generate data storage. Therefore, there is no ne", @@ -1211,17 +1220,17 @@ "title":"Does Data in the Temporary Stream of Flink SQL Need to Be Cleared Periodically? 
How Do I Clear the Data?", "uri":"dli_03_0061.html", "doc_type":"usermanual", - "p_code":"133", - "code":"135" + "p_code":"134", + "code":"136" }, { - "desc":"SymptomWhen you create a Flink SQL job and configure the parameters, you select an OBS bucket you have created. The system displays a message indicating that the OBS buck", + "desc":"When you create a Flink SQL job and configure the parameters, you select an OBS bucket you have created. The system displays a message indicating that the OBS bucket is n", "product_code":"dli", "title":"Why Is a Message Displayed Indicating That the OBS Bucket Is Not Authorized When I Select an OBS Bucket for a Flink SQL Job?", "uri":"dli_03_0138.html", "doc_type":"usermanual", - "p_code":"133", - "code":"136" + "p_code":"134", + "code":"137" }, { "desc":"When using a Flink SQL job, you need to create an OBS partition table for subsequent batch processing.In the following example, the day field is used as the partition fie", @@ -1229,8 +1238,8 @@ "title":"How Do I Create an OBS Partitioned Table for a Flink SQL Job?", "uri":"dli_03_0089.html", "doc_type":"usermanual", - "p_code":"133", - "code":"137" + "p_code":"134", + "code":"138" }, { "desc":"In this example, the day field is used as the partition field with the parquet encoding format (only the parquet format is supported currently) to dump car_info data to O", @@ -1238,8 +1247,8 @@ "title":"How Do I Dump Data to OBS and Create an OBS Partitioned Table?", "uri":"dli_03_0075.html", "doc_type":"usermanual", - "p_code":"133", - "code":"138" + "p_code":"134", + "code":"139" }, { "desc":"When I run the creation statement with an EL expression in the table name in a Flink SQL job, the following error message is displayed:DLI.0005: AnalysisException: t_user", @@ -1247,8 +1256,8 @@ "title":"Why Is Error Message \"DLI.0005\" Displayed When I Use an EL Expression to Create a Table in a Flink SQL Job?", "uri":"dli_03_0167.html", "doc_type":"usermanual", - "p_code":"133", - "code":"139" + "p_code":"134", + "code":"140" }, { "desc":"After data is written to OBS through the Flink job output stream, data cannot be queried from the DLI table created in the OBS file path.For example, use the following Fl", @@ -1256,8 +1265,8 @@ "title":"Why Is No Data Queried in the DLI Table Created Using the OBS File Path When Data Is Written to OBS by a Flink Job Output Stream?", "uri":"dli_03_0168.html", "doc_type":"usermanual", - "p_code":"133", - "code":"140" + "p_code":"134", + "code":"141" }, { "desc":"After a Flink SQL job is submitted on DLI, the job fails to be executed. The following error information is displayed in the job log:connect to DIS failed java.lang.Illeg", @@ -1265,8 +1274,8 @@ "title":"Why Does a Flink SQL Job Fails to Be Executed, and Is \"connect to DIS failed java.lang.IllegalArgumentException: Access key cannot be null\" Displayed in the Log?", "uri":"dli_03_0174.html", "doc_type":"usermanual", - "p_code":"133", - "code":"141" + "p_code":"134", + "code":"142" }, { "desc":"Semantic verification for a Flink SQL job (reading DIS data) fails. The following information is displayed when the job fails:Get dis channel xxxinfo failed. 
error info: ", @@ -1274,8 +1283,8 @@ "title":"Why Is Error \"Not authorized\" Reported When a Flink SQL Job Reads DIS Data?", "uri":"dli_03_0176.html", "doc_type":"usermanual", - "p_code":"133", - "code":"142" + "p_code":"134", + "code":"143" }, { "desc":"After a Flink SQL job consumed Kafka and sent data to the Elasticsearch cluster, the job was successfully executed, but no data is available.Possible causes are as follow", @@ -1283,8 +1292,8 @@ "title":"Data Writing Fails After a Flink SQL Job Consumed Kafka and Sank Data to the Elasticsearch Cluster", "uri":"dli_03_0232.html", "doc_type":"usermanual", - "p_code":"133", - "code":"143" + "p_code":"134", + "code":"144" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1292,34 +1301,25 @@ "title":"Flink Jar Jobs", "uri":"dli_03_0132.html", "doc_type":"usermanual", - "p_code":"123", - "code":"144" - }, - { - "desc":"The procedure is as follows:Add the following code to the JAR file code of the Flink Jar job:// Configure the pom file on which the StreamExecutionEnvironment depends.\nSt", - "product_code":"dli", - "title":"How Do I Configure Checkpoints for Flink Jar Jobs and Save the Checkpoints to OBS?", - "uri":"dli_03_0038.html", - "doc_type":"usermanual", - "p_code":"144", + "p_code":"124", "code":"145" }, { - "desc":"Configuration files can be uploaded for user-defined jobs (JAR).Upload the configuration file to DLI through Package Management.In the Other Dependencies area of the Flin", + "desc":"You can upload configuration files for custom jobs (Jar).Upload the configuration file to DLI through Package Management.In the Other Dependencies area of the Flink Jar j", "product_code":"dli", - "title":"Does a Flink JAR Job Support Configuration File Upload? How Do I Upload a Configuration File?", + "title":"Can I Upload Configuration Files for Flink Jar Jobs?", "uri":"dli_03_0044.html", "doc_type":"usermanual", - "p_code":"144", + "p_code":"145", "code":"146" }, { "desc":"The dependency of your Flink job conflicts with a built-in dependency of the DLI Flink platform. 
As a result, the job submission fails.Delete your JAR file that is the sa", "product_code":"dli", - "title":"Why Does the Submission Fail Due to Flink JAR File Conflict?", + "title":"Why Does a Flink Jar Package Conflict Result in Submission Failure?", "uri":"dli_03_0119.html", "doc_type":"usermanual", - "p_code":"144", + "p_code":"145", "code":"147" }, { @@ -1328,7 +1328,7 @@ "title":"Why Does a Flink Jar Job Fail to Access GaussDB(DWS) and a Message Is Displayed Indicating Too Many Client Connections?", "uri":"dli_03_0161.html", "doc_type":"usermanual", - "p_code":"144", + "p_code":"145", "code":"148" }, { @@ -1337,7 +1337,7 @@ "title":"Why Is Error Message \"Authentication failed\" Displayed During Flink Jar Job Running?", "uri":"dli_03_0165.html", "doc_type":"usermanual", - "p_code":"144", + "p_code":"145", "code":"149" }, { @@ -1346,7 +1346,7 @@ "title":"Why Is Error Invalid OBS Bucket Name Reported After a Flink Job Submission Failed?", "uri":"dli_03_0233.html", "doc_type":"usermanual", - "p_code":"144", + "p_code":"145", "code":"150" }, { @@ -1355,7 +1355,7 @@ "title":"Why Does the Flink Submission Fail Due to Hadoop JAR File Conflict?", "uri":"dli_03_0234.html", "doc_type":"usermanual", - "p_code":"144", + "p_code":"145", "code":"151" }, { @@ -1364,7 +1364,7 @@ "title":"How Do I Connect a Flink jar Job to SASL_SSL?", "uri":"dli_03_0266.html", "doc_type":"usermanual", - "p_code":"144", + "p_code":"145", "code":"152" }, { @@ -1373,7 +1373,7 @@ "title":"Performance Tuning", "uri":"dli_03_0133.html", "doc_type":"usermanual", - "p_code":"123", + "p_code":"124", "code":"153" }, { @@ -1409,7 +1409,7 @@ "title":"O&M Guide", "uri":"dli_03_0135.html", "doc_type":"usermanual", - "p_code":"123", + "p_code":"124", "code":"157" }, { @@ -1499,7 +1499,7 @@ "title":"Problems Related to SQL Jobs", "uri":"dli_03_0020.html", "doc_type":"usermanual", - "p_code":"122", + "p_code":"123", "code":"167" }, { @@ -1755,9 +1755,9 @@ "code":"195" }, { - "desc":"If the execution of an SQL statement takes a long time, you need to access the Spark UI to check the execution status.If data skew occurs, the running time of a stage exc", + "desc":"Data skew is a common issue during the execution of SQL jobs. When data is unevenly distributed, some compute nodes process significantly more data than others, which can", "product_code":"dli", - "title":"How Do I Eliminate Data Skew by Configuring AE Parameters?", + "title":"How Do I Do When Data Skew Occurs During the Execution of a SQL Job?", "uri":"dli_03_0093.html", "doc_type":"usermanual", "p_code":"192", @@ -1895,7 +1895,7 @@ "title":"Problems Related to Spark Jobs", "uri":"dli_03_0021.html", "doc_type":"usermanual", - "p_code":"122", + "p_code":"123", "code":"211" }, { @@ -1935,7 +1935,7 @@ "code":"215" }, { - "desc":"Hard-coded or plaintext AK and SK pose significant security risks. 
To ensure security, encrypt your AK and SK, store them in configuration files or environment variables,", + "desc":"To obtain the AK/SK, set the parameters as follows:Create a SparkContext using code.val sc: SparkContext = new SparkContext()\nsc.hadoopConfiguration.set(\"fs.obs.access.ke", "product_code":"dli", "title":"How Do I Set the AK/SK for a Queue to Operate an OBS Table?", "uri":"dli_03_0017.html", @@ -2084,7 +2084,7 @@ "title":"Product Consultation", "uri":"dli_03_0001.html", "doc_type":"usermanual", - "p_code":"122", + "p_code":"123", "code":"232" }, { @@ -2096,15 +2096,6 @@ "p_code":"232", "code":"233" }, - { - "desc":"Data Lake Insight (DLI) is a serverless data processing and analysis service fully compatible with Apache Spark and Apache Flink ecosystems. It frees you from managing an", - "product_code":"dli", - "title":"What Is DLI?", - "uri":"dli_03_0002.html", - "doc_type":"usermanual", - "p_code":"233", - "code":"234" - }, { "desc":"DLI supports the following data formats:ParquetCSVORCJsonAvro", "product_code":"dli", @@ -2112,7 +2103,7 @@ "uri":"dli_03_0025.html", "doc_type":"usermanual", "p_code":"233", - "code":"235" + "code":"234" }, { "desc":"The Spark component of DLI is a fully managed service. You can only use the DLI Spark through its APIs. .The Spark component of MRS is built on the VM in an MRS cluster. ", @@ -2121,7 +2112,7 @@ "uri":"dli_03_0115.html", "doc_type":"usermanual", "p_code":"233", - "code":"236" + "code":"235" }, { "desc":"DLI data can be stored in either of the following:OBS: Data used by SQL jobs, Spark jobs, and Flink jobs can be stored in OBS, reducing storage costs.DLI: The column-base", @@ -2130,7 +2121,7 @@ "uri":"dli_03_0029.html", "doc_type":"usermanual", "p_code":"233", - "code":"237" + "code":"236" }, { "desc":"DLI tables store data within the DLI service, and you do not need to know the data storage path.OBS tables store data in your OBS buckets, and you need to manage the sour", @@ -2139,7 +2130,7 @@ "uri":"dli_03_0117.html", "doc_type":"usermanual", "p_code":"233", - "code":"238" + "code":"237" }, { "desc":"Currently, DLI supports analysis only on the data uploaded to the cloud. In scenarios where regular (for example, on a per day basis) one-off analysis on incremental data", @@ -2148,7 +2139,7 @@ "uri":"dli_03_0010.html", "doc_type":"usermanual", "p_code":"233", - "code":"239" + "code":"238" }, { "desc":"Data in the OBS bucket shared by IAM users under the same account can be imported. You cannot import data in the OBS bucket shared with other IAM account.", @@ -2157,7 +2148,7 @@ "uri":"dli_03_0129.html", "doc_type":"usermanual", "p_code":"233", - "code":"240" + "code":"239" }, { "desc":"Log in to the management console.Click in the upper left corner and select a region and a project.Click the My Quota icon in the upper right corner of the page.The Serv", @@ -2166,7 +2157,7 @@ "uri":"dli_03_0264.html", "doc_type":"usermanual", "p_code":"233", - "code":"241" + "code":"240" }, { "desc":"No, a global variable can only be used by the user who created it. Global variables can be used to simplify complex parameters. For example, long and difficult variables ", @@ -2175,7 +2166,7 @@ "uri":"dli_03_0263.html", "doc_type":"usermanual", "p_code":"233", - "code":"242" + "code":"241" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2184,7 +2175,7 @@ "uri":"dli_03_0222.html", "doc_type":"usermanual", "p_code":"232", - "code":"243" + "code":"242" }, { "desc":"If you are suggested to perform following operations to run a large number of DLI jobs:Group the DLI jobs by type, and run each group on a queue.Alternatively, create IAM", @@ -2192,8 +2183,8 @@ "title":"How Do I Manage Tens of Thousands of Jobs Running on DLI?", "uri":"dli_03_0126.html", "doc_type":"usermanual", - "p_code":"243", - "code":"244" + "p_code":"242", + "code":"243" }, { "desc":"The field names of tables that have been created cannot be changed.You can create a table, define new table fields, and migrate data from the old table to the new one.", @@ -2201,8 +2192,8 @@ "title":"How Do I Change the Name of a Field in a Created Table?", "uri":"dli_03_0162.html", "doc_type":"usermanual", - "p_code":"243", - "code":"245" + "p_code":"242", + "code":"244" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2211,7 +2202,7 @@ "uri":"dli_03_0261.html", "doc_type":"usermanual", "p_code":"232", - "code":"246" + "code":"245" }, { "desc":"No. The spark.acls.enable configuration item is not used in DLI. The Apache Spark command injection vulnerability (CVE-2022-33891) does not exist in DLI.", @@ -2219,8 +2210,8 @@ "title":"Does DLI Have the Apache Spark Command Injection Vulnerability (CVE-2022-33891)?", "uri":"dli_03_0260.html", "doc_type":"usermanual", - "p_code":"246", - "code":"247" + "p_code":"245", + "code":"246" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2228,8 +2219,8 @@ "title":"Quota", "uri":"dli_03_0053.html", "doc_type":"usermanual", - "p_code":"122", - "code":"248" + "p_code":"123", + "code":"247" }, { "desc":"Log in to the management console.Click in the upper left corner and select Region and Project.Click (the My Quotas icon) in the upper right corner.The Service Quota pag", @@ -2237,8 +2228,8 @@ "title":"How Do I View My Quotas?", "uri":"dli_03_0031.html", "doc_type":"usermanual", - "p_code":"248", - "code":"249" + "p_code":"247", + "code":"248" }, { "desc":"The system does not support online quota adjustment. To increase a resource quota, dial the hotline or send an email to the customer service. We will process your applica", @@ -2246,8 +2237,8 @@ "title":"How Do I Increase a Quota?", "uri":"dli_03_0032.html", "doc_type":"usermanual", - "p_code":"248", - "code":"250" + "p_code":"247", + "code":"249" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2255,8 +2246,8 @@ "title":"Permission", "uri":"dli_03_0054.html", "doc_type":"usermanual", - "p_code":"122", - "code":"251" + "p_code":"123", + "code":"250" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2264,8 +2255,8 @@ "title":"Usage", "uri":"dli_03_0223.html", "doc_type":"usermanual", - "p_code":"251", - "code":"252" + "p_code":"250", + "code":"251" }, { "desc":"DLI has a comprehensive permission control mechanism and supports fine-grained authentication through Identity and Access Management (IAM). You can create policies in IAM", @@ -2273,8 +2264,8 @@ "title":"How Do I Manage Fine-Grained DLI Permissions?", "uri":"dli_03_0100.html", "doc_type":"usermanual", - "p_code":"252", - "code":"253" + "p_code":"251", + "code":"252" }, { "desc":"You cannot perform permission-related operations on the partition column of a partitioned table.However, when you grant the permission of any non-partition column in a pa", @@ -2282,8 +2273,8 @@ "title":"What Is Column Permission Granting of a DLI Partition Table?", "uri":"dli_03_0008.html", "doc_type":"usermanual", - "p_code":"252", - "code":"254" + "p_code":"251", + "code":"253" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2291,8 +2282,8 @@ "title":"O&M Guide", "uri":"dli_03_0226.html", "doc_type":"usermanual", - "p_code":"251", - "code":"255" + "p_code":"250", + "code":"254" }, { "desc":"When you submit a job, a message is displayed indicating that the job fails to be submitted due to insufficient permission caused by arrears. In this case, you need to ch", @@ -2300,8 +2291,8 @@ "title":"Why Does My Account Have Insufficient Permissions Due to Arrears?", "uri":"dli_03_0140.html", "doc_type":"usermanual", - "p_code":"255", - "code":"256" + "p_code":"254", + "code":"255" }, { "desc":"When the user update an existing program package, the following error information is displayed:\"error_code\"*DLI.0003\",\"error_msg\":\"Permission denied for resource 'resourc", @@ -2309,8 +2300,8 @@ "title":"Why Does the System Display a Message Indicating Insufficient Permissions When I Update a Program Package?", "uri":"dli_03_0195.html", "doc_type":"usermanual", - "p_code":"255", - "code":"257" + "p_code":"254", + "code":"256" }, { "desc":"When the SQL query statement is executed, the system displays a message indicating that the user does not have the permission to query resources.Error information: DLI.00", @@ -2318,8 +2309,8 @@ "title":"Why Is Error \"DLI.0003: Permission denied for resource...\" Reported When I Run a SQL Statement?", "uri":"dli_03_0227.html", "doc_type":"usermanual", - "p_code":"255", - "code":"258" + "p_code":"254", + "code":"257" }, { "desc":"The table permission has been granted and verified. 
However, after a period of time, an error is reported indicating that the table query fails.There are two possible rea", @@ -2327,8 +2318,8 @@ "title":"Why Can't I Query Table Data After I've Been Granted Table Permissions?", "uri":"dli_03_0228.html", "doc_type":"usermanual", - "p_code":"255", - "code":"259" + "p_code":"254", + "code":"258" }, { "desc":"If a table inherits database permissions, you do not need to regrant the inherited permissions to the table.When you grant permissions on a table on the console:If you se", @@ -2336,8 +2327,8 @@ "title":"Will an Error Be Reported if the Inherited Permissions Are Regranted to a Table That Inherits Database Permissions?", "uri":"dli_03_0057.html", "doc_type":"usermanual", - "p_code":"255", - "code":"260" + "p_code":"254", + "code":"259" }, { "desc":"User A created Table1.User B created View1 based on Table1.After the Select Table permission on Table1 is granted to user C, user C fails to query View1.User B does not h", @@ -2345,8 +2336,8 @@ "title":"Why Can't I Query a View After I'm Granted the Select Table Permission on the View?", "uri":"dli_03_0067.html", "doc_type":"usermanual", - "p_code":"255", - "code":"261" + "p_code":"254", + "code":"260" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2354,8 +2345,8 @@ "title":"Queue", "uri":"dli_03_0049.html", "doc_type":"usermanual", - "p_code":"122", - "code":"262" + "p_code":"123", + "code":"261" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2363,8 +2354,8 @@ "title":"Usage", "uri":"dli_03_0229.html", "doc_type":"usermanual", - "p_code":"262", - "code":"263" + "p_code":"261", + "code":"262" }, { "desc":"Currently, you are not allowed to modify the description of a created queue. You can add the description when purchasing the queue.", @@ -2372,8 +2363,8 @@ "title":"Does the Description of a DLI Queue Can Be Modified?", "uri":"dli_03_0109.html", "doc_type":"usermanual", - "p_code":"263", - "code":"264" + "p_code":"262", + "code":"263" }, { "desc":"Deleting a queue does not cause table data loss in your database.", @@ -2381,8 +2372,8 @@ "title":"Will Table Data in My Database Be Lost If I Delete a Queue?", "uri":"dli_03_0166.html", "doc_type":"usermanual", - "p_code":"263", - "code":"265" + "p_code":"262", + "code":"264" }, { "desc":"You need to develop a mechanism to retry failed jobs. 
When a faulty queue is recovered, your application tries to submit the failed jobs to the queue again.", @@ -2390,8 +2381,8 @@ "title":"How Does DLI Ensure the Reliability of Spark Jobs When a Queue Is Abnormal?", "uri":"dli_03_0170.html", "doc_type":"usermanual", - "p_code":"263", - "code":"266" + "p_code":"262", + "code":"265" }, { "desc":"DLI allows you to subscribe to an SMN topic for failed jobs.Log in to the DLI console.In the navigation pane on the left, choose Queue Management.On the Queue Management ", @@ -2399,8 +2390,8 @@ "title":"How Do I Monitor Queue Exceptions?", "uri":"dli_03_0098.html", "doc_type":"usermanual", - "p_code":"263", - "code":"267" + "p_code":"262", + "code":"266" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2408,8 +2399,8 @@ "title":"O&M Guide", "uri":"dli_03_0230.html", "doc_type":"usermanual", - "p_code":"262", - "code":"268" + "p_code":"261", + "code":"267" }, { "desc":"To check the running status of the DLI queue and determine whether to run more jobs on that queue, you need to check the queue load.Search for Cloud Eye on the console.In", @@ -2417,8 +2408,8 @@ "title":"How Do I View DLI Queue Load?", "uri":"dli_03_0095.html", "doc_type":"usermanual", - "p_code":"268", - "code":"269" + "p_code":"267", + "code":"268" }, { "desc":"You need to check the large number of jobs in the Submitting and Running states on the queue.Use Cloud Eye to view jobs in different states on the queue. The procedure is", @@ -2426,8 +2417,8 @@ "title":"How Do I Determine Whether There Are Too Many Jobs in the Current Queue?", "uri":"dli_03_0183.html", "doc_type":"usermanual", - "p_code":"268", - "code":"270" + "p_code":"267", + "code":"269" }, { "desc":"Currently, DLI provides two types of queues, For SQL and For general use. SQL queues are used to run SQL jobs. General-use queues are compatible with Spark queues of earl", @@ -2435,8 +2426,8 @@ "title":"How Do I Switch an Earlier-Version Spark Queue to a General-Purpose Queue?", "uri":"dli_03_0065.html", "doc_type":"usermanual", - "p_code":"268", - "code":"271" + "p_code":"267", + "code":"270" }, { "desc":"DLI queues do not use resources or bandwidth when no job is running. In this case, the running status of DLI queues is not displayed on CES.", @@ -2444,8 +2435,8 @@ "title":"Why Cannot I View the Resource Running Status of DLI Queues on Cloud Eye?", "uri":"dli_03_0193.html", "doc_type":"usermanual", - "p_code":"268", - "code":"272" + "p_code":"267", + "code":"271" }, { "desc":"In DLI, 64 CU = 64 cores and 256 GB memory.In a Spark job, if the driver occupies 4 cores and 16 GB memory, the executor can occupy 60 cores and 240 GB memory.", @@ -2453,8 +2444,8 @@ "title":"How Do I Allocate Queue Resources for Running Spark Jobs If I Have Purchased 64 CUs?", "uri":"dli_03_0088.html", "doc_type":"usermanual", - "p_code":"268", - "code":"273" + "p_code":"267", + "code":"272" }, { "desc":"Queue plans create failed. The plan xxx target cu is out of quota is displayed when you create a scheduled scaling task.The CU quota of the current account is insufficien", @@ -2462,8 +2453,8 @@ "title":"Why Is Error \"Queue plans create failed. 
The plan xxx target cu is out of quota\" Reported When I Schedule CU Changes?", "uri":"dli_03_0159.html", "doc_type":"usermanual", - "p_code":"268", - "code":"274" + "p_code":"267", + "code":"273" }, { "desc":"After a SQL job was submitted to the default queue, the job runs abnormally. The job log reported that the execution timed out. The exception logs are as follows:[ERROR] ", @@ -2471,8 +2462,8 @@ "title":"Why Is a Timeout Exception Reported When a DLI SQL Statement Fails to Be Executed on the Default Queue?", "uri":"dli_03_0171.html", "doc_type":"usermanual", - "p_code":"268", - "code":"275" + "p_code":"267", + "code":"274" }, { "desc":"In daily big data analysis work, it is important to allocate and manage compute resources properly to provide a good job execution environment.You can allocate resources ", @@ -2480,8 +2471,8 @@ "title":"How Can I Check the Actual and Used CUs for an Elastic Resource Pool as Well as the Required CUs for a Job?", "uri":"dli_03_0276.html", "doc_type":"usermanual", - "p_code":"268", - "code":"276" + "p_code":"267", + "code":"275" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2489,8 +2480,8 @@ "title":"Datasource Connections", "uri":"dli_03_0022.html", "doc_type":"usermanual", - "p_code":"122", - "code":"277" + "p_code":"123", + "code":"276" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2498,8 +2489,8 @@ "title":"Datasource Connections", "uri":"dli_03_0110.html", "doc_type":"usermanual", - "p_code":"277", - "code":"278" + "p_code":"276", + "code":"277" }, { "desc":"You need to create a VPC peering connection to enable network connectivity. Take MRS as an example. If DLI and MRS clusters are in the same VPC, and the security group is", @@ -2507,8 +2498,8 @@ "title":"Why Do I Need to Create a VPC Peering Connection for an Enhanced Datasource Connection?", "uri":"dli_03_0128.html", "doc_type":"usermanual", - "p_code":"278", - "code":"279" + "p_code":"277", + "code":"278" }, { "desc":"An enhanced datasource connection failed to pass the network connectivity test. Datasource connection cannot be bound to a queue. The following error information is displ", @@ -2516,8 +2507,8 @@ "title":"Failed to Bind a Queue to an Enhanced Datasource Connection", "uri":"dli_03_0237.html", "doc_type":"usermanual", - "p_code":"278", - "code":"280" + "p_code":"277", + "code":"279" }, { "desc":"The outbound rule had been configured for the security group of the queue associated with the enhanced datasource connection. The datasource authentication used a passwor", @@ -2525,8 +2516,8 @@ "title":"DLI Failed to Connect to GaussDB(DWS) Through an Enhanced Datasource Connection", "uri":"dli_03_0238.html", "doc_type":"usermanual", - "p_code":"278", - "code":"281" + "p_code":"277", + "code":"280" }, { "desc":"A datasource connection is created and bound to a queue. 
The connectivity test fails and the following error information is displayed:failed to connect to specified addre", @@ -2534,8 +2525,8 @@ "title":"How Do I Do if the Datasource Connection Is Created But the Network Connectivity Test Fails?", "uri":"dli_03_0179.html", "doc_type":"usermanual", - "p_code":"278", - "code":"282" + "p_code":"277", + "code":"281" }, { "desc":"Configuring the Connection Between a DLI Queue and a Data Source in a Private NetworkIf your DLI job needs to connect to a data source, for example, MRS, RDS, CSS, Kafka,", @@ -2543,8 +2534,8 @@ "title":"How Do I Configure the Network Between a DLI Queue and a Data Source?", "uri":"dli_03_0186.html", "doc_type":"usermanual", - "p_code":"278", - "code":"283" + "p_code":"277", + "code":"282" }, { "desc":"The possible causes and solutions are as follows:If you have created a queue, do not bind it to a datasource connection immediately. Wait for 5 to 10 minutes. After the c", @@ -2552,8 +2543,8 @@ "title":"What Can I Do If a Datasource Connection Is Stuck in Creating State When I Try to Bind a Queue to It?", "uri":"dli_03_0257.html", "doc_type":"usermanual", - "p_code":"278", - "code":"284" + "p_code":"277", + "code":"283" }, { "desc":"DLI enhanced datasource connection uses VPC peering to directly connect the VPC networks of the desired data sources for point-to-point data exchanges.", @@ -2561,8 +2552,8 @@ "title":"How Do I Connect DLI to Data Sources?", "uri":"dli_03_0259.html", "doc_type":"usermanual", - "p_code":"278", - "code":"285" + "p_code":"277", + "code":"284" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2570,8 +2561,8 @@ "title":"Cross-Source Analysis", "uri":"dli_03_0112.html", "doc_type":"usermanual", - "p_code":"277", - "code":"286" + "p_code":"276", + "code":"285" }, { "desc":"To perform query on data stored on services rather than DLI, perform the following steps:Assume that the data to be queried is stored on multiple services (for example, O", @@ -2579,8 +2570,8 @@ "title":"How Can I Perform Query on Data Stored on Services Rather Than DLI?", "uri":"dli_03_0011.html", "doc_type":"usermanual", - "p_code":"286", - "code":"287" + "p_code":"285", + "code":"286" }, { "desc":"Connect VPCs in different regions.Create an enhanced datasource connection on DLI and bind it to a queue.Add a DLI route.", @@ -2588,8 +2579,8 @@ "title":"How Can I Access Data Across Regions?", "uri":"dli_03_0085.html", "doc_type":"usermanual", - "p_code":"286", - "code":"288" + "p_code":"285", + "code":"287" }, { "desc":"When data is inserted into DLI, set the ID field to NULL.", @@ -2597,8 +2588,8 @@ "title":"How Do I Set the Auto-increment Primary Key or Other Fields That Are Automatically Filled in the RDS Table When Creating a DLI and Associating It with the RDS Table?", "uri":"dli_03_0028.html", "doc_type":"usermanual", - "p_code":"286", - "code":"289" + "p_code":"285", + "code":"288" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2606,8 +2597,8 @@ "title":"Datasource Connection O&M", "uri":"dli_03_0256.html", "doc_type":"usermanual", - "p_code":"277", - "code":"290" + "p_code":"276", + "code":"289" }, { "desc":"Possible CausesThe network connectivity is abnormal. Check whether the security group is correctly selected and whether the VPC is correctly configured.The network connec", @@ -2615,8 +2606,8 @@ "title":"Why Is the Error Message \"communication link failure\" Displayed When I Use a Newly Activated Datasource Connection?", "uri":"dli_03_0047.html", "doc_type":"usermanual", - "p_code":"290", - "code":"291" + "p_code":"289", + "code":"290" }, { "desc":"The cluster host information is not added to the datasource connection. As a result, the KRB authentication fails, the connection times out, and no error is recorded in l", @@ -2624,8 +2615,8 @@ "title":"Connection Times Out During MRS HBase Datasource Connection, and No Error Is Recorded in Logs", "uri":"dli_03_0080.html", "doc_type":"usermanual", - "p_code":"290", - "code":"292" + "p_code":"289", + "code":"291" }, { "desc":"When you create a VPC peering connection for the datasource connection, the following error information is displayed:Before you create a datasource connection, check whet", @@ -2633,8 +2624,8 @@ "title":"Why Can't I Find the Subnet When Creating a DLI Datasource Connection?", "uri":"dli_03_0111.html", "doc_type":"usermanual", - "p_code":"290", - "code":"293" + "p_code":"289", + "code":"292" }, { "desc":"A datasource RDS table was created in the DataArts Studio, and the insert overwrite statement was executed to write data into RDS. DLI.0999: BatchUpdateException: Incorre", @@ -2642,8 +2633,8 @@ "title":"Error Message \"Incorrect string value\" Is Displayed When insert overwrite Is Executed on a Datasource RDS Table", "uri":"dli_03_0239.html", "doc_type":"usermanual", - "p_code":"290", - "code":"294" + "p_code":"289", + "code":"293" }, { "desc":"The system failed to create a datasource RDS table, and null pointer error was reported.The following table creation statement was used:The RDS database is in a PostGre c", @@ -2651,8 +2642,8 @@ "title":"Null Pointer Error Is Displayed When the System Creates a Datasource RDS Table", "uri":"dli_03_0250.html", "doc_type":"usermanual", - "p_code":"290", - "code":"295" + "p_code":"289", + "code":"294" }, { "desc":"The system failed to execute insert overwrite on the datasource GaussDB(DWS) table, and org.postgresql.util.PSQLException: ERROR: tuple concurrently updated was displayed", @@ -2660,8 +2651,8 @@ "title":"Error Message \"org.postgresql.util.PSQLException: ERROR: tuple concurrently updated\" Is Displayed When the System Executes insert overwrite on a Datasource GaussDB(DWS) Table", "uri":"dli_03_0251.html", "doc_type":"usermanual", - "p_code":"290", - "code":"296" + "p_code":"289", + "code":"295" }, { "desc":"A datasource table was used to import data to a CloudTable HBase table. This HBase table contains a column family and a rowkey for 100 million simulating data records. 
Th", @@ -2669,8 +2660,8 @@ "title":"RegionTooBusyException Is Reported When Data Is Imported to a CloudTable HBase Table Through a Datasource Table", "uri":"dli_03_0252.html", "doc_type":"usermanual", - "p_code":"290", - "code":"297" + "p_code":"289", + "code":"296" }, { "desc":"A table was created on GaussDB(DWS) and then a datasource connection was created on DLI to read and write data. An error message was displayed during data writing, indica", @@ -2678,8 +2669,8 @@ "title":"A Null Value Is Written Into a Non-Null Field When a DLI Datasource Connection Is Used to Connect to a GaussDB(DWS) Table", "uri":"dli_03_0253.html", "doc_type":"usermanual", - "p_code":"290", - "code":"298" + "p_code":"289", + "code":"297" }, { "desc":"A datasource GaussDB(DWS) table and the datasource connection were created in DLI, and the schema of the source table in GaussDB(DWS) were updated. During the job executi", @@ -2687,8 +2678,8 @@ "title":"An Insert Operation Failed After the Schema of the GaussDB(DWS) Source Table Is Updated", "uri":"dli_03_0254.html", "doc_type":"usermanual", - "p_code":"290", - "code":"299" + "p_code":"289", + "code":"298" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2696,8 +2687,8 @@ "title":"APIs", "uri":"dli_03_0056.html", "doc_type":"usermanual", - "p_code":"122", - "code":"300" + "p_code":"123", + "code":"299" }, { "desc":"In the REST API provided by DLI, the request header can be added to the request URI, for example, Content-Type.Content-Type indicates the request body type or format. The", @@ -2705,8 +2696,8 @@ "title":"Why Is Error \"unsupported media Type\" Reported When I Subimt a SQL Job?", "uri":"dli_03_0060.html", "doc_type":"usermanual", - "p_code":"300", - "code":"301" + "p_code":"299", + "code":"300" }, { "desc":"When different IAM users call an API under the same enterprise project in the same region, the project ID is the same.", @@ -2714,8 +2705,8 @@ "title":"Is the Project ID Fixed when Different IAM Users Call an API?", "uri":"dli_03_0125.html", "doc_type":"usermanual", - "p_code":"300", - "code":"302" + "p_code":"299", + "code":"301" }, { "desc":"When the API call for submitting a SQL job times out, and the following error information is displayed:There are currently no resources tracked in the state, so there is ", @@ -2723,8 +2714,8 @@ "title":"What Can I Do If an Error Is Reported When the Execution of the API for Creating a SQL Job Times Out?", "uri":"dli_03_0178.html", "doc_type":"usermanual", - "p_code":"300", - "code":"303" + "p_code":"299", + "code":"302" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2732,8 +2723,8 @@ "title":"SDKs", "uri":"dli_03_0058.html", "doc_type":"usermanual", - "p_code":"122", - "code":"304" + "p_code":"123", + "code":"303" }, { "desc":"When you query the SQL job results using SDK, the system checks the job status when the job is submitted. The timeout interval set in the system is 300s. 
If the job is no", @@ -2741,8 +2732,8 @@ "title":"How Do I Set the Timeout Duration for Querying SQL Job Results Using SDK?", "uri":"dli_03_0073.html", "doc_type":"usermanual", - "p_code":"304", - "code":"305" + "p_code":"303", + "code":"304" }, { "desc":"Run the ping command to check whether dli.xxx can be accessed.If dli.xxx can be accessed, check whether DNS resolution is correctly configured.If dli.xxx can be accessed,", @@ -2750,8 +2741,8 @@ "title":"How Do I Handle the dli.xxx,unable to resolve host address Error?", "uri":"dli_03_0255.html", "doc_type":"usermanual", - "p_code":"304", - "code":"306" + "p_code":"303", + "code":"305" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2760,6 +2751,6 @@ "uri":"dli_01_00006.html", "doc_type":"usermanual", "p_code":"", - "code":"307" + "code":"306" } ] \ No newline at end of file diff --git a/docs/dli/umn/dli_01_00006.html b/docs/dli/umn/dli_01_00006.html index 2406d648..cbbcb3e1 100644 --- a/docs/dli/umn/dli_01_00006.html +++ b/docs/dli/umn/dli_01_00006.html @@ -8,7 +8,13 @@ -
2024-04-28
+2024-07-11
+Modified the following section:
+2024-04-28
Added the following section:
@@ -25,13 +31,13 @@ 2023-11-01
Modified the following content:
-2023-10-08
Modified the following content:
-2023-09-07
diff --git a/docs/dli/umn/dli_01_0003.html b/docs/dli/umn/dli_01_0003.html index fe5fa05c..35c6b2bf 100644 --- a/docs/dli/umn/dli_01_0003.html +++ b/docs/dli/umn/dli_01_0003.html @@ -8,9 +8,9 @@ In cross-source development scenarios, there is a risk of password leakage if datasource authentication information is directly configured. You are advised to use the datasource authentication provided by DLI. For details, see Overview.
For example, 192.168.xx.xx or 192.168.xx.xx:8181.
+Before creating a table, ensure that a database has been created.
-Datasource connection tables, such as View tables, HBase (MRS) tables, OpenTSDB (MRS) tables, GaussDB(DWS) tables, RDS tables, and CSS tables, cannot be created. You can use SQL to create views and datasource connection tables. For details, see sections Creating a View and Creating a Datasource Connection Table in the Data Lake Insight SQL Syntax Reference.
+Datasource connection tables, such as View tables, HBase (MRS) tables, OpenTSDB (MRS) tables, GaussDB(DWS) tables, RDS tables, and CSS tables, cannot be created on this page. To create views and datasource connection tables, use SQL statements instead. For details, see sections Creating a View and Creating a Datasource Connection Table in the Data Lake Insight SQL Syntax Reference.
Type
Data type of a column. This parameter corresponds to Column Name.
-string
Data Format
DLI supports the following data formats:
-CSV
User-defined Quotation Character
This parameter is valid only when Data Format is set to CSV and you select User-defined Quotation Character.
+This parameter is valid only when Data Format is set to CSV and you select User-defined Quotation Character.
The following quotation characters are supported:
After a table is created, you can view and select the table for use on the Data Management page or SQL Editor page.
+After a table is created, you can view and select the table for use on the Data Management page or SQL Editor page.
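For reference, the console procedure above has a SQL equivalent. The sketch below is illustrative only and relies on placeholder names: the database demo_db, the table sales_csv, its columns, and the OBS path are all invented, and the full set of CREATE TABLE options is documented in the Data Lake Insight SQL Syntax Reference.

CREATE DATABASE IF NOT EXISTS demo_db;

CREATE TABLE IF NOT EXISTS demo_db.sales_csv (
  order_id STRING,   -- placeholder column
  amount DOUBLE      -- placeholder column
)
USING csv
OPTIONS (path 'obs://your-bucket/sales/');  -- invented OBS path

-- Once created, the table can be selected on the Data Management page
-- or queried from the SQL Editor:
SELECT * FROM demo_db.sales_csv LIMIT 10;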
For example, to connect DLI to the MRS, RDS, CSS, Kafka, or GaussDB(DWS) data source, you need to enable the network between DLI and the VPC of the data source.
Create an enhanced datasource connection on the console.
-For example, 192.168.xx.xx or 192.168.xx.xx:8181.
+Format: IP address:Port number
Before testing the connection, ensure that the security group of the external data source has allowed access from the CIDR block of the queue.
Host information is the mapping between host IP addresses and domain names. After you configure host information, jobs can only use the configured domain names to access corresponding hosts. After a datasource connection is created, you can modify the host information.
When accessing the HBase cluster of MRS, you need to configure the host name (domain name) and IP address of the instance.
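As an illustration of the expected format, each host information entry maps an IP address to a domain name, one mapping per line. Both values below are invented placeholders, not addresses from this guide:

192.168.0.22 node-master1-example-domain.com
192.168.0.23 node-master2-example-domain.com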
Last Accessed
The last time an operation was performed on the table.
+The last access time of a table refers only to the last time it was updated, not the time it was read (SELECT operation).
Operation
diff --git a/docs/dli/umn/dli_01_0318.html b/docs/dli/umn/dli_01_0318.html index 1d7930df..2d5289e3 100644 --- a/docs/dli/umn/dli_01_0318.html +++ b/docs/dli/umn/dli_01_0318.html @@ -3,465 +3,535 @@ With CTS, you can record operations associated with DLI for later query, audit, and backtrack operations.
Alternatively, you can double-click the table name qw. The query statement is automatically entered in the SQL job editing window.
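The statement that is automatically entered is presumably a simple full-table query on the double-clicked table. A sketch, assuming the sample table qw used in this guide (the exact generated text may differ):

SELECT * FROM qw LIMIT 10;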
On the Overview page, click Create Job in the upper right corner of the Spark Jobs tab, or click Create Job in the upper right corner of the Spark Jobs page. The Spark job editing page is displayed. When the page opens, a message indicates that a temporary DLI data bucket will be created to store temporary data generated by DLI, such as job logs and job results. If you choose not to create the bucket, you cannot view job logs. The bucket is created with the default bucket name. If you do not need a temporary DLI data bucket and do not want to see this message again, select Do not show again and click Cancel.
-Prerequisites
Flink 1.15 Dependencies
Obtain information about the Flink 1.15 dependencies from the logs of a Flink job.
Flink 1.12 Dependencies