diff --git a/docs/dli/sqlreference/ALL_META.TXT.json b/docs/dli/sqlreference/ALL_META.TXT.json index 474594b3..6313d6c5 100644 --- a/docs/dli/sqlreference/ALL_META.TXT.json +++ b/docs/dli/sqlreference/ALL_META.TXT.json @@ -1,3821 +1,9798 @@ [ + { + "dockw":"SQL Syntax Reference" + }, { "uri":"dli_08_0221.html", + "node_id":"dli_08_0221.xml", "product_code":"dli", "code":"1", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Spark SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Spark SQL Syntax Reference", "githuburl":"" }, { "uri":"dli_08_0266.html", + "node_id":"dli_08_0266.xml", "product_code":"dli", "code":"2", "des":"This section describes the common configuration items of the SQL syntax for DLI batch jobs.", "doc_type":"sqlreference", "kw":"Common Configuration Items of Batch SQL Jobs,Spark SQL Syntax Reference,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Common Configuration Items of Batch SQL Jobs", "githuburl":"" }, { "uri":"dli_08_0219.html", + "node_id":"dli_08_0219.xml", "product_code":"dli", "code":"3", "des":"This section describes the Spark SQL syntax list provided by DLI. 
For details about the parameters and examples, see the syntax description.", "doc_type":"sqlreference", "kw":"SQL Syntax Overview of Batch Jobs,Spark SQL Syntax Reference,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"SQL Syntax Overview of Batch Jobs", "githuburl":"" }, { - "uri":"dli_08_0070.html", + "uri":"dli_08_0477.html", + "node_id":"dli_08_0477.xml", "product_code":"dli", "code":"4", + "des":"This section describes the open source Spark SQL syntax supported by DLI. For details about the syntax, parameters, and examples, see Spark SQL Syntax.", + "doc_type":"sqlreference", + "kw":"Spark Open Source Commands,Spark SQL Syntax Reference,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Spark Open Source Commands", + "githuburl":"" + }, + { + "uri":"dli_08_0070.html", + "node_id":"dli_08_0070.xml", + "product_code":"dli", + "code":"5", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Databases", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Databases", "githuburl":"" }, { "uri":"dli_08_0071.html", + "node_id":"dli_08_0071.xml", "product_code":"dli", - "code":"5", + "code":"6", "des":"This statement is used to create a database.IF NOT EXISTS: Prevents system errors if the database to be created exists.COMMENT: Describes a database.DBPROPERTIES: Specifi", "doc_type":"sqlreference", "kw":"Creating a Database,Databases,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Database", "githuburl":"" }, { "uri":"dli_08_0072.html", + "node_id":"dli_08_0072.xml", "product_code":"dli", - "code":"6", + "code":"7", "des":"This statement is used to delete a database.IF EXISTS: Prevents system errors if the database to be deleted does not exist.DATABASE and SCHEMA can be used interchangeably", "doc_type":"sqlreference", "kw":"Deleting a Database,Databases,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Deleting a Database", "githuburl":"" }, { "uri":"dli_08_0073.html", + "node_id":"dli_08_0073.xml", "product_code":"dli", - "code":"7", + "code":"8", "des":"This syntax is used to view the information about a specified database, including the database name and database description.EXTENDED: Displays the database properties.If", "doc_type":"sqlreference", "kw":"Viewing a Specified Database,Databases,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Viewing a Specified Database", "githuburl":"" }, { "uri":"dli_08_0074.html", + "node_id":"dli_08_0074.xml", 
"product_code":"dli", - "code":"8", + "code":"9", "des":"This syntax is used to query all current databases.NoneKeyword DATABASES is equivalent to SCHEMAS. You can use either of them in this statement.View all the current datab", "doc_type":"sqlreference", "kw":"Viewing All Databases,Databases,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Viewing All Databases", "githuburl":"" }, { "uri":"dli_08_0223.html", + "node_id":"dli_08_0223.xml", "product_code":"dli", - "code":"9", + "code":"10", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating an OBS Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating an OBS Table", "githuburl":"" }, { "uri":"dli_08_0076.html", + "node_id":"dli_08_0076.xml", "product_code":"dli", - "code":"10", + "code":"11", "des":"Create an OBS table using the DataSource syntax.The main differences between the DataSource and the Hive syntax lie in the supported data formats and the number of suppor", "doc_type":"sqlreference", "kw":"Creating an OBS Table Using the DataSource Syntax,Creating an OBS Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating an OBS Table Using the DataSource Syntax", "githuburl":"" }, { "uri":"dli_08_0077.html", + "node_id":"dli_08_0077.xml", "product_code":"dli", - "code":"11", + "code":"12", "des":"This statement is used to create an OBS table using the Hive syntax. 
The main differences between the DataSource and the Hive syntax lie in the supported data formats and", "doc_type":"sqlreference", "kw":"Creating an OBS Table Using the Hive Syntax,Creating an OBS Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating an OBS Table Using the Hive Syntax", "githuburl":"" }, { "uri":"dli_08_0224.html", + "node_id":"dli_08_0224.xml", "product_code":"dli", - "code":"12", + "code":"13", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating a DLI Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a DLI Table", "githuburl":"" }, { "uri":"dli_08_0098.html", + "node_id":"dli_08_0098.xml", "product_code":"dli", - "code":"13", + "code":"14", "des":"This DataSource syntax can be used to create a DLI table. The main differences between the DataSource and the Hive syntax lie in the supported data formats and the number", "doc_type":"sqlreference", "kw":"Creating a DLI Table Using the DataSource Syntax,Creating a DLI Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a DLI Table Using the DataSource Syntax", "githuburl":"" }, { "uri":"dli_08_0204.html", + "node_id":"dli_08_0204.xml", "product_code":"dli", - "code":"14", + "code":"15", "des":"This Hive syntax is used to create a DLI table. 
The main differences between the DataSource and the Hive syntax lie in the supported data formats and the number of suppor", "doc_type":"sqlreference", "kw":"Creating a DLI Table Using the Hive Syntax,Creating a DLI Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a DLI Table Using the Hive Syntax", "githuburl":"" }, { "uri":"dli_08_0087.html", + "node_id":"dli_08_0087.xml", "product_code":"dli", - "code":"15", + "code":"16", "des":"This statement is used to delete tables.If the table is stored in OBS, only the metadata is deleted. The data stored on OBS is not deleted.If the table is stored in DLI, ", "doc_type":"sqlreference", "kw":"Deleting a Table,Spark SQL Syntax Reference,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Deleting a Table", "githuburl":"" }, { "uri":"dli_08_0089.html", + "node_id":"dli_08_0089.xml", "product_code":"dli", - "code":"16", + "code":"17", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Viewing Tables", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Viewing Tables", "githuburl":"" }, { "uri":"dli_08_0090.html", + "node_id":"dli_08_0090.xml", "product_code":"dli", - "code":"17", + "code":"18", "des":"This statement is used to view all tables and views in the current database.FROM/IN: followed by the name of a database whose tables and views will be displayed.NoneCreat", "doc_type":"sqlreference", "kw":"Viewing All Tables,Viewing Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Viewing All Tables", "githuburl":"" }, { "uri":"dli_08_0091.html", + "node_id":"dli_08_0091.xml", "product_code":"dli", - "code":"18", + "code":"19", "des":"This statement is used to show the statements for creating a table.CREATE TABLE: statement for creating a tableThe table specified in this statement must exist. Otherwise", "doc_type":"sqlreference", "kw":"Viewing Table Creation Statements,Viewing Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Viewing Table Creation Statements", "githuburl":"" }, { "uri":"dli_08_0092.html", + "node_id":"dli_08_0092.xml", "product_code":"dli", - "code":"19", + "code":"20", "des":"Check the properties of a table.TBLPROPERTIES: This statement allows you to add a key/value property to a table.property_name is case sensitive. 
You cannot specify multip", "doc_type":"sqlreference", "kw":"Viewing Table Properties,Viewing Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Viewing Table Properties", "githuburl":"" }, { "uri":"dli_08_0093.html", + "node_id":"dli_08_0093.xml", "product_code":"dli", - "code":"20", + "code":"21", "des":"This statement is used to query all columns in a specified table.COLUMNS: columns in the current tableFROM/IN: followed by the name of a database whose tables and views w", "doc_type":"sqlreference", "kw":"Viewing All Columns in a Specified Table,Viewing Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Viewing All Columns in a Specified Table", "githuburl":"" }, { "uri":"dli_08_0094.html", + "node_id":"dli_08_0094.xml", "product_code":"dli", - "code":"21", + "code":"22", "des":"This statement is used to view all partitions in a specified table.PARTITIONS: partitions in a specified tablePARTITION: a specified partitionThe table specified in this ", "doc_type":"sqlreference", "kw":"Viewing All Partitions in a Specified Table,Viewing Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Viewing All Partitions in a Specified Table", "githuburl":"" }, { "uri":"dli_08_0105.html", + "node_id":"dli_08_0105.xml", "product_code":"dli", - "code":"22", + "code":"23", "des":"This statement is used to view the table statistics. 
The names and data types of all columns in a specified table will be returned.EXTENDED: displays all metadata of the ", "doc_type":"sqlreference", "kw":"Viewing Table Statistics,Viewing Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Viewing Table Statistics", "githuburl":"" }, { "uri":"dli_08_0262.html", + "node_id":"dli_08_0262.xml", "product_code":"dli", - "code":"23", + "code":"24", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Modifying a Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Modifying a Table", "githuburl":"" }, { "uri":"dli_08_0263.html", + "node_id":"dli_08_0263.xml", "product_code":"dli", - "code":"24", + "code":"25", "des":"This statement is used to add one or more new columns to a table.ADD COLUMNS: columns to addCOMMENT: column descriptionDo not run this SQL statement concurrently. 
Otherwi", "doc_type":"sqlreference", "kw":"Adding a Column,Modifying a Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Adding a Column", "githuburl":"" }, { - "uri":"dli_08_0354.html", + "uri":"dli_08_0470.html", + "node_id":"dli_08_0470.xml", "product_code":"dli", - "code":"25", + "code":"26", + "des":"You can modify the column comments of non-partitioned or partitioned tables.CHANGE COLUMN: Modify a column.COMMENT: column descriptionChange the comment of the c1 column ", + "doc_type":"sqlreference", + "kw":"Modifying Column Comments,Modifying a Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Modifying Column Comments", + "githuburl":"" + }, + { + "uri":"dli_08_0354.html", + "node_id":"dli_08_0354.xml", + "product_code":"dli", + "code":"27", "des":"DLI controls multiple versions of backup data for restoration. After the multiversion function is enabled, the system automatically backs up table data when you delete or", "doc_type":"sqlreference", "kw":"Enabling or Disabling Multiversion Backup,Modifying a Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Enabling or Disabling Multiversion Backup", "githuburl":"" }, { "uri":"dli_08_0080.html", + "node_id":"dli_08_0080.xml", "product_code":"dli", - "code":"26", + "code":"28", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Syntax for Partitioning a Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Syntax for Partitioning a Table", "githuburl":"" }, { "uri":"dli_08_0081.html", + "node_id":"dli_08_0081.xml", "product_code":"dli", - "code":"27", + "code":"29", "des":"After an OBS partitioned table is created, no partition information is generated for the table. Partition information is generated only after you:Insert data to the OBS p", "doc_type":"sqlreference", "kw":"Adding Partition Data (Only OBS Tables Supported),Syntax for Partitioning a Table,SQL Syntax Referen", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Adding Partition Data (Only OBS Tables Supported)", "githuburl":"" }, { "uri":"dli_08_0082.html", + "node_id":"dli_08_0082.xml", "product_code":"dli", - "code":"28", + "code":"30", "des":"This statement is used to rename partitions.PARTITION: a specified partitionRENAME: new name of the partitionThis statement is used for OBS table operations.The table and", "doc_type":"sqlreference", "kw":"Renaming a Partition (Only OBS Tables Supported),Syntax for Partitioning a Table,SQL Syntax Referenc", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Renaming a Partition (Only OBS Tables Supported)", "githuburl":"" }, { "uri":"dli_08_0083.html", + "node_id":"dli_08_0083.xml", "product_code":"dli", - "code":"29", - "des":"Deletes one or more partitions from a partitioned table.The table in which partitions are to be deleted must exist. 
Otherwise, an error is reported.The to-be-deleted part", + "code":"31", + "des":"This statement is used to delete one or more partitions from a partitioned table.Partitioned tables are classified into OBS tables and DLI tables. You can delete one or m", "doc_type":"sqlreference", "kw":"Deleting a Partition,Syntax for Partitioning a Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Deleting a Partition", "githuburl":"" }, { "uri":"dli_08_0343.html", + "node_id":"dli_08_0343.xml", "product_code":"dli", - "code":"30", - "des":"This statement is used to delete one or more partitions based on specified conditions.This statement is used for OBS table operations only.The table in which partitions a", + "code":"32", + "des":"This statement is used to delete one or more partitions based on specified conditions.This statement is only used for OBS tables.The table in which partitions are to be d", "doc_type":"sqlreference", - "kw":"Deleting Partitions by Specifying Filter Criteria (Only OBS Tables Supported),Syntax for Partitionin", - "title":"Deleting Partitions by Specifying Filter Criteria (Only OBS Tables Supported)", + "kw":"Deleting Partitions by Specifying Filter Criteria (Only Supported on OBS Tables),Syntax for Partitio", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Deleting Partitions by Specifying Filter Criteria (Only Supported on OBS Tables)", "githuburl":"" }, { "uri":"dli_08_0084.html", + "node_id":"dli_08_0084.xml", "product_code":"dli", - "code":"31", + "code":"33", "des":"This statement is used to modify the positions of table partitions.PARTITION: a specified partitionLOCATION: path of the partitionFor a table partition whose position is ", "doc_type":"sqlreference", "kw":"Altering the Partition Location of a Table (Only OBS Tables Supported),Syntax for Partitioning a Tab", + "search_title":"", + 
"metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Altering the Partition Location of a Table (Only OBS Tables Supported)", "githuburl":"" }, { "uri":"dli_08_0079.html", + "node_id":"dli_08_0079.xml", "product_code":"dli", - "code":"32", + "code":"34", "des":"This statement is used to update the partition information about a table in the Metastore.OrPARTITIONS: partition informationSERDEPROPERTIES: Serde attributeThis statemen", "doc_type":"sqlreference", "kw":"Updating Partitioned Table Data (Only OBS Tables Supported),Syntax for Partitioning a Table,SQL Synt", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Updating Partitioned Table Data (Only OBS Tables Supported)", "githuburl":"" }, { "uri":"dli_08_0359.html", + "node_id":"dli_08_0359.xml", "product_code":"dli", - "code":"33", + "code":"35", "des":"Spark caches Parquet metadata to improve performance. If you update a Parquet table, the cached metadata is not updated. Spark SQL cannot find the newly inserted data and", "doc_type":"sqlreference", "kw":"Updating Table Metadata with REFRESH TABLE,Syntax for Partitioning a Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Updating Table Metadata with REFRESH TABLE", "githuburl":"" }, { "uri":"dli_08_0100.html", + "node_id":"dli_08_0100.xml", "product_code":"dli", - "code":"34", + "code":"36", "des":"The LOAD DATA function can be used to import data in CSV, Parquet, ORC, JSON, and Avro formats. 
The data is converted into the Parquet data format for storage.INPATH: pat", "doc_type":"sqlreference", "kw":"Importing Data to the Table,Spark SQL Syntax Reference,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Importing Data to the Table", "githuburl":"" }, { "uri":"dli_08_0095.html", + "node_id":"dli_08_0095.xml", "product_code":"dli", - "code":"35", + "code":"37", "des":"This statement is used to insert the SELECT query result or a certain data record into a table.Insert the SELECT query result into a table.INSERT INTO [TABLE] [db_name.]t", "doc_type":"sqlreference", "kw":"Inserting Data,Spark SQL Syntax Reference,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Inserting Data", "githuburl":"" }, { "uri":"dli_08_0217.html", + "node_id":"dli_08_0217.xml", "product_code":"dli", - "code":"36", + "code":"38", "des":"This statement is used to delete data from the DLI or OBS table.Only data in the DLI or OBS table can be deleted.", "doc_type":"sqlreference", "kw":"Clearing Data,Spark SQL Syntax Reference,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Clearing Data", "githuburl":"" }, { "uri":"dli_08_0205.html", + "node_id":"dli_08_0205.xml", "product_code":"dli", - "code":"37", + "code":"39", "des":"This statement is used to directly write query results to a specified directory. 
The query results can be stored in CSV, Parquet, ORC, JSON, or Avro format.USING: Specifi", "doc_type":"sqlreference", "kw":"Exporting Search Results,Spark SQL Syntax Reference,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Exporting Search Results", "githuburl":"" }, { "uri":"dli_08_0349.html", + "node_id":"dli_08_0349.xml", "product_code":"dli", - "code":"38", + "code":"40", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Backing Up and Restoring Data of Multiple Versions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Backing Up and Restoring Data of Multiple Versions", "githuburl":"" }, { "uri":"dli_08_0350.html", + "node_id":"dli_08_0350.xml", "product_code":"dli", - "code":"39", + "code":"41", "des":"After multiversion is enabled, backup data is retained for seven days by default. You can change the retention period by setting system parameterdli.multi.version.retenti", "doc_type":"sqlreference", "kw":"Setting the Retention Period for Multiversion Backup Data,Backing Up and Restoring Data of Multiple ", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Setting the Retention Period for Multiversion Backup Data", "githuburl":"" }, { "uri":"dli_08_0351.html", + "node_id":"dli_08_0351.xml", "product_code":"dli", - "code":"40", + "code":"42", "des":"After the multiversion function is enabled, you can run the SHOW HISTORY command to view the backup data of a table. 
For details about the syntax for enabling or disablin", "doc_type":"sqlreference", "kw":"Viewing Multiversion Backup Data,Backing Up and Restoring Data of Multiple Versions,SQL Syntax Refer", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Viewing Multiversion Backup Data", "githuburl":"" }, { "uri":"dli_08_0352.html", + "node_id":"dli_08_0352.xml", "product_code":"dli", - "code":"41", + "code":"43", "des":"After the multiversion function is enabled, you can run the RESTORE TABLE statement to restore a table or partition of a specified version. For details about the syntax f", "doc_type":"sqlreference", "kw":"Restoring Multiversion Backup Data,Backing Up and Restoring Data of Multiple Versions,SQL Syntax Ref", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Restoring Multiversion Backup Data", "githuburl":"" }, { "uri":"dli_08_0353.html", + "node_id":"dli_08_0353.xml", "product_code":"dli", - "code":"42", + "code":"44", "des":"After the multiversion function is enabled, expired backup data will be directly deleted by the system when theinsert overwrite or truncate statement is executed. You can", "doc_type":"sqlreference", "kw":"Configuring the Trash Bin for Expired Multiversion Data,Backing Up and Restoring Data of Multiple Ve", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Configuring the Trash Bin for Expired Multiversion Data", "githuburl":"" }, { "uri":"dli_08_0355.html", + "node_id":"dli_08_0355.xml", "product_code":"dli", - "code":"43", + "code":"45", "des":"The retention period of multiversion backup data takes effect each time the insert overwrite or truncate statement is executed. 
If neither statement is executed for the t", "doc_type":"sqlreference", "kw":"Deleting Multiversion Backup Data,Backing Up and Restoring Data of Multiple Versions,SQL Syntax Refe", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Deleting Multiversion Backup Data", "githuburl":"" }, { - "uri":"dli_08_0118.html", + "uri":"en-us_topic_0000001571023676.html", + "node_id":"en-us_topic_0000001571023676.xml", "product_code":"dli", - "code":"44", + "code":"46", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"sqlreference", + "kw":"Table Lifecycle Management", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Table Lifecycle Management", + "githuburl":"" + }, + { + "uri":"en-us_topic_0000001621263317.html", + "node_id":"en-us_topic_0000001621263317.xml", + "product_code":"dli", + "code":"47", + "des":"DLI provides table lifecycle management to allow you to specify the lifecycle of a table when creating the table. 
DLI determines whether to reclaim a table based on the t", + "doc_type":"sqlreference", + "kw":"Specifying the Lifecycle of a Table When Creating the Table,Table Lifecycle Management,SQL Syntax Re", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Specifying the Lifecycle of a Table When Creating the Table", + "githuburl":"" + }, + { + "uri":"en-us_topic_0000001621382957.html", + "node_id":"en-us_topic_0000001621382957.xml", + "product_code":"dli", + "code":"48", + "des":"This section describes how to modify the lifecycle of an existing partitioned or non-partitioned table.When the lifecycle function is enabled for the first time, the syst", + "doc_type":"sqlreference", + "kw":"Modifying the Lifecycle of a Table,Table Lifecycle Management,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Modifying the Lifecycle of a Table", + "githuburl":"" + }, + { + "uri":"en-us_topic_0000001621542965.html", + "node_id":"en-us_topic_0000001621542965.xml", + "product_code":"dli", + "code":"49", + "des":"This section describes how to disable or restore the lifecycle of a specified table or partition.You can disable or restore the lifecycle of a table in either of the foll", + "doc_type":"sqlreference", + "kw":"Disabling or Restoring the Lifecycle of a Table,Table Lifecycle Management,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Disabling or Restoring the Lifecycle of a Table", + "githuburl":"" + }, + { + "uri":"dli_08_0118.html", + "node_id":"dli_08_0118.xml", + "product_code":"dli", + "code":"50", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating a Datasource Connection with an HBase Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Datasource Connection with an HBase Table", "githuburl":"" }, { "uri":"dli_08_0119.html", + "node_id":"dli_08_0119.xml", "product_code":"dli", - "code":"45", - "des":"This statement is used to create a DLI table and associate it with an existing HBase table.Before creating a DLI table and associating it with HBase, you need to create a", + "code":"51", + "des":"This statement is used to create a DLI table and associate it with an existing HBase table.In Spark cross-source development scenarios, there is a risk of password leakag", "doc_type":"sqlreference", "kw":"Creating a DLI Table and Associating It with HBase,Creating a Datasource Connection with an HBase Ta", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a DLI Table and Associating It with HBase", "githuburl":"" }, { "uri":"dli_08_0120.html", + "node_id":"dli_08_0120.xml", "product_code":"dli", - "code":"46", + "code":"52", "des":"This statement is used to insert data in a DLI table to the associated HBase table.Insert the SELECT query result into a table.INSERT INTO DLI_TABLE\n SELECT field1,field", "doc_type":"sqlreference", "kw":"Inserting Data to an HBase Table,Creating a Datasource Connection with an HBase Table,SQL Syntax Ref", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Inserting Data to an HBase Table", "githuburl":"" }, { "uri":"dli_08_0121.html", + "node_id":"dli_08_0121.xml", "product_code":"dli", - "code":"47", + "code":"53", "des":"This statement is used to query data in an HBase table.LIMIT is used to limit the 
query results. Only INT type is supported by the number parameter.The table to be querie", "doc_type":"sqlreference", "kw":"Querying an HBase Table,Creating a Datasource Connection with an HBase Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Querying an HBase Table", "githuburl":"" }, { "uri":"dli_08_0220.html", + "node_id":"dli_08_0220.xml", "product_code":"dli", - "code":"48", + "code":"54", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating a Datasource Connection with an OpenTSDB Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Datasource Connection with an OpenTSDB Table", "githuburl":"" }, { "uri":"dli_08_0122.html", + "node_id":"dli_08_0122.xml", "product_code":"dli", - "code":"49", + "code":"55", "des":"Run the CREATE TABLE statement to create the DLI table and associate it with the existing metric in OpenTSDB. 
This syntax supports the OpenTSDB of CloudTable and MRS.Befo", "doc_type":"sqlreference", "kw":"Creating a DLI Table and Associating It with OpenTSDB,Creating a Datasource Connection with an OpenT", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a DLI Table and Associating It with OpenTSDB", "githuburl":"" }, { "uri":"dli_08_0123.html", + "node_id":"dli_08_0123.xml", "product_code":"dli", - "code":"50", + "code":"56", "des":"Run the INSERT INTO statement to insert the data in the DLI table to the associated OpenTSDB metric.If no metric exists on the OpenTSDB, a new metric is automatically cre", "doc_type":"sqlreference", "kw":"Inserting Data to the OpenTSDB Table,Creating a Datasource Connection with an OpenTSDB Table,SQL Syn", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Inserting Data to the OpenTSDB Table", "githuburl":"" }, { "uri":"dli_08_0124.html", + "node_id":"dli_08_0124.xml", "product_code":"dli", - "code":"51", + "code":"57", "des":"This SELECT command is used to query data in an OpenTSDB table.If no metric exists in OpenTSDB, an error will be reported when the corresponding DLI table is queried.If t", "doc_type":"sqlreference", "kw":"Querying an OpenTSDB Table,Creating a Datasource Connection with an OpenTSDB Table,SQL Syntax Refere", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Querying an OpenTSDB Table", "githuburl":"" }, { "uri":"dli_08_0192.html", + "node_id":"dli_08_0192.xml", "product_code":"dli", - "code":"52", + "code":"58", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating a Datasource Connection with a DWS table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Datasource Connection with a DWS table", "githuburl":"" }, { "uri":"dli_08_0193.html", + "node_id":"dli_08_0193.xml", "product_code":"dli", - "code":"53", - "des":"This statement is used to create a DLI table and associate it with an existing DWS table.Before creating a DLI table and associating it with DWS, you need to create a dat", + "code":"59", + "des":"This statement is used to create a DLI table and associate it with an existing DWS table.In Spark cross-source development scenarios, there is a risk of password leakage ", "doc_type":"sqlreference", "kw":"Creating a DLI Table and Associating It with DWS,Creating a Datasource Connection with a DWS table,S", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a DLI Table and Associating It with DWS", "githuburl":"" }, { "uri":"dli_08_0194.html", + "node_id":"dli_08_0194.xml", "product_code":"dli", - "code":"54", + "code":"60", "des":"This statement is used to insert data in a DLI table to the associated DWS table.Insert the SELECT query result into a table.INSERT INTO DLI_TABLE\n SELECT field1,field2.", "doc_type":"sqlreference", "kw":"Inserting Data to the DWS Table,Creating a Datasource Connection with a DWS table,SQL Syntax Referen", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Inserting Data to the DWS Table", "githuburl":"" }, { "uri":"dli_08_0195.html", + "node_id":"dli_08_0195.xml", "product_code":"dli", - "code":"55", + "code":"61", "des":"This statement is used to query data in a DWS table.LIMIT is used to limit the query results. 
Only INT type is supported by the number parameter.The table to be queried m", "doc_type":"sqlreference", "kw":"Querying the DWS Table,Creating a Datasource Connection with a DWS table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Querying the DWS Table", "githuburl":"" }, { "uri":"dli_08_0196.html", + "node_id":"dli_08_0196.xml", "product_code":"dli", - "code":"56", + "code":"62", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating a Datasource Connection with an RDS Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Datasource Connection with an RDS Table", "githuburl":"" }, { "uri":"dli_08_0197.html", + "node_id":"dli_08_0197.xml", "product_code":"dli", - "code":"57", - "des":"This statement is used to create a DLI table and associate it with an existing RDS table. This function supports access to the MySQL and PostgreSQL clusters of RDS.Before", + "code":"63", + "des":"This statement is used to create a DLI table and associate it with an existing RDS table. 
This function supports access to the MySQL and PostgreSQL clusters of RDS.In Spa", "doc_type":"sqlreference", "kw":"Creating a DLI Table and Associating It with RDS,Creating a Datasource Connection with an RDS Table,", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a DLI Table and Associating It with RDS", "githuburl":"" }, { "uri":"dli_08_0198.html", + "node_id":"dli_08_0198.xml", "product_code":"dli", - "code":"58", + "code":"64", "des":"This statement is used to insert data in a DLI table to the associated RDS table.Insert the SELECT query result into a table.INSERT INTO DLI_TABLE\n SELECT field1,field2.", "doc_type":"sqlreference", "kw":"Inserting Data to the RDS Table,Creating a Datasource Connection with an RDS Table,SQL Syntax Refere", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Inserting Data to the RDS Table", "githuburl":"" }, { "uri":"dli_08_0199.html", + "node_id":"dli_08_0199.xml", "product_code":"dli", - "code":"59", + "code":"65", "des":"This statement is used to query data in an RDS table.LIMIT is used to limit the query results. Only INT type is supported by the number parameter.The table to be queried ", "doc_type":"sqlreference", "kw":"Querying the RDS Table,Creating a Datasource Connection with an RDS Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Querying the RDS Table", "githuburl":"" }, { "uri":"dli_08_0200.html", + "node_id":"dli_08_0200.xml", "product_code":"dli", - "code":"60", + "code":"66", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating a Datasource Connection with a CSS Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Datasource Connection with a CSS Table", "githuburl":"" }, { "uri":"dli_08_0201.html", + "node_id":"dli_08_0201.xml", "product_code":"dli", - "code":"61", - "des":"This statement is used to create a DLI table and associate it with an existing CSS table.Before creating a DLI table and associating it with CSS, you need to create a dat", + "code":"67", + "des":"This statement is used to create a DLI table and associate it with an existing CSS table.In Spark cross-source development scenarios, there is a risk of password leakage ", "doc_type":"sqlreference", "kw":"Creating a DLI Table and Associating It with CSS,Creating a Datasource Connection with a CSS Table,S", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a DLI Table and Associating It with CSS", "githuburl":"" }, { "uri":"dli_08_0202.html", + "node_id":"dli_08_0202.xml", "product_code":"dli", - "code":"62", + "code":"68", "des":"This statement is used to insert data in a DLI table to the associated CSS table.Insert the SELECT query result into a table.INSERT INTO DLI_TABLE\n SELECT field1,field2.", "doc_type":"sqlreference", "kw":"Inserting Data to the CSS Table,Creating a Datasource Connection with a CSS Table,SQL Syntax Referen", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Inserting Data to the CSS Table", "githuburl":"" }, { "uri":"dli_08_0203.html", + "node_id":"dli_08_0203.xml", "product_code":"dli", - "code":"63", + "code":"69", "des":"This statement is used to query data in a CSS table.LIMIT is used to limit the query results. 
Only INT type is supported by the number parameter.The table to be queried m", "doc_type":"sqlreference", "kw":"Querying the CSS Table,Creating a Datasource Connection with a CSS Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Querying the CSS Table", "githuburl":"" }, { "uri":"dli_08_0225.html", + "node_id":"dli_08_0225.xml", "product_code":"dli", - "code":"64", + "code":"70", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating a Datasource Connection with a DCS Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Datasource Connection with a DCS Table", "githuburl":"" }, { "uri":"dli_08_0226.html", + "node_id":"dli_08_0226.xml", "product_code":"dli", - "code":"65", - "des":"This statement is used to create a DLI table and associate it with an existing DCS key.Before creating a DLI table and associating it with DCS, you need to create a datas", + "code":"71", + "des":"This statement is used to create a DLI table and associate it with an existing DCS key.In Spark cross-source development scenarios, there is a risk of password leakage if", "doc_type":"sqlreference", "kw":"Creating a DLI Table and Associating It with DCS,Creating a Datasource Connection with a DCS Table,S", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a DLI Table and Associating It with DCS", "githuburl":"" }, { "uri":"dli_08_0227.html", + "node_id":"dli_08_0227.xml", "product_code":"dli", - "code":"66", + "code":"72", "des":"This statement is used to insert data in a DLI table to the DCS key.Insert the SELECT 
query result into a table.INSERT INTO DLI_TABLE\n SELECT field1,field2...\n [FROM DL", "doc_type":"sqlreference", "kw":"Inserting Data to a DCS Table,Creating a Datasource Connection with a DCS Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Inserting Data to a DCS Table", "githuburl":"" }, { "uri":"dli_08_0228.html", + "node_id":"dli_08_0228.xml", "product_code":"dli", - "code":"67", + "code":"73", "des":"This statement is used to query data in a DCS table.LIMIT is used to limit the query results. Only INT type is supported by the number parameter.Query data in the test_re", "doc_type":"sqlreference", "kw":"Querying the DCS Table,Creating a Datasource Connection with a DCS Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Querying the DCS Table", "githuburl":"" }, { "uri":"dli_08_0229.html", + "node_id":"dli_08_0229.xml", "product_code":"dli", - "code":"68", + "code":"74", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating a Datasource Connection with a DDS Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Datasource Connection with a DDS Table", "githuburl":"" }, { "uri":"dli_08_0230.html", + "node_id":"dli_08_0230.xml", "product_code":"dli", - "code":"69", - "des":"This statement is used to create a DLI table and associate it with an existing DDS collection.Before creating a DLI table and associating it with DDS, you need to create ", + "code":"75", + "des":"This statement is used to create a DLI table and associate it with an existing DDS collection.In Spark cross-source development scenarios, there is a risk of password lea", "doc_type":"sqlreference", "kw":"Creating a DLI Table and Associating It with DDS,Creating a Datasource Connection with a DDS Table,S", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a DLI Table and Associating It with DDS", "githuburl":"" }, { "uri":"dli_08_0231.html", + "node_id":"dli_08_0231.xml", "product_code":"dli", - "code":"70", + "code":"76", "des":"This statement is used to insert data in a DLI table to the associated DDS table.Insert the SELECT query result into a table.INSERT INTO DLI_TABLE\n SELECT field1,field2.", "doc_type":"sqlreference", "kw":"Inserting Data to the DDS Table,Creating a Datasource Connection with a DDS Table,SQL Syntax Referen", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Inserting Data to the DDS Table", "githuburl":"" }, { "uri":"dli_08_0232.html", + "node_id":"dli_08_0232.xml", "product_code":"dli", - "code":"71", + "code":"77", "des":"This statement is used to query data in a DDS table.LIMIT is used to limit the query results. 
Only INT type is supported by the number parameter.If schema information is ", "doc_type":"sqlreference", "kw":"Querying the DDS Table,Creating a Datasource Connection with a DDS Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Querying the DDS Table", "githuburl":"" }, { - "uri":"dli_08_0129.html", + "uri":"dli_08_0460.html", + "node_id":"dli_08_0460.xml", "product_code":"dli", - "code":"72", + "code":"78", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"sqlreference", + "kw":"Creating a Datasource Connection with an Oracle Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Creating a Datasource Connection with an Oracle Table", + "githuburl":"" + }, + { + "uri":"dli_08_0461.html", + "node_id":"dli_08_0461.xml", + "product_code":"dli", + "code":"79", + "des":"This statement is used to create a DLI table and associate it with an existing Oracle table.Before creating a DLI table and associating it with Oracle, you need to create", + "doc_type":"sqlreference", + "kw":"Creating a DLI Table and Associating It with Oracle,Creating a Datasource Connection with an Oracle ", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Creating a DLI Table and Associating It with Oracle", + "githuburl":"" + }, + { + "uri":"dli_08_0462.html", + "node_id":"dli_08_0462.xml", + "product_code":"dli", + "code":"80", + "des":"This statement is used to insert data into an associated Oracle table.Insert the SELECT query result into a table.INSERT INTO DLI_TABLE\n SELECT field1,field2...\n [FROM ", + "doc_type":"sqlreference", + "kw":"Inserting Data 
to an Oracle Table,Creating a Datasource Connection with an Oracle Table,SQL Syntax R", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Inserting Data to an Oracle Table", + "githuburl":"" + }, + { + "uri":"dli_08_0463.html", + "node_id":"dli_08_0463.xml", + "product_code":"dli", + "code":"81", + "des":"This statement is used to query data in an Oracle table.LIMIT is used to limit the query results. Only INT type is supported by the number parameter.If schema information", + "doc_type":"sqlreference", + "kw":"Querying an Oracle Table,Creating a Datasource Connection with an Oracle Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Querying an Oracle Table", + "githuburl":"" + }, + { + "uri":"dli_08_0129.html", + "node_id":"dli_08_0129.xml", + "product_code":"dli", + "code":"82", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Views", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Views", "githuburl":"" }, { "uri":"dli_08_0130.html", + "node_id":"dli_08_0130.xml", "product_code":"dli", - "code":"73", + "code":"83", "des":"This statement is used to create views.CREATE VIEW: creates views based on the given select statement. 
The result of the select statement will not be written into the dis", "doc_type":"sqlreference", "kw":"Creating a View,Views,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a View", "githuburl":"" }, { "uri":"dli_08_0131.html", + "node_id":"dli_08_0131.xml", "product_code":"dli", - "code":"74", + "code":"84", "des":"This statement is used to delete views.DROP: Deletes the metadata of a specified view. Although views and tables have many common points, the DROP TABLE statement cannot ", "doc_type":"sqlreference", "kw":"Deleting a View,Views,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Deleting a View", "githuburl":"" }, { "uri":"dli_08_0138.html", + "node_id":"dli_08_0138.xml", "product_code":"dli", - "code":"75", + "code":"85", "des":"This statement returns the logical plan and physical execution plan for the SQL statement.EXTENDED: After this keyword is specified, the logical and physical plans are ou", "doc_type":"sqlreference", "kw":"Viewing the Execution Plan,Spark SQL Syntax Reference,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Viewing the Execution Plan", "githuburl":"" }, { "uri":"dli_08_0139.html", + "node_id":"dli_08_0139.xml", "product_code":"dli", - "code":"76", + "code":"86", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Data Permissions Management", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Data Permissions Management", "githuburl":"" }, { "uri":"dli_08_0140.html", + "node_id":"dli_08_0140.xml", "product_code":"dli", - "code":"77", + "code":"87", "des":"Table 1 describes the SQL statement permission matrix in DLI in terms of permissions on databases, tables, and roles.For privilege granting or revocation on databases and", "doc_type":"sqlreference", "kw":"Data Permissions List,Data Permissions Management,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Data Permissions List", "githuburl":"" }, { "uri":"dli_08_0141.html", + "node_id":"dli_08_0141.xml", "product_code":"dli", - "code":"78", + "code":"88", "des":"This statement is used to create a role in the current database or a specified database.Only users with the CREATE_ROLE permission on the database can create roles. 
For e", "doc_type":"sqlreference", "kw":"Creating a Role,Data Permissions Management,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Role", "githuburl":"" }, { "uri":"dli_08_0148.html", + "node_id":"dli_08_0148.xml", "product_code":"dli", - "code":"79", + "code":"89", "des":"This statement is used to delete a role in the current database or a specified database.NoneThe role_name to be deleted must exist in the current database or the specifie", "doc_type":"sqlreference", "kw":"Deleting a Role,Data Permissions Management,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Deleting a Role", "githuburl":"" }, { "uri":"dli_08_0142.html", + "node_id":"dli_08_0142.xml", "product_code":"dli", - "code":"80", + "code":"90", "des":"This statement is used to bind a user with a role.NoneThe role_name and username must exist. 
Otherwise, an error will be reported.", "doc_type":"sqlreference", "kw":"Binding a Role,Data Permissions Management,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Binding a Role", "githuburl":"" }, { "uri":"dli_08_0147.html", + "node_id":"dli_08_0147.xml", "product_code":"dli", - "code":"81", + "code":"91", "des":"This statement is used to unbind the user with the role.Nonerole_name and user_name must exist and user_name has been bound to role_name.To unbind the user_name1 from rol", "doc_type":"sqlreference", "kw":"Unbinding a Role,Data Permissions Management,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Unbinding a Role", "githuburl":"" }, { "uri":"dli_08_0143.html", + "node_id":"dli_08_0143.xml", "product_code":"dli", - "code":"82", + "code":"92", "des":"This statement is used to display all roles or roles bound to the user_name in the current database.ALL: Displays all roles.Keywords ALL and user_name cannot coexist.To d", "doc_type":"sqlreference", "kw":"Displaying a Role,Data Permissions Management,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Displaying a Role", "githuburl":"" }, { "uri":"dli_08_0144.html", + "node_id":"dli_08_0144.xml", "product_code":"dli", - "code":"83", + "code":"93", "des":"This statement is used to grant permissions to a user or role.ROLE: The subsequent role_name must be a role.USER: The subsequent user_name must be a user.The privilege mu", "doc_type":"sqlreference", "kw":"Granting a Permission,Data Permissions Management,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Granting a Permission", "githuburl":"" }, { "uri":"dli_08_0146.html", + "node_id":"dli_08_0146.xml", "product_code":"dli", - 
"code":"84", + "code":"94", "des":"This statement is used to revoke permissions granted to a user or role.ROLE: The subsequent role_name must be a role.USER: The subsequent user_name must be a user.The pri", "doc_type":"sqlreference", "kw":"Revoking a Permission,Data Permissions Management,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Revoking a Permission", "githuburl":"" }, { "uri":"dli_08_0145.html", + "node_id":"dli_08_0145.xml", "product_code":"dli", - "code":"85", - "des":"This statement is used to show the permissions granted to a user or role in the resource.ROLE: The subsequent role_name must be a role.USER: The subsequent user_name must", + "code":"95", + "des":"This statement is used to show the permissions granted to a user on a resource.USER: The subsequent user_name must be a user.The resource can be a queue, database, table,", "doc_type":"sqlreference", - "kw":"Displaying the Granted Permissions,Data Permissions Management,SQL Syntax Reference", - "title":"Displaying the Granted Permissions", + "kw":"Showing Granted Permissions,Data Permissions Management,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Showing Granted Permissions", "githuburl":"" }, { "uri":"dli_08_0149.html", + "node_id":"dli_08_0149.xml", "product_code":"dli", - "code":"86", + "code":"96", "des":"This statement is used to display the binding relationship between roles and a user in the current database.NoneThe ROLE variable must exist.", "doc_type":"sqlreference", "kw":"Displaying the Binding Relationship Between All Roles and Users,Data Permissions Management,SQL Synt", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Displaying the Binding Relationship Between All Roles and Users", "githuburl":"" }, { "uri":"dli_08_0056.html", + 
"node_id":"dli_08_0056.xml", "product_code":"dli", - "code":"87", + "code":"97", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Data Types", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Data Types", "githuburl":"" }, { "uri":"dli_08_0057.html", + "node_id":"dli_08_0057.xml", "product_code":"dli", - "code":"88", + "code":"98", "des":"Data type is a basic attribute of data. It is used to distinguish different types of data. Different data types occupy different storage space and support different opera", "doc_type":"sqlreference", "kw":"Overview,Data Types,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Overview", "githuburl":"" }, { "uri":"dli_08_0058.html", + "node_id":"dli_08_0058.xml", "product_code":"dli", - "code":"89", + "code":"99", "des":"Table 1 lists the primitive data types supported by DLI.VARCHAR and CHAR data is stored in STRING type on DLI. 
Therefore, the string that exceeds the specified length wil", "doc_type":"sqlreference", "kw":"Primitive Data Types,Data Types,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Primitive Data Types", "githuburl":"" }, { "uri":"dli_08_0059.html", + "node_id":"dli_08_0059.xml", "product_code":"dli", - "code":"90", + "code":"100", "des":"Spark SQL supports complex data types, as shown in Table 1.When a table containing fields of the complex data type is created, the storage format of this table cannot be ", "doc_type":"sqlreference", "kw":"Complex Data Types,Data Types,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Complex Data Types", "githuburl":"" }, { "uri":"dli_08_0282.html", + "node_id":"dli_08_0282.xml", "product_code":"dli", - "code":"91", + "code":"101", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"User-Defined Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"User-Defined Functions", "githuburl":"" }, { "uri":"dli_08_0283.html", + "node_id":"dli_08_0283.xml", "product_code":"dli", - "code":"92", - "des":"DLI allows you to create and use user-defined functions (UDF) and user-defined table functions (UDTF) in Spark jobs.If a function with the same name exists in the databas", + "code":"102", + "des":"DLI allows you to create and use user-defined functions (UDF) and user-defined table functions (UDTF) in Spark jobs.OrIf a function with the same name exists in the datab", "doc_type":"sqlreference", "kw":"Creating a Function,User-Defined Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Function", "githuburl":"" }, { "uri":"dli_08_0284.html", + "node_id":"dli_08_0284.xml", "product_code":"dli", - "code":"93", + "code":"103", "des":"This statement is used to delete functions.TEMPORARY: Indicates whether the function to be deleted is a temporary function.IF EXISTS: Used when the function to be deleted", "doc_type":"sqlreference", "kw":"Deleting a Function,User-Defined Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Deleting a Function", "githuburl":"" }, { "uri":"dli_08_0281.html", + "node_id":"dli_08_0281.xml", "product_code":"dli", - "code":"94", + "code":"104", "des":"Displays information about a specified function.EXTENDED: displays extended usage information.The metadata (implementation class and usage) of an existing function is ret", "doc_type":"sqlreference", "kw":"Displaying Function Details,User-Defined 
Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Displaying Function Details", "githuburl":"" }, { "uri":"dli_08_0285.html", + "node_id":"dli_08_0285.xml", "product_code":"dli", - "code":"95", + "code":"105", "des":"View all functions in the current project.In the preceding statement, regex is a regular expression. For details about its parameters, see Table 1.For details about other", "doc_type":"sqlreference", "kw":"Displaying All Functions,User-Defined Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Displaying All Functions", "githuburl":"" }, { "uri":"dli_08_0064.html", + "node_id":"dli_08_0064.xml", "product_code":"dli", - "code":"96", + "code":"106", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Built-in Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Built-in Functions", "githuburl":"" }, { - "uri":"dli_08_0065.html", + "uri":"dli_08_0471.html", + "node_id":"dli_08_0471.xml", "product_code":"dli", - "code":"97", - "des":"Table 1 lists the mathematical functions supported in DLI.", + "code":"107", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", - "kw":"Mathematical Functions,Built-in Functions,SQL Syntax Reference", - "title":"Mathematical Functions", - "githuburl":"" - }, - { - "uri":"dli_08_0066.html", - "product_code":"dli", - "code":"98", - "des":"Table 1 lists the date functions supported in DLI.", - "doc_type":"sqlreference", - "kw":"Date Functions,Built-in Functions,SQL Syntax Reference", + "kw":"Date Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Date Functions", "githuburl":"" }, { - "uri":"dli_08_0067.html", + "uri":"dli_08_0066.html", + "node_id":"dli_08_0066.xml", "product_code":"dli", - "code":"99", - "des":"Table 1 lists the string functions supported by DLI.", + "code":"108", + "des":"Table 1 lists the date functions supported by DLI.", "doc_type":"sqlreference", - "kw":"String Functions,Built-in Functions,SQL Syntax Reference", + "kw":"Overview,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Overview", + "githuburl":"" + }, + { + "uri":"dli_spark_add_months.html", + "node_id":"dli_spark_add_months.xml", + "product_code":"dli", + "code":"109", + "des":"This function is used to calculate the date after a date value is increased by a specified number of months. 
That is, it calculates the data that is num_months after star", + "doc_type":"sqlreference", + "kw":"add_months,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"add_months", + "githuburl":"" + }, + { + "uri":"dli_spark_current_date.html", + "node_id":"dli_spark_current_date.xml", + "product_code":"dli", + "code":"110", + "des":"This function is used to return the current date, in the yyyy-mm-dd format.Similar function: getdate. The getdate function is used to return the current system time, in t", + "doc_type":"sqlreference", + "kw":"current_date,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"current_date", + "githuburl":"" + }, + { + "uri":"dli_spark_current_timestamp.html", + "node_id":"dli_spark_current_timestamp.xml", + "product_code":"dli", + "code":"111", + "des":"This function is used to return the current timestamp.NoneThe return value is of the TIMESTAMP type.The value 1692002816300 is returned.", + "doc_type":"sqlreference", + "kw":"current_timestamp,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"current_timestamp", + "githuburl":"" + }, + { + "uri":"dli_spark_date_add.html", + "node_id":"dli_spark_date_add.xml", + "product_code":"dli", + "code":"112", + "des":"This function is used to calculate the number of days in which start_date is increased by days.To obtain the date with a specified change range based on the current date,", + "doc_type":"sqlreference", + "kw":"date_add,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"date_add", + "githuburl":"" + }, + { + "uri":"dli_spark_dateadd.html", + "node_id":"dli_spark_dateadd.xml", + "product_code":"dli", + 
"code":"113", + "des":"This function is used to change a date based on datepart and delta.To obtain the date with a specified change range based on the current date, use this function together ", + "doc_type":"sqlreference", + "kw":"dateadd,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"dateadd", + "githuburl":"" + }, + { + "uri":"dli_spark_date_sub.html", + "node_id":"dli_spark_date_sub.xml", + "product_code":"dli", + "code":"114", + "des":"This function is used to calculate the number of days in which start_date is subtracted by days.To obtain the date with a specified change range based on the current date", + "doc_type":"sqlreference", + "kw":"date_sub,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"date_sub", + "githuburl":"" + }, + { + "uri":"dli_spark_date_format.html", + "node_id":"dli_spark_date_format.xml", + "product_code":"dli", + "code":"115", + "des":"This function is used to convert a date into a string based on the format specified by format.The return value is of the STRING type.If the value of date is not of the DA", + "doc_type":"sqlreference", + "kw":"date_format,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"date_format", + "githuburl":"" + }, + { + "uri":"dli_spark_datediff.html", + "node_id":"dli_spark_datediff.xml", + "product_code":"dli", + "code":"116", + "des":"This function is used to calculate the difference between date1 and date2.Similar function: datediff1. 
The datediff1 function is used to calculate the difference between ", + "doc_type":"sqlreference", + "kw":"datediff,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"datediff", + "githuburl":"" + }, + { + "uri":"dli_spark_datediff1.html", + "node_id":"dli_spark_datediff1.xml", + "product_code":"dli", + "code":"117", + "des":"This function is used to calculate the difference between date1 and date2 and return the difference in a specified datepart.Similar function: datediff. The datediff funct", + "doc_type":"sqlreference", + "kw":"datediff1,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"datediff1", + "githuburl":"" + }, + { + "uri":"dli_spark_datepart.html", + "node_id":"dli_spark_datepart.xml", + "product_code":"dli", + "code":"118", + "des":"This function is used to calculate the value that meets the specified datepart in date.The return value is of the BIGINT type.If the value of date is not of the DATE or S", + "doc_type":"sqlreference", + "kw":"datepart,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"datepart", + "githuburl":"" + }, + { + "uri":"dli_spark_datetrunc.html", + "node_id":"dli_spark_datetrunc.xml", + "product_code":"dli", + "code":"119", + "des":"This function is used to calculate the date otained through the truncation of a specified date based on a specified datepart.It truncates the date before the specified da", + "doc_type":"sqlreference", + "kw":"datetrunc,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"datetrunc", + "githuburl":"" + }, + { + "uri":"dli_spark_daydayofmonth.html", + "node_id":"dli_spark_daydayofmonth.xml", + 
"product_code":"dli", + "code":"120", + "des":"This function is used to return the day of a specified date.The return value is of the INT type.If the value of date is not of the DATE or STRING type, the error message ", + "doc_type":"sqlreference", + "kw":"day/dayofmonth,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"day/dayofmonth", + "githuburl":"" + }, + { + "uri":"dli_spark_from_unixtime.html", + "node_id":"dli_spark_from_unixtime.xml", + "product_code":"dli", + "code":"121", + "des":"This function is used to convert a timestamp represented by a numeric UNIX value to a date value.The return value is of the STRING type, in the yyyy-mm-dd hh:mi:ss format", + "doc_type":"sqlreference", + "kw":"from_unixtime,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"from_unixtime", + "githuburl":"" + }, + { + "uri":"dli_spark_from_utc_timestamp.html", + "node_id":"dli_spark_from_utc_timestamp.xml", + "product_code":"dli", + "code":"122", + "des":"This function is used to convert a UTC timestamp to a UNIX timestamp in a given time zone.The return value is of the TIMESTAMP type.If the value of timestamp is not of th", + "doc_type":"sqlreference", + "kw":"from_utc_timestamp,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"from_utc_timestamp", + "githuburl":"" + }, + { + "uri":"dli_spark_getdate.html", + "node_id":"dli_spark_getdate.xml", + "product_code":"dli", + "code":"123", + "des":"This function is used to return the current system time, in the yyyy-mm-dd hh:mi:ss format.Similar function: current_date. 
The current_date function is used to return the", + "doc_type":"sqlreference", + "kw":"getdate,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"getdate", + "githuburl":"" + }, + { + "uri":"dli_spark_hour.html", + "node_id":"dli_spark_hour.xml", + "product_code":"dli", + "code":"124", + "des":"This function is used to return the hour (from 0 to 23) of a specified time.The return value is of the INT type.If the value of date is not of the DATE or STRING type, th", + "doc_type":"sqlreference", + "kw":"hour,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"hour", + "githuburl":"" + }, + { + "uri":"dli_spark_isdate.html", + "node_id":"dli_spark_isdate.xml", + "product_code":"dli", + "code":"125", + "des":"This function is used to determine whether a date string can be converted into a date value based on a specified format.The return value is of the BOOLEAN type.If the val", + "doc_type":"sqlreference", + "kw":"isdate,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"isdate", + "githuburl":"" + }, + { + "uri":"dli_spark_last_day.html", + "node_id":"dli_spark_last_day.xml", + "product_code":"dli", + "code":"126", + "des":"This function is used to return the last day of the month a date belongs to.Similar function: lastday. 
The lastday function is used to return the last day of the month a ", + "doc_type":"sqlreference", + "kw":"last_day,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"last_day", + "githuburl":"" + }, + { + "uri":"dli_spark_lastday.html", + "node_id":"dli_spark_lastday.xml", + "product_code":"dli", + "code":"127", + "des":"This function is used to return the last day of the month a date belongs to. The hour, minute, and second part is 00:00:00.Similar function: last_day. The last_day functi", + "doc_type":"sqlreference", + "kw":"lastday,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"lastday", + "githuburl":"" + }, + { + "uri":"dli_spark_minute.html", + "node_id":"dli_spark_minute.xml", + "product_code":"dli", + "code":"128", + "des":"This function is used to return the minute (from 0 to 59) of a specified time.The return value is of the INT type.If the value of date is not of the DATE or STRING type, ", + "doc_type":"sqlreference", + "kw":"minute,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"minute", + "githuburl":"" + }, + { + "uri":"dli_spark_month.html", + "node_id":"dli_spark_month.xml", + "product_code":"dli", + "code":"129", + "des":"This function is used to return the month (from January to December) of a specified time.The return value is of the INT type.If the value of date is not of the DATE or ST", + "doc_type":"sqlreference", + "kw":"month,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"month", + "githuburl":"" + }, + { + "uri":"dli_spark_months_between.html", + "node_id":"dli_spark_months_between.xml", + "product_code":"dli", + "code":"130", + 
"des":"This function returns the month difference between date1 and date2.The return value is of the DOUBLE type.If the values of date1 and date2 are not of the DATE or STRING t", + "doc_type":"sqlreference", + "kw":"months_between,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"months_between", + "githuburl":"" + }, + { + "uri":"dli_spark_next_day.html", + "node_id":"dli_spark_next_day.xml", + "product_code":"dli", + "code":"131", + "des":"This function is used to return the date closest to day_of_week after start_date.The return value is of the DATE type, in the yyyy-mm-dd format.If the value of start_date", + "doc_type":"sqlreference", + "kw":"next_day,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"next_day", + "githuburl":"" + }, + { + "uri":"dli_spark_quarter.html", + "node_id":"dli_spark_quarter.xml", + "product_code":"dli", + "code":"132", + "des":"This function is used to return the quarter of a date. 
The value ranges from 1 to 4.The return value is of the INT type.If the value of date is not of the DATE or STRING ", + "doc_type":"sqlreference", + "kw":"quarter,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"quarter", + "githuburl":"" + }, + { + "uri":"dli_spark_second.html", + "node_id":"dli_spark_second.xml", + "product_code":"dli", + "code":"133", + "des":"This function is used to return the second (from 0 to 59) of a specified time.The return value is of the INT type.If the value of date is not of the DATE or STRING type, ", + "doc_type":"sqlreference", + "kw":"second,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"second", + "githuburl":"" + }, + { + "uri":"dli_spark_to_char.html", + "node_id":"dli_spark_to_char.xml", + "product_code":"dli", + "code":"134", + "des":"This function is used to convert a date into a string in a specified format.The return value is of the STRING type.If the value of date is not of the DATE or STRING type,", + "doc_type":"sqlreference", + "kw":"to_char,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"to_char", + "githuburl":"" + }, + { + "uri":"dli_spark_to_date.html", + "node_id":"dli_spark_to_date.xml", + "product_code":"dli", + "code":"135", + "des":"This function is used to return the year, month, and day in a time.Similar function: to_date1. 
The to_date1 function is used to convert a string in a specified format to ", + "doc_type":"sqlreference", + "kw":"to_date,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"to_date", + "githuburl":"" + }, + { + "uri":"dli_spark_to_date1.html", + "node_id":"dli_spark_to_date1.xml", + "product_code":"dli", + "code":"136", + "des":"This function is used to convert a string in a specified format to a date value.Similar function: to_date. The to_date function is used to return the year, month, and day", + "doc_type":"sqlreference", + "kw":"to_date1,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"to_date1", + "githuburl":"" + }, + { + "uri":"dli_spark_to_utc_timestamp.html", + "node_id":"dli_spark_to_utc_timestamp.xml", + "product_code":"dli", + "code":"137", + "des":"This function is used to convert a timestamp in a given time zone to a UTC timestamp.The return value is of the BIGINT type.If the value of timestamp is not of the DATE o", + "doc_type":"sqlreference", + "kw":"to_utc_timestamp,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"to_utc_timestamp", + "githuburl":"" + }, + { + "uri":"dli_spark_trunc.html", + "node_id":"dli_spark_trunc.xml", + "product_code":"dli", + "code":"138", + "des":"This function is used to reset a date to a specific format.Resetting means returning to default values, where the default values for year, month, and day are 01, and the ", + "doc_type":"sqlreference", + "kw":"trunc,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"trunc", + "githuburl":"" + }, + { + "uri":"dli_spark_unix_timestamp.html", + "node_id":"dli_spark_unix_timestamp.xml", + 
"product_code":"dli", + "code":"139", + "des":"This function is used to convert a date value to a numeric date value in UNIX format.The function returns the first ten digits of the timestamp in normal UNIX format.The ", + "doc_type":"sqlreference", + "kw":"unix_timestamp,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"unix_timestamp", + "githuburl":"" + }, + { + "uri":"dli_spark_weekday.html", + "node_id":"dli_spark_weekday.xml", + "product_code":"dli", + "code":"140", + "des":"This function is used to return the day of the current week.The return value is of the INT type.If Monday is used as the first day of a week, the value 0 is returned. For", + "doc_type":"sqlreference", + "kw":"weekday,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"weekday", + "githuburl":"" + }, + { + "uri":"dli_spark_weekofyear.html", + "node_id":"dli_spark_weekofyear.xml", + "product_code":"dli", + "code":"141", + "des":"This function is used to return the week number (from 0 to 53) of a specified date.The return value is of the INT type.If the value of date is not of the DATE or STRING t", + "doc_type":"sqlreference", + "kw":"weekofyear,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"weekofyear", + "githuburl":"" + }, + { + "uri":"dli_spark_year.html", + "node_id":"dli_spark_year.xml", + "product_code":"dli", + "code":"142", + "des":"This function is used to return the year of a specified date.The return value is of the INT type.If the value of date is not of the DATE or STRING type, the error message", + "doc_type":"sqlreference", + "kw":"year,Date Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + 
"title":"year", + "githuburl":"" + }, + { + "uri":"dli_08_0472.html", + "node_id":"dli_08_0472.xml", + "product_code":"dli", + "code":"143", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"sqlreference", + "kw":"String Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"String Functions", "githuburl":"" }, { - "uri":"dli_08_0068.html", + "uri":"dli_08_0067.html", + "node_id":"dli_08_0067.xml", "product_code":"dli", - "code":"100", - "des":"An aggregate function performs a calculation operation on a set of input values and returns a value. For example, the COUNT function counts the number of rows retrieved b", + "code":"144", + "des":"Table 1 lists the string functions supported by DLI.", "doc_type":"sqlreference", - "kw":"Aggregate Functions,Built-in Functions,SQL Syntax Reference", + "kw":"Overview,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Overview", + "githuburl":"" + }, + { + "uri":"dli_spark_ascii.html", + "node_id":"dli_spark_ascii.xml", + "product_code":"dli", + "code":"145", + "des":"This function is used to return the ASCII code of the first character in str.The return value is of the BIGINT type.If the value of str is not of the STRING, BIGINT, DOUB", + "doc_type":"sqlreference", + "kw":"ascii,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"ascii", + "githuburl":"" + }, + { + "uri":"dli_spark_concat.html", + "node_id":"dli_spark_concat.xml", + "product_code":"dli", + "code":"146", + "des":"This function is used to concatenate arrays or strings.If 
multiple arrays are used as the input, all elements in the arrays are connected to generate a new array.If multi", + "doc_type":"sqlreference", + "kw":"concat,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"concat", + "githuburl":"" + }, + { + "uri":"dli_spark_concat_ws.html", + "node_id":"dli_spark_concat_ws.xml", + "product_code":"dli", + "code":"147", + "des":"This function is used to return a string concatenated from multiple input strings that are separated by specified separators.orReturns the result of joining all the strin", + "doc_type":"sqlreference", + "kw":"concat_ws,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"concat_ws", + "githuburl":"" + }, + { + "uri":"dli_spark_char_matchcount.html", + "node_id":"dli_spark_char_matchcount.xml", + "product_code":"dli", + "code":"148", + "des":"This parameter is used to return the number of characters in str1 that appear in str2.The return value is of the BIGINT type.If the value of str1 or str2 is NULL, NULL is", + "doc_type":"sqlreference", + "kw":"char_matchcount,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"char_matchcount", + "githuburl":"" + }, + { + "uri":"dli_spark_encode.html", + "node_id":"dli_spark_encode.xml", + "product_code":"dli", + "code":"149", + "des":"This function is used to encode str in charset format.encode(string , string )The return value is of the BINARY type.If the value of str or charset is NULL,", + "doc_type":"sqlreference", + "kw":"encode,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"encode", + "githuburl":"" + }, + { + "uri":"dli_spark_find_in_set.html", + 
"node_id":"dli_spark_find_in_set.xml", + "product_code":"dli", + "code":"150", + "des":"This function is used to return the position (stating from 1) of str1 in str2 separated by commas (,).The return value is of the BIGINT type.If str1 cannot be matched in ", + "doc_type":"sqlreference", + "kw":"find_in_set,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"find_in_set", + "githuburl":"" + }, + { + "uri":"dli_spark_get_json_object.html", + "node_id":"dli_spark_get_json_object.xml", + "product_code":"dli", + "code":"151", + "des":"This function is used to parse the JSON object in a specified JSON path. The function will return NULL if the JSON object is invalid.The return value is of the STRING typ", + "doc_type":"sqlreference", + "kw":"get_json_object,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"get_json_object", + "githuburl":"" + }, + { + "uri":"dli_spark_instr.html", + "node_id":"dli_spark_instr.xml", + "product_code":"dli", + "code":"152", + "des":"This function is used to return the index of substr that appears earliest in str.It returns NULL if either of the arguments are NULL and returns 0 if substr does not exis", + "doc_type":"sqlreference", + "kw":"instr,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"instr", + "githuburl":"" + }, + { + "uri":"dli_spark_instr1.html", + "node_id":"dli_spark_instr1.xml", + "product_code":"dli", + "code":"153", + "des":"This function is used to return the position of substring str2 in string str1.Similar function: instr. 
The instr function is used to return the index of substr that appea", + "doc_type":"sqlreference", + "kw":"instr1,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"instr1", + "githuburl":"" + }, + { + "uri":"dli_spark_initcap.html", + "node_id":"dli_spark_initcap.xml", + "product_code":"dli", + "code":"154", + "des":"This function is used to convert the first letter of each word of a string to upper case and all other letters to lower case.The return value is of the STRING type. In th", + "doc_type":"sqlreference", + "kw":"initcap,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"initcap", + "githuburl":"" + }, + { + "uri":"dli_spark_keyvalue.html", + "node_id":"dli_spark_keyvalue.xml", + "product_code":"dli", + "code":"155", + "des":"This function is used to split str by split1, convert each group into a key-value pair by split2, and return the value corresponding to the key.The return value is of the", + "doc_type":"sqlreference", + "kw":"keyvalue,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"keyvalue", + "githuburl":"" + }, + { + "uri":"dli_spark_length.html", + "node_id":"dli_spark_length.xml", + "product_code":"dli", + "code":"156", + "des":"This function is used to return the length of a string.Similar function: lengthb. 
The lengthb function is used to return the length of string str in bytes and return a va", + "doc_type":"sqlreference", + "kw":"length,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"length", + "githuburl":"" + }, + { + "uri":"dli_spark_lengthb.html", + "node_id":"dli_spark_lengthb.xml", + "product_code":"dli", + "code":"157", + "des":"This function is used to return the length of a specified string in bytes.Similar function: length. The length function is used to return the length of a string and retur", + "doc_type":"sqlreference", + "kw":"lengthb,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"lengthb", + "githuburl":"" + }, + { + "uri":"dli_spark_levenshtein.html", + "node_id":"dli_spark_levenshtein.xml", + "product_code":"dli", + "code":"158", + "des":"This function is used to returns the Levenshtein distance between two strings, for example, levenshtein('kitten','sitting') = 3.Levenshtein distance is a type of edit dis", + "doc_type":"sqlreference", + "kw":"levenshtein,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"levenshtein", + "githuburl":"" + }, + { + "uri":"dli_spark_locate.html", + "node_id":"dli_spark_locate.xml", + "product_code":"dli", + "code":"159", + "des":"This function is used to return the position of substr in str. 
You can specify the starting position of your search using \"start_pos,\" which starts from 1.The return valu", + "doc_type":"sqlreference", + "kw":"locate,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"locate", + "githuburl":"" + }, + { + "uri":"dli_spark_lower_lcase.html", + "node_id":"dli_spark_lower_lcase.xml", + "product_code":"dli", + "code":"160", + "des":"This function is used to convert all characters of a string to the lower case.The return value is of the STRING type.If the value of the input parameter is not of the STR", + "doc_type":"sqlreference", + "kw":"lower/lcase,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"lower/lcase", + "githuburl":"" + }, + { + "uri":"dli_spark_lpad.html", + "node_id":"dli_spark_lpad.xml", + "product_code":"dli", + "code":"161", + "des":"This function is used to return a string of a specified length. 
If the length of the given string (str1) is shorter than the specified length (length), the given string i", + "doc_type":"sqlreference", + "kw":"lpad,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"lpad", + "githuburl":"" + }, + { + "uri":"dli_spark_ltrim.html", + "node_id":"dli_spark_ltrim.xml", + "product_code":"dli", + "code":"162", + "des":"This function is used to remove characters from the left of str.If trimChars is not specified, spaces are removed by default.If trimChars is specified, the function remov", + "doc_type":"sqlreference", + "kw":"ltrim,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"ltrim", + "githuburl":"" + }, + { + "uri":"dli_spark_parse_url.html", + "node_id":"dli_spark_parse_url.xml", + "product_code":"dli", + "code":"163", + "des":"This character is used to return the specified part of a given URL. 
Valid values of partToExtract include HOST, PATH, QUERY, REF, PROTOCOL, AUTHORITY, FILE, and USERINFO.", + "doc_type":"sqlreference", + "kw":"parse_url,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"parse_url", + "githuburl":"" + }, + { + "uri":"dli_spark_printf.html", + "node_id":"dli_spark_printf.xml", + "product_code":"dli", + "code":"164", + "des":"This function is used to print the input in a specific format.The return value is of the STRING type.The value is returned after the parameters that filled in Obj are spe", + "doc_type":"sqlreference", + "kw":"printf,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"printf", + "githuburl":"" + }, + { + "uri":"dli_spark_regexp_count.html", + "node_id":"dli_spark_regexp_count.xml", + "product_code":"dli", + "code":"165", + "des":"This function is used to return the number of substrings that match a specified pattern in the source, starting from the start_position position.The return value is of th", + "doc_type":"sqlreference", + "kw":"regexp_count,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"regexp_count", + "githuburl":"" + }, + { + "uri":"dli_spark_regexp_extract.html", + "node_id":"dli_spark_regexp_extract.xml", + "product_code":"dli", + "code":"166", + "des":"This function is used to match the string source based on the pattern grouping rule and return the string content that matches groupid.regexp_extract(string , str", + "doc_type":"sqlreference", + "kw":"regexp_extract,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"regexp_extract", + "githuburl":"" + }, + { + "uri":"dli_spark_replace.html", + 
"node_id":"dli_spark_replace.xml", + "product_code":"dli", + "code":"167", + "des":"This function is used to replace the part in a specified string that is the same as the string old with the string new and return the result.If the string has no same cha", + "doc_type":"sqlreference", + "kw":"replace,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"replace", + "githuburl":"" + }, + { + "uri":"dli_spark_regexp_replace.html", + "node_id":"dli_spark_regexp_replace.xml", + "product_code":"dli", + "code":"168", + "des":"This function has slight variations in its functionality depending on the version of Spark being used.For Spark 2.4.5 or earlier: Replaces the substring that matches patt", + "doc_type":"sqlreference", + "kw":"regexp_replace,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"regexp_replace", + "githuburl":"" + }, + { + "uri":"dli_spark_regexp_replace1.html", + "node_id":"dli_spark_regexp_replace1.xml", + "product_code":"dli", + "code":"169", + "des":"This function is used to replace the substring that matches pattern for the occurrence time in the source string with the specified string replace_string and return the r", + "doc_type":"sqlreference", + "kw":"regexp_replace1,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"regexp_replace1", + "githuburl":"" + }, + { + "uri":"dli_spark_regexp_instr.html", + "node_id":"dli_spark_regexp_instr.xml", + "product_code":"dli", + "code":"170", + "des":"This function is used to return the start or end position of the substring that matches a specified pattern for the occurrence time, starting from start_position in the s", + "doc_type":"sqlreference", + "kw":"regexp_instr,String Functions,SQL Syntax Reference", + 
"search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"regexp_instr", + "githuburl":"" + }, + { + "uri":"dli_spark_regexp_substr.html", + "node_id":"dli_spark_regexp_substr.xml", + "product_code":"dli", + "code":"171", + "des":"This function is used to return the substring that matches a specified pattern for the occurrence time, starting from start_position in the string source.The return value", + "doc_type":"sqlreference", + "kw":"regexp_substr,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"regexp_substr", + "githuburl":"" + }, + { + "uri":"dli_spark_repeat.html", + "node_id":"dli_spark_repeat.xml", + "product_code":"dli", + "code":"172", + "des":"This function is used to return the string after str is repeated for n times.The return value is of the STRING type.If the value of str is not of the STRING, BIGINT, DOUB", + "doc_type":"sqlreference", + "kw":"repeat,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"repeat", + "githuburl":"" + }, + { + "uri":"dli_spark_reverse.html", + "node_id":"dli_spark_reverse.xml", + "product_code":"dli", + "code":"173", + "des":"This function is used to return a string in reverse order.The return value is of the STRING type.If the value of str is not of the STRING, BIGINT, DOUBLE, DECIMAL, or DAT", + "doc_type":"sqlreference", + "kw":"reverse,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"reverse", + "githuburl":"" + }, + { + "uri":"dli_spark_rpad.html", + "node_id":"dli_spark_rpad.xml", + "product_code":"dli", + "code":"174", + "des":"This function is used to right pad str1 with str2 to the specified length.The return value is of the STRING type.If the value of length 
is smaller than the number of digi", + "doc_type":"sqlreference", + "kw":"rpad,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"rpad", + "githuburl":"" + }, + { + "uri":"dli_spark_rtrim.html", + "node_id":"dli_spark_rtrim.xml", + "product_code":"dli", + "code":"175", + "des":"This function is used to remove characters from the right of str.If trimChars is not specified, spaces are removed by default.If trimChars is specified, the function remo", + "doc_type":"sqlreference", + "kw":"rtrim,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"rtrim", + "githuburl":"" + }, + { + "uri":"dli_spark_soundex.html", + "node_id":"dli_spark_soundex.xml", + "product_code":"dli", + "code":"176", + "des":"This function is used to return the soundex string from str, for example, soundex('Miller') = M460.The return value is of the STRING type.If the value of str is NULL, NUL", + "doc_type":"sqlreference", + "kw":"soundex,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"soundex", + "githuburl":"" + }, + { + "uri":"dli_spark_space.html", + "node_id":"dli_spark_space.xml", + "product_code":"dli", + "code":"177", + "des":"This function is used to return a specified number of spaces.The return value is of the STRING type.If the value of n is empty, an error is reported.If the value of n is ", + "doc_type":"sqlreference", + "kw":"space,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"space", + "githuburl":"" + }, + { + "uri":"dli_spark_substr_substring.html", + "node_id":"dli_spark_substr_substring.xml", + "product_code":"dli", + "code":"178", + "des":"This function is used to return the 
substring of str, starting from start_position and with a length of length.orThe return value is of the STRING type.If the value of st", + "doc_type":"sqlreference", + "kw":"substr/substring,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"substr/substring", + "githuburl":"" + }, + { + "uri":"dli_spark_substring_index.html", + "node_id":"dli_spark_substring_index.xml", + "product_code":"dli", + "code":"179", + "des":"This function is used to truncate the string before the count separator of str. If the value of count is positive, the string is truncated from the left. If the value of ", + "doc_type":"sqlreference", + "kw":"substring_index,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"substring_index", + "githuburl":"" + }, + { + "uri":"dli_spark_split_part.html", + "node_id":"dli_spark_split_part.xml", + "product_code":"dli", + "code":"180", + "des":"This function is used to split a specified string based on a specified separator and return a substring from the start to end position.The return value is of the STRING t", + "doc_type":"sqlreference", + "kw":"split_part,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"split_part", + "githuburl":"" + }, + { + "uri":"dli_spark_translate.html", + "node_id":"dli_spark_translate.xml", + "product_code":"dli", + "code":"181", + "des":"This function is used to translate the input string by replacing the characters or string specified by from with the characters or string specified by to.For example, it ", + "doc_type":"sqlreference", + "kw":"translate,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"translate", + 
"githuburl":"" + }, + { + "uri":"dli_spark_trim.html", + "node_id":"dli_spark_trim.xml", + "product_code":"dli", + "code":"182", + "des":"This function is used to remove characters from the left and right of str.If trimChars is not specified, spaces are removed by default.If trimChars is specified, the func", + "doc_type":"sqlreference", + "kw":"trim,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"trim", + "githuburl":"" + }, + { + "uri":"dli_spark_upper_ucase.html", + "node_id":"dli_spark_upper_ucase.xml", + "product_code":"dli", + "code":"183", + "des":"This function is used to convert all characters of a string to the upper case.orThe return value is of the STRING type.If the value of the input parameter is not of the S", + "doc_type":"sqlreference", + "kw":"upper/ucase,String Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"upper/ucase", + "githuburl":"" + }, + { + "uri":"dli_08_0473.html", + "node_id":"dli_08_0473.xml", + "product_code":"dli", + "code":"184", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"sqlreference", + "kw":"Mathematical Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Mathematical Functions", + "githuburl":"" + }, + { + "uri":"dli_08_0065.html", + "node_id":"dli_08_0065.xml", + "product_code":"dli", + "code":"185", + "des":"Table 1 lists the mathematical functions supported by DLI.", + "doc_type":"sqlreference", + "kw":"Overview,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Overview", + "githuburl":"" + }, + { + "uri":"dli_spark_abs.html", + "node_id":"dli_spark_abs.xml", + "product_code":"dli", + "code":"186", + "des":"This function is used to calculate the absolute value of an input parameter.The return value is of the DOUBLE or INT type.If the value of a is NULL, NULL is returned.The ", + "doc_type":"sqlreference", + "kw":"abs,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"abs", + "githuburl":"" + }, + { + "uri":"dli_spark_aocs.html", + "node_id":"dli_spark_aocs.xml", + "product_code":"dli", + "code":"187", + "des":"This function is used to return the arc cosine value of a given angle a.The return value is of the DOUBLE type. 
The value ranges from 0 to π.If the value of a is not with", + "doc_type":"sqlreference", + "kw":"acos,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"acos", + "githuburl":"" + }, + { + "uri":"dli_spark_asin.html", + "node_id":"dli_spark_asin.xml", + "product_code":"dli", + "code":"188", + "des":"This function is used to return the arc sine value of a given angle a.The return value is of the DOUBLE type. The value ranges from -π/2 to π/2.If the value of a is not w", + "doc_type":"sqlreference", + "kw":"asin,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"asin", + "githuburl":"" + }, + { + "uri":"dli_spark_atan.html", + "node_id":"dli_spark_atan.xml", + "product_code":"dli", + "code":"189", + "des":"This function is used to return the arc tangent value of a given angle a.The return value is of the DOUBLE type. 
The value ranges from -π/2 to π/2.If the value of a is no", + "doc_type":"sqlreference", + "kw":"atan,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"atan", + "githuburl":"" + }, + { + "uri":"dli_spark_bin.html", + "node_id":"dli_spark_bin.xml", + "product_code":"dli", + "code":"190", + "des":"This function is used to return the binary format of a.The return value is of the STRING type.If the value of a is NULL, NULL is returned.The value 1 is returned.The valu", + "doc_type":"sqlreference", + "kw":"bin,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"bin", + "githuburl":"" + }, + { + "uri":"dli_spark_bround.html", + "node_id":"dli_spark_bround.xml", + "product_code":"dli", + "code":"191", + "des":"This function is used to return a value that is rounded off to d decimal places.The return value is of the DOUBLE type.If the value of a or d is NULL, NULL is returned.Th", + "doc_type":"sqlreference", + "kw":"bround,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"bround", + "githuburl":"" + }, + { + "uri":"dli_spark_cbrt.html", + "node_id":"dli_spark_cbrt.xml", + "product_code":"dli", + "code":"192", + "des":"This function is used to return the cube root of a.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The value 3 is returned.select cbrt(", + "doc_type":"sqlreference", + "kw":"cbrt,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"cbrt", + "githuburl":"" + }, + { + "uri":"dli_spark_ceil.html", + "node_id":"dli_spark_ceil.xml", + "product_code":"dli", + "code":"193", + "des":"This function is used to 
round up a to the nearest integer.The return value is of the DECIMAL type.If the value of a is NULL, NULL is returned.The value 2 is returned.The", + "doc_type":"sqlreference", + "kw":"ceil,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"ceil", + "githuburl":"" + }, + { + "uri":"dli_spark_conv.html", + "node_id":"dli_spark_conv.xml", + "product_code":"dli", + "code":"194", + "des":"This function is used to convert a number from from_base to to_base.The return value is of the STRING type.If the value of num, from_base, or to_base is NULL, NULL is ret", + "doc_type":"sqlreference", + "kw":"conv,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"conv", + "githuburl":"" + }, + { + "uri":"dli_spark_cos.html", + "node_id":"dli_spark_cos.xml", + "product_code":"dli", + "code":"195", + "des":"This function is used to calculate the cosine value of a, with input in radians.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The val", + "doc_type":"sqlreference", + "kw":"cos,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"cos", + "githuburl":"" + }, + { + "uri":"dli_spark_cot1.html", + "node_id":"dli_spark_cot1.xml", + "product_code":"dli", + "code":"196", + "des":"This function is used to calculate the cotangent value of a, with input in radians.The return value is of the DOUBLE or DECIMAL type.If the value of a is NULL, NULL is re", + "doc_type":"sqlreference", + "kw":"cot1,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"cot1", + "githuburl":"" + }, + { + "uri":"dli_spark_degress.html", + 
"node_id":"dli_spark_degress.xml", + "product_code":"dli", + "code":"197", + "des":"This function is used to calculate the angle corresponding to the returned radian.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The v", + "doc_type":"sqlreference", + "kw":"degrees,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"degrees", + "githuburl":"" + }, + { + "uri":"dli_spark_e.html", + "node_id":"dli_spark_e.xml", + "product_code":"dli", + "code":"198", + "des":"This function is used to return the value of e.The return value is of the DOUBLE type.The value 2.718281828459045 is returned.select e();", + "doc_type":"sqlreference", + "kw":"e,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"e", + "githuburl":"" + }, + { + "uri":"dli_spark_exp.html", + "node_id":"dli_spark_exp.xml", + "product_code":"dli", + "code":"199", + "des":"This function is used to return the value of e raised to the power of a.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The value 7.389", + "doc_type":"sqlreference", + "kw":"exp,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"exp", + "githuburl":"" + }, + { + "uri":"dli_spark_factorial.html", + "node_id":"dli_spark_factorial.xml", + "product_code":"dli", + "code":"200", + "des":"This function is used to return the factorial of a.The return value is of the BIGINT type.If the value of a is 0, 1 is returned.If the value of a is NULL or outside the r", + "doc_type":"sqlreference", + "kw":"factorial,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + 
"title":"factorial", + "githuburl":"" + }, + { + "uri":"dli_spark_floor.html", + "node_id":"dli_spark_floor.xml", + "product_code":"dli", + "code":"201", + "des":"This function is used to round down a to the nearest integer.The return value is of the BIGINT type.If the value of a is NULL, NULL is returned.The value 1 is returned.Th", + "doc_type":"sqlreference", + "kw":"floor,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"floor", + "githuburl":"" + }, + { + "uri":"dli_spark_greatest.html", + "node_id":"dli_spark_greatest.xml", + "product_code":"dli", + "code":"202", + "des":"This function is used to return the greatest value in a list of values.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The value 4.0 is", + "doc_type":"sqlreference", + "kw":"greatest,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"greatest", + "githuburl":"" + }, + { + "uri":"dli_spark_hex.html", + "node_id":"dli_spark_hex.xml", + "product_code":"dli", + "code":"203", + "des":"This function is used to convert an integer or character into its hexadecimal representation.The return value is of the STRING type.If the value of a is 0, 0 is returned.", + "doc_type":"sqlreference", + "kw":"hex,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"hex", + "githuburl":"" + }, + { + "uri":"dli_spark_least.html", + "node_id":"dli_spark_least.xml", + "product_code":"dli", + "code":"204", + "des":"This function is used to return the smallest value in a list of values.The return value is of the DOUBLE type.If the value of v1 or v2 is of the STRING type, an error is ", + "doc_type":"sqlreference", + "kw":"least,Mathematical Functions,SQL Syntax 
Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"least", + "githuburl":"" + }, + { + "uri":"dli_spark_ln.html", + "node_id":"dli_spark_ln.xml", + "product_code":"dli", + "code":"205", + "des":"This function is used to return the natural logarithm of a given value.The return value is of the DOUBLE type.If the value of a is negative or 0, NULL is returned.If the ", + "doc_type":"sqlreference", + "kw":"ln,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"ln", + "githuburl":"" + }, + { + "uri":"dli_spark_log.html", + "node_id":"dli_spark_log.xml", + "product_code":"dli", + "code":"206", + "des":"This function is used to return the natural logarithm of a given base and exponent.The return value is of the DOUBLE type.If the value of base or a is NULL, NULL is retur", + "doc_type":"sqlreference", + "kw":"log,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"log", + "githuburl":"" + }, + { + "uri":"dli_spark_log10.html", + "node_id":"dli_spark_log10.xml", + "product_code":"dli", + "code":"207", + "des":"This function is used to return the natural logarithm of a given value with a base of 10.The return value is of the DOUBLE type.If the value of a is negative, 0, or NULL,", + "doc_type":"sqlreference", + "kw":"log10,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"log10", + "githuburl":"" + }, + { + "uri":"dli_spark_log2.html", + "node_id":"dli_spark_log2.xml", + "product_code":"dli", + "code":"208", + "des":"This function is used to return the natural logarithm of a given value with a base of 2.The return value is of the DOUBLE type.If the value of a is negative, 0, or NULL, ", + 
"doc_type":"sqlreference", + "kw":"log2,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"log2", + "githuburl":"" + }, + { + "uri":"dli_spark_median.html", + "node_id":"dli_spark_median.xml", + "product_code":"dli", + "code":"209", + "des":"This function is used to calculate the median of input parameters.The return value is of the DOUBLE or DECIMAL type.If the column name does not exist, an error is reporte", + "doc_type":"sqlreference", + "kw":"median,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"median", + "githuburl":"" + }, + { + "uri":"dli_spark_negative.html", + "node_id":"dli_spark_negative.xml", + "product_code":"dli", + "code":"210", + "des":"This function is used to return the additive inverse of a.The return value is of the DECIMAL or INT type.If the value of a is NULL, NULL is returned.The value -1 is retur", + "doc_type":"sqlreference", + "kw":"negative,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"negative", + "githuburl":"" + }, + { + "uri":"dli_spark_percentlie.html", + "node_id":"dli_spark_percentlie.xml", + "product_code":"dli", + "code":"211", + "des":"This function is used to return the exact percentile, which is applicable to a small amount of data. 
It sorts a specified column in ascending order, and then obtains the ", + "doc_type":"sqlreference", + "kw":"percentlie,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"percentlie", + "githuburl":"" + }, + { + "uri":"dli_spark_percentlie_approx.html", + "node_id":"dli_spark_percentlie_approx.xml", + "product_code":"dli", + "code":"212", + "des":"This function is used to return the approximate percentile, which is applicable to a large amount of data. It sorts a specified column in ascending order, and then obtain", + "doc_type":"sqlreference", + "kw":"percentlie_approx,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"percentlie_approx", + "githuburl":"" + }, + { + "uri":"dli_spark_pi.html", + "node_id":"dli_spark_pi.xml", + "product_code":"dli", + "code":"213", + "des":"This function is used to return the value of π.The return value is of the DOUBLE type.The value 3.141592653589793 is returned.", + "doc_type":"sqlreference", + "kw":"pi,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"pi", + "githuburl":"" + }, + { + "uri":"dli_spark_pmod.html", + "node_id":"dli_spark_pmod.xml", + "product_code":"dli", + "code":"214", + "des":"This function is used to return the positive value of the remainder after division of x by y.pmod(INT a, INT b)The return value is of the DECIMAL or INT type.If the value", + "doc_type":"sqlreference", + "kw":"pmod,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"pmod", + "githuburl":"" + }, + { + "uri":"dli_spark_positive.html", + "node_id":"dli_spark_positive.xml", + "product_code":"dli", + "code":"215", + 
"des":"This function is used to return the value of a.The return value is of the DECIMAL, DOUBLE, or INT type.If the value of a is NULL, NULL is returned.The value 3 is returned", + "doc_type":"sqlreference", + "kw":"positive,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"positive", + "githuburl":"" + }, + { + "uri":"dli_spark_pow.html", + "node_id":"dli_spark_pow.xml", + "product_code":"dli", + "code":"216", + "des":"This function is used to calculate and return the pth power of a.The return value is of the DOUBLE type.If the value of a or p is NULL, NULL is returned.The value 16 retu", + "doc_type":"sqlreference", + "kw":"pow,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"pow", + "githuburl":"" + }, + { + "uri":"dli_spark_radians.html", + "node_id":"dli_spark_radians.xml", + "product_code":"dli", + "code":"217", + "des":"This function is used to return the radian corresponding to an angle.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The value 1.047197", + "doc_type":"sqlreference", + "kw":"radians,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"radians", + "githuburl":"" + }, + { + "uri":"dli_spark_rand.html", + "node_id":"dli_spark_rand.xml", + "product_code":"dli", + "code":"218", + "des":"This function is used to return an evenly distributed random number that is greater than or equal to 0 and less than 1.The return value is of the DOUBLE type.The value 0.", + "doc_type":"sqlreference", + "kw":"rand,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"rand", + "githuburl":"" + }, + { + 
"uri":"dli_spark_round.html", + "node_id":"dli_spark_round.xml", + "product_code":"dli", + "code":"219", + "des":"This function is used to calculate the rounded value of a up to d decimal places.The return value is of the DOUBLE type.If the value of d is negative, an error is reporte", + "doc_type":"sqlreference", + "kw":"round,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"round", + "githuburl":"" + }, + { + "uri":"dli_spark_shiftleft.html", + "node_id":"dli_spark_shiftleft.xml", + "product_code":"dli", + "code":"220", + "des":"This function is used to perform a signed bitwise left shift. It takes the binary number a and shifts it b positions to the left.shiftleft(BIGINT a, BIGINT b)The return v", + "doc_type":"sqlreference", + "kw":"shiftleft,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"shiftleft", + "githuburl":"" + }, + { + "uri":"dli_spark_shiftright.html", + "node_id":"dli_spark_shiftright.xml", + "product_code":"dli", + "code":"221", + "des":"This function is used to perform a signed bitwise right shift. It takes the binary number a and shifts it b positions to the right.The return value is of the INT type.If ", + "doc_type":"sqlreference", + "kw":"shiftright,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"shiftright", + "githuburl":"" + }, + { + "uri":"dli_spark_shiftrightunsigned.html", + "node_id":"dli_spark_shiftrightunsigned.xml", + "product_code":"dli", + "code":"222", + "des":"This function is used to perform an unsigned bitwise right shift. 
It takes the binary number a and shifts it b positions to the right.The return value is of the INT type.", + "doc_type":"sqlreference", + "kw":"shiftrightunsigned,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"shiftrightunsigned", + "githuburl":"" + }, + { + "uri":"dli_spark_sign.html", + "node_id":"dli_spark_sign.xml", + "product_code":"dli", + "code":"223", + "des":"This function is used to return the positive and negative signs corresponding to a.The return value is of the DOUBLE type.If the value of a is a positive number, 1 is ret", + "doc_type":"sqlreference", + "kw":"sign,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"sign", + "githuburl":"" + }, + { + "uri":"dli_spark_sin.html", + "node_id":"dli_spark_sin.xml", + "product_code":"dli", + "code":"224", + "des":"This function is used to return the sine value of a, with input in radians.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The value 1 ", + "doc_type":"sqlreference", + "kw":"sin,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"sin", + "githuburl":"" + }, + { + "uri":"dli_spark_sqrt.html", + "node_id":"dli_spark_sqrt.xml", + "product_code":"dli", + "code":"225", + "des":"This function is used to return the square root of a value.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The value 2.8284271247461903", + "doc_type":"sqlreference", + "kw":"sqrt,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"sqrt", + "githuburl":"" + }, + { + "uri":"dli_spark_tan.html", + "node_id":"dli_spark_tan.xml", + 
"product_code":"dli", + "code":"226", + "des":"This function is used to return the tangent value of a, with input in radians.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The value", + "doc_type":"sqlreference", + "kw":"tan,Mathematical Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"tan", + "githuburl":"" + }, + { + "uri":"dli_08_0474.html", + "node_id":"dli_08_0474.xml", + "product_code":"dli", + "code":"227", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"sqlreference", + "kw":"Aggregate Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Aggregate Functions", "githuburl":"" }, { - "uri":"dli_08_0069.html", + "uri":"dli_08_0068.html", + "node_id":"dli_08_0068.xml", "product_code":"dli", - "code":"101", - "des":"A window function performs a calculation operation on a set of values related to the current value. 
A window function can be an aggregate function used in the GROUP BY cl", + "code":"228", + "des":"Table 1 lists the aggregate functions supported by DLI.", "doc_type":"sqlreference", - "kw":"Window Functions,Built-in Functions,SQL Syntax Reference", + "kw":"Overview,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Overview", + "githuburl":"" + }, + { + "uri":"dli_spark_avg.html", + "node_id":"dli_spark_avg.xml", + "product_code":"dli", + "code":"229", + "des":"This function is used to return the average value.The return value is of the DOUBLE type.If the value of col is NULL, the column is not involved in calculation.Calculates", + "doc_type":"sqlreference", + "kw":"avg,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"avg", + "githuburl":"" + }, + { + "uri":"dli_spark_corr.html", + "node_id":"dli_spark_corr.xml", + "product_code":"dli", + "code":"230", + "des":"This function is used to return the correlation coefficient between two columns of numerical values.The return value is of the DOUBLE type.Calculates the correlation coef", + "doc_type":"sqlreference", + "kw":"corr,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"corr", + "githuburl":"" + }, + { + "uri":"dli_spark_count.html", + "node_id":"dli_spark_count.xml", + "product_code":"dli", + "code":"231", + "des":"This function is used to return the number of records.The return value is of the BIGINT type.If the value of colname is NULL, the row is not involved in calculation.Calcu", + "doc_type":"sqlreference", + "kw":"count,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"count", + 
"githuburl":"" + }, + { + "uri":"dli_spark_covar_pop.html", + "node_id":"dli_spark_covar_pop.xml", + "product_code":"dli", + "code":"232", + "des":"This function is used to return the covariance between two columns of numerical values.The return value is of the DOUBLE type.Calculates the covariance between the invent", + "doc_type":"sqlreference", + "kw":"covar_pop,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"covar_pop", + "githuburl":"" + }, + { + "uri":"dli_spark_covar_samp.html", + "node_id":"dli_spark_covar_samp.xml", + "product_code":"dli", + "code":"233", + "des":"This function is used to return the sample covariance between two columns of numerical values.The return value is of the DOUBLE type.Calculates the sample covariance betw", + "doc_type":"sqlreference", + "kw":"covar_samp,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"covar_samp", + "githuburl":"" + }, + { + "uri":"dli_spark_max.html", + "node_id":"dli_spark_max.xml", + "product_code":"dli", + "code":"234", + "des":"This function is used to return the maximum value.The return value is of the DOUBLE type.The return type is the same as the type of col. The return rules are as follows:I", + "doc_type":"sqlreference", + "kw":"max,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"max", + "githuburl":"" + }, + { + "uri":"dli_spark_min.html", + "node_id":"dli_spark_min.xml", + "product_code":"dli", + "code":"235", + "des":"This function is used to return the minimum value.The return value is of the DOUBLE type.The return type is the same as the type of col. 
The return rules are as follows:I", + "doc_type":"sqlreference", + "kw":"min,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"min", + "githuburl":"" + }, + { + "uri":"dli_spark_percentile.html", + "node_id":"dli_spark_percentile.xml", + "product_code":"dli", + "code":"236", + "des":"This function is used to return the numerical value at a certain percentage point within a range of values.The return value is of the DOUBLE type.The value should be betw", + "doc_type":"sqlreference", + "kw":"percentile,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"percentile", + "githuburl":"" + }, + { + "uri":"dli_spark_percentile_approx.html", + "node_id":"dli_spark_percentile_approx.xml", + "product_code":"dli", + "code":"237", + "des":"This function is used to approximate the pth percentile (including floating-point numbers) of a numeric column within a group.The return value is of the DOUBLE type.Calcu", + "doc_type":"sqlreference", + "kw":"percentile_approx,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"percentile_approx", + "githuburl":"" + }, + { + "uri":"dli_spark_stddev_pop.html", + "node_id":"dli_spark_stddev_pop.xml", + "product_code":"dli", + "code":"238", + "des":"This function is used to return the deviation of a specified column.The return value is of the DOUBLE type.Calculates the deviation of all offering inventories (items). 
A", + "doc_type":"sqlreference", + "kw":"stddev_pop,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"stddev_pop", + "githuburl":"" + }, + { + "uri":"dli_spark_stddev_samp.html", + "node_id":"dli_spark_stddev_samp.xml", + "product_code":"dli", + "code":"239", + "des":"This function is used to return the sample deviation of a specified column.The return value is of the DOUBLE type.Calculates the sample covariance between the inventory (", + "doc_type":"sqlreference", + "kw":"stddev_samp,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"stddev_samp", + "githuburl":"" + }, + { + "uri":"dli_spark_sum.html", + "node_id":"dli_spark_sum.xml", + "product_code":"dli", + "code":"240", + "des":"This function is used to calculate the total sum.The return value is of the DOUBLE type.If the value of col is NULL, the row is not involved in calculation.Calculates the", + "doc_type":"sqlreference", + "kw":"sum,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"sum", + "githuburl":"" + }, + { + "uri":"dli_spark_variance_var_pop.html", + "node_id":"dli_spark_variance_var_pop.xml", + "product_code":"dli", + "code":"241", + "des":"This function is used to return the variance of a column.The return value is of the DOUBLE type.Calculates the variance of all offering inventories (items). 
An example co", + "doc_type":"sqlreference", + "kw":"variance/var_pop,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"variance/var_pop", + "githuburl":"" + }, + { + "uri":"dli_spark_war_samp.html", + "node_id":"dli_spark_war_samp.xml", + "product_code":"dli", + "code":"242", + "des":"This function is used to return the sample variance of a specified column.The return value is of the DOUBLE type.Calculates the sample variance of all offering inventorie", + "doc_type":"sqlreference", + "kw":"var_samp,Aggregate Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"var_samp", + "githuburl":"" + }, + { + "uri":"dli_08_0475.html", + "node_id":"dli_08_0475.xml", + "product_code":"dli", + "code":"243", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"sqlreference", + "kw":"Window Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Window Functions", "githuburl":"" }, { - "uri":"dli_08_0150.html", + "uri":"dli_08_0069.html", + "node_id":"dli_08_0069.xml", "product_code":"dli", - "code":"102", + "code":"244", + "des":"Table 1 lists the window functions supported by DLI.", + "doc_type":"sqlreference", + "kw":"Overview,Window Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Overview", + "githuburl":"" + }, + { + "uri":"dli_spark_cume_dist.html", + "node_id":"dli_spark_cume_dist.xml", + "product_code":"dli", + "code":"245", + "des":"This function is used to return the cumulative distribution, which is equivalent to calculating the proportion of data in the partition that is greater than or equal to, ", + "doc_type":"sqlreference", + "kw":"cume_dist,Window Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"cume_dist", + "githuburl":"" + }, + { + "uri":"dli_spark_first_value.html", + "node_id":"dli_spark_first_value.xml", + "product_code":"dli", + "code":"246", + "des":"This function is used to obtain the value of the first data record in the window corresponding to the current row.The restrictions on using window functions are as follow", + "doc_type":"sqlreference", + "kw":"first_value,Window Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"first_value", + "githuburl":"" + }, + { + "uri":"dli_spark_last_value.html", + "node_id":"dli_spark_last_value.xml", + "product_code":"dli", + "code":"247", + "des":"This 
function is used to obtain the value of the last data record in the window corresponding to the current row.The restrictions on using window functions are as follows", + "doc_type":"sqlreference", + "kw":"last_value,Window Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"last_value", + "githuburl":"" + }, + { + "uri":"dli_spark_lag.html", + "node_id":"dli_spark_lag.xml", + "product_code":"dli", + "code":"248", + "des":"This function is used to return the value of the nth row upwards within a specified window.The restrictions on using window functions are as follows:Window functions can ", + "doc_type":"sqlreference", + "kw":"lag,Window Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"lag", + "githuburl":"" + }, + { + "uri":"dli_spark_lead.html", + "node_id":"dli_spark_lead.xml", + "product_code":"dli", + "code":"249", + "des":"This function is used to return the value of the nth row downwards within a specified window.The restrictions on using window functions are as follows:Window functions ca", + "doc_type":"sqlreference", + "kw":"lead,Window Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"lead", + "githuburl":"" + }, + { + "uri":"dli_spark_percent_rank.html", + "node_id":"dli_spark_percent_rank.xml", + "product_code":"dli", + "code":"250", + "des":"This function is used to return the value of the column specified in the ORDER BY clause of a window, expressed as a decimal between 0 and 1. 
It is calculated as (the ran", + "doc_type":"sqlreference", + "kw":"percent_rank,Window Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"percent_rank", + "githuburl":"" + }, + { + "uri":"dli_spark_rank.html", + "node_id":"dli_spark_rank.xml", + "product_code":"dli", + "code":"251", + "des":"This function is used to return the rank of a value in a set of values. When multiple values share the same rank, the next rank in the sequence is not consecutive.The res", + "doc_type":"sqlreference", + "kw":"rank,Window Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"rank", + "githuburl":"" + }, + { + "uri":"dli_spark_row_number.html", + "node_id":"dli_spark_row_number.xml", + "product_code":"dli", + "code":"252", + "des":"This function is used to return the row number, starting from 1 and increasing incrementally.The restrictions on using window functions are as follows:Window functions ca", + "doc_type":"sqlreference", + "kw":"row_number,Window Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"row_number", + "githuburl":"" + }, + { + "uri":"dli_08_0476.html", + "node_id":"dli_08_0476.xml", + "product_code":"dli", + "code":"253", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"sqlreference", + "kw":"Other Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Other Functions", + "githuburl":"" + }, + { + "uri":"dli_08_0469.html", + "node_id":"dli_08_0469.xml", + "product_code":"dli", + "code":"254", + "des":"The following table lists the functions provided by DLI, such as decode1, javahash, and max_pt.", + "doc_type":"sqlreference", + "kw":"Overview,Other Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Overview", + "githuburl":"" + }, + { + "uri":"dli_spark_decode1.html", + "node_id":"dli_spark_decode1.xml", + "product_code":"dli", + "code":"255", + "des":"This function is used to implement if-then-else branch selection.result and default are return values. These values can be of any data type.If they match, the value of re", + "doc_type":"sqlreference", + "kw":"decode1,Other Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"decode1", + "githuburl":"" + }, + { + "uri":"dli_spark_javahash.html", + "node_id":"dli_spark_javahash.xml", + "product_code":"dli", + "code":"256", + "des":"This function is used to return the hash value of a.The return value is of the STRING type.The hash value is returned. 
If the value of a is null, an error is reported.The", + "doc_type":"sqlreference", + "kw":"javahash,Other Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"javahash", + "githuburl":"" + }, + { + "uri":"dli_spark_max_pt.html", + "node_id":"dli_spark_max_pt.xml", + "product_code":"dli", + "code":"257", + "des":"This function is used to return the name of the largest level-1 partition that contains data in a partitioned table and read the data of this partition.The return value i", + "doc_type":"sqlreference", + "kw":"max_pt,Other Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"max_pt", + "githuburl":"" + }, + { + "uri":"dli_spark_ordinal.html", + "node_id":"dli_spark_ordinal.xml", + "product_code":"dli", + "code":"258", + "des":"This function is used to sort input variables in ascending order and return the value at the position specified by nth.The return value is of the DOUBLE or DECIMAL type.V", + "doc_type":"sqlreference", + "kw":"ordinal,Other Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"ordinal", + "githuburl":"" + }, + { + "uri":"dli_spark_trans_array.html", + "node_id":"dli_spark_trans_array.xml", + "product_code":"dli", + "code":"259", + "des":"This function is used to convert an array split by a fixed separator in a column into multiple rows.All columns used as keys must be placed before the columns to be trans", + "doc_type":"sqlreference", + "kw":"trans_array,Other Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"trans_array", + "githuburl":"" + }, + { + "uri":"dli_spark_trunc_numeric.html", + "node_id":"dli_spark_trunc_numeric.xml", + "product_code":"dli", + "code":"260", + 
"des":"This function is used to truncate the number value to a specified decimal place.The return value is of the DOUBLE or DECIMAL type.The return rules are as follows:If the n", + "doc_type":"sqlreference", + "kw":"trunc_numeric,Other Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"trunc_numeric", + "githuburl":"" + }, + { + "uri":"dli_spark_url_decode.html", + "node_id":"dli_spark_url_decode.xml", + "product_code":"dli", + "code":"261", + "des":"This function is used to convert a string from the application/x-www-form-urlencoded MIME format to regular characters.The return value is of the STRING type.UTF-8-encode", + "doc_type":"sqlreference", + "kw":"url_decode,Other Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"url_decode", + "githuburl":"" + }, + { + "uri":"dli_spark_url_encode.html", + "node_id":"dli_spark_url_encode.xml", + "product_code":"dli", + "code":"262", + "des":"This function is used to encode a string in the application/x-www-form-urlencoded MIME format.url_encode(string [, string ])The return value is of the ST", + "doc_type":"sqlreference", + "kw":"url_encode,Other Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"url_encode", + "githuburl":"" + }, + { + "uri":"dli_08_0150.html", + "node_id":"dli_08_0150.xml", + "product_code":"dli", + "code":"263", "des":"This statement is a basic query statement and is used to return the query results.The table to be queried must exist. 
Otherwise, an error is reported.To filter the record", "doc_type":"sqlreference", "kw":"Basic SELECT Statements,Spark SQL Syntax Reference,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Basic SELECT Statements", "githuburl":"" }, { "uri":"dli_08_0151.html", + "node_id":"dli_08_0151.xml", "product_code":"dli", - "code":"103", + "code":"264", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Filtering", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Filtering", "githuburl":"" }, { "uri":"dli_08_0152.html", + "node_id":"dli_08_0152.xml", "product_code":"dli", - "code":"104", + "code":"265", "des":"This statement is used to filter the query results using the WHERE clause.All is used to return repeated rows. By default, all repeated rows are returned. It is followed ", "doc_type":"sqlreference", "kw":"WHERE Filtering Clause,Filtering,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"WHERE Filtering Clause", "githuburl":"" }, { "uri":"dli_08_0153.html", + "node_id":"dli_08_0153.xml", "product_code":"dli", - "code":"105", + "code":"266", "des":"This statement is used to filter the query results using the HAVING clause.All is used to return repeated rows. By default, all repeated rows are returned. 
It is followed", "doc_type":"sqlreference", "kw":"HAVING Filtering Clause,Filtering,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"HAVING Filtering Clause", "githuburl":"" }, { "uri":"dli_08_0154.html", + "node_id":"dli_08_0154.xml", "product_code":"dli", - "code":"106", + "code":"267", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Sorting", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Sorting", "githuburl":"" }, { "uri":"dli_08_0155.html", + "node_id":"dli_08_0155.xml", "product_code":"dli", - "code":"107", + "code":"268", "des":"This statement is used to order the result set of a query by the specified field.ASC/DESC: ASC sorts from the lowest value to the highest value. DESC sorts from the highe", "doc_type":"sqlreference", "kw":"ORDER BY,Sorting,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"ORDER BY", "githuburl":"" }, { "uri":"dli_08_0156.html", + "node_id":"dli_08_0156.xml", "product_code":"dli", - "code":"108", + "code":"269", "des":"This statement is used to achieve the partial sorting of tables according to fields.ASC/DESC: ASC sorts from the lowest value to the highest value. 
DESC sorts from the hi", "doc_type":"sqlreference", "kw":"SORT BY,Sorting,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"SORT BY", "githuburl":"" }, { "uri":"dli_08_0157.html", + "node_id":"dli_08_0157.xml", "product_code":"dli", - "code":"109", + "code":"270", "des":"This statement is used to bucket a table and sort the table within buckets.CLUSTER BY: Buckets are created based on specified fields. Single fields and multiple fields ar", "doc_type":"sqlreference", "kw":"CLUSTER BY,Sorting,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"CLUSTER BY", "githuburl":"" }, { "uri":"dli_08_0158.html", + "node_id":"dli_08_0158.xml", "product_code":"dli", - "code":"110", + "code":"271", "des":"This statement is used to bucket a table according to the field.DISTRIBUTE BY: Buckets are created based on specified fields. A single field or multiple fields are suppor", "doc_type":"sqlreference", "kw":"DISTRIBUTE BY,Sorting,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"DISTRIBUTE BY", "githuburl":"" }, { "uri":"dli_08_0159.html", + "node_id":"dli_08_0159.xml", "product_code":"dli", - "code":"111", + "code":"272", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Grouping", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Grouping", "githuburl":"" }, { "uri":"dli_08_0160.html", + "node_id":"dli_08_0160.xml", "product_code":"dli", - "code":"112", + "code":"273", "des":"This statement is used to group a table based on columns.Column-based GROUP BY can be categorized into single-column GROUP BY and multi-column GROUP BY.Single-column GROU", "doc_type":"sqlreference", "kw":"Column-Based GROUP BY,Grouping,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Column-Based GROUP BY", "githuburl":"" }, { "uri":"dli_08_0161.html", + "node_id":"dli_08_0161.xml", "product_code":"dli", - "code":"113", + "code":"274", "des":"This statement is used to group a table according to expressions.The groupby_expression can contain a single field or multiple fields, and also can call aggregate functio", "doc_type":"sqlreference", "kw":"Expression-Based GROUP BY,Grouping,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Expression-Based GROUP BY", "githuburl":"" }, { "uri":"dli_08_0162.html", + "node_id":"dli_08_0162.xml", "product_code":"dli", - "code":"114", + "code":"275", "des":"This statement filters a table after grouping it using the HAVING clause.The groupby_expression can contain a single field or multiple fields, and can also call aggregate", "doc_type":"sqlreference", "kw":"GROUP BY Using HAVING,Grouping,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"GROUP BY Using HAVING", "githuburl":"" }, { "uri":"dli_08_0163.html", + "node_id":"dli_08_0163.xml", 
"product_code":"dli", - "code":"115", + "code":"276", "des":"This statement is used to generate the aggregate row, super-aggregate row, and the total row. The statement can achieve multi-layer statistics from right to left and disp", "doc_type":"sqlreference", "kw":"ROLLUP,Grouping,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"ROLLUP", "githuburl":"" }, { "uri":"dli_08_0164.html", + "node_id":"dli_08_0164.xml", "product_code":"dli", - "code":"116", + "code":"277", "des":"This statement is used to generate the cross-table row and achieve the cross-statistics of the GROUP BY field.GROUPING SETS is the expansion of GROUP BY. For example:SELE", "doc_type":"sqlreference", "kw":"GROUPING SETS,Grouping,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"GROUPING SETS", "githuburl":"" }, { "uri":"dli_08_0165.html", + "node_id":"dli_08_0165.xml", "product_code":"dli", - "code":"117", + "code":"278", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"JOIN", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"JOIN", "githuburl":"" }, { "uri":"dli_08_0166.html", + "node_id":"dli_08_0166.xml", "product_code":"dli", - "code":"118", + "code":"279", "des":"This statement is used to join and return the rows that meet the JOIN conditions from two tables as the result set.JOIN/INNER JOIN: Only the records that meet the JOIN co", "doc_type":"sqlreference", "kw":"INNER JOIN,JOIN,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"INNER JOIN", "githuburl":"" }, { "uri":"dli_08_0167.html", + "node_id":"dli_08_0167.xml", "product_code":"dli", - "code":"119", + "code":"280", "des":"Join the left table with the right table and return all joined records of the left table. If no joined record is found, NULL will be returned.LEFT OUTER JOIN: Returns all", "doc_type":"sqlreference", "kw":"LEFT OUTER JOIN,JOIN,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"LEFT OUTER JOIN", "githuburl":"" }, { "uri":"dli_08_0168.html", + "node_id":"dli_08_0168.xml", "product_code":"dli", - "code":"120", + "code":"281", "des":"Match the right table with the left table and return all matched records of the right table. 
If no matched record is found, NULL will be returned.RIGHT OUTER JOIN: Return", "doc_type":"sqlreference", "kw":"RIGHT OUTER JOIN,JOIN,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"RIGHT OUTER JOIN", "githuburl":"" }, { "uri":"dli_08_0169.html", + "node_id":"dli_08_0169.xml", "product_code":"dli", - "code":"121", + "code":"282", "des":"Join all records from the right table and the left table and return all joined records. If no joined record is found, NULL will be returned.FULL OUTER JOIN: Matches all r", "doc_type":"sqlreference", "kw":"FULL OUTER JOIN,JOIN,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"FULL OUTER JOIN", "githuburl":"" }, { "uri":"dli_08_0170.html", + "node_id":"dli_08_0170.xml", "product_code":"dli", - "code":"122", + "code":"283", "des":"This statement has the same function as INNER JOIN, that is, the result set that meet the WHERE condition is returned. However, IMPLICIT JOIN does not use the condition s", "doc_type":"sqlreference", "kw":"IMPLICIT JOIN,JOIN,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"IMPLICIT JOIN", "githuburl":"" }, { "uri":"dli_08_0171.html", + "node_id":"dli_08_0171.xml", "product_code":"dli", - "code":"123", + "code":"284", "des":"Cartesian JOIN joins each record of table A with all records in table B. 
For example, if there are m records in table A and n records in table B, m x n records will be ge", "doc_type":"sqlreference", "kw":"Cartesian JOIN,JOIN,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Cartesian JOIN", "githuburl":"" }, { "uri":"dli_08_0172.html", + "node_id":"dli_08_0172.xml", "product_code":"dli", - "code":"124", + "code":"285", "des":"This statement is used to query the records that meet the JOIN condition from the left table.LEFT SEMI JOIN: Indicates to only return the records from the left table. LEF", "doc_type":"sqlreference", "kw":"LEFT SEMI JOIN,JOIN,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"LEFT SEMI JOIN", "githuburl":"" }, { "uri":"dli_08_0173.html", + "node_id":"dli_08_0173.xml", "product_code":"dli", - "code":"125", + "code":"286", "des":"This statement is used to join multiple tables using unequal values and return the result set that meet the condition.The non_equi_join_condition is similar to join_condi", "doc_type":"sqlreference", "kw":"NON-EQUIJOIN,JOIN,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"NON-EQUIJOIN", "githuburl":"" }, { "uri":"dli_08_0174.html", + "node_id":"dli_08_0174.xml", "product_code":"dli", - "code":"126", + "code":"287", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Subquery", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Subquery", "githuburl":"" }, { "uri":"dli_08_0175.html", + "node_id":"dli_08_0175.xml", "product_code":"dli", - "code":"127", + "code":"288", "des":"Subqueries are nested in the WHERE clause, and the subquery result is used as the filtering condition.All is used to return repeated rows. By default, all repeated rows a", "doc_type":"sqlreference", "kw":"Subquery Nested by WHERE,Subquery,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Subquery Nested by WHERE", "githuburl":"" }, { "uri":"dli_08_0176.html", + "node_id":"dli_08_0176.xml", "product_code":"dli", - "code":"128", + "code":"289", "des":"This statement is used to nest subquery by FROM and use the subquery results as the data source of the external SELECT statement.All is used to return repeated rows. By d", "doc_type":"sqlreference", "kw":"Subquery Nested by FROM,Subquery,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Subquery Nested by FROM", "githuburl":"" }, { "uri":"dli_08_0177.html", + "node_id":"dli_08_0177.xml", "product_code":"dli", - "code":"129", + "code":"290", "des":"This statement is used to embed a subquery in the HAVING clause. The subquery result is used as a part of the HAVING clause.All is used to return repeated rows. 
By defaul", "doc_type":"sqlreference", "kw":"Subquery Nested by HAVING,Subquery,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Subquery Nested by HAVING", "githuburl":"" }, { "uri":"dli_08_0178.html", + "node_id":"dli_08_0178.xml", "product_code":"dli", - "code":"130", + "code":"291", "des":"This statement is used to nest queries in the subquery.All is used to return repeated rows. By default, all repeated rows are returned. It is followed by asterisks (*) on", "doc_type":"sqlreference", "kw":"Multi-Layer Nested Subquery,Subquery,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Multi-Layer Nested Subquery", "githuburl":"" }, { "uri":"dli_08_0179.html", + "node_id":"dli_08_0179.xml", "product_code":"dli", - "code":"131", + "code":"292", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Alias", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Alias", "githuburl":"" }, { "uri":"dli_08_0180.html", + "node_id":"dli_08_0180.xml", "product_code":"dli", - "code":"132", + "code":"293", "des":"This statement is used to specify an alias for a table or the subquery result.table_reference: Can be a table, view, or subquery.As: Is used to connect to table_reference", "doc_type":"sqlreference", "kw":"AS for Table,Alias,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"AS for Table", "githuburl":"" }, { "uri":"dli_08_0181.html", + "node_id":"dli_08_0181.xml", "product_code":"dli", - "code":"133", + "code":"294", "des":"This statement is used to specify an alias for a column.alias: gives an alias for the attr_expr field.AS: Whether to add AS does not affect the result.The to-be-queried t", "doc_type":"sqlreference", "kw":"AS for Column,Alias,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"AS for Column", "githuburl":"" }, { "uri":"dli_08_0182.html", + "node_id":"dli_08_0182.xml", "product_code":"dli", - "code":"134", + "code":"295", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Set Operations", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Set Operations", "githuburl":"" }, { "uri":"dli_08_0183.html", + "node_id":"dli_08_0183.xml", "product_code":"dli", - "code":"135", + "code":"296", "des":"This statement is used to return the union set of multiple query results.UNION: The set operation is used to join the head and tail of a table based on certain conditions", "doc_type":"sqlreference", "kw":"UNION,Set Operations,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"UNION", "githuburl":"" }, { "uri":"dli_08_0184.html", + "node_id":"dli_08_0184.xml", "product_code":"dli", - "code":"136", + "code":"297", "des":"This statement is used to return the intersection set of multiple query results.INTERSECT returns the intersection of multiple query results. The number of columns return", "doc_type":"sqlreference", "kw":"INTERSECT,Set Operations,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"INTERSECT", "githuburl":"" }, { "uri":"dli_08_0185.html", + "node_id":"dli_08_0185.xml", "product_code":"dli", - "code":"137", + "code":"298", "des":"This statement is used to return the difference set of two query results.EXCEPT minus the sets. 
A EXCEPT B indicates to remove the records that exist in both A and B from", "doc_type":"sqlreference", "kw":"EXCEPT,Set Operations,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"EXCEPT", "githuburl":"" }, { "uri":"dli_08_0186.html", + "node_id":"dli_08_0186.xml", "product_code":"dli", - "code":"138", + "code":"299", "des":"This statement is used to define the common table expression (CTE) using WITH...AS to simplify the query and make the result easier to read and maintain.cte_name: Name of", "doc_type":"sqlreference", "kw":"WITH...AS,Spark SQL Syntax Reference,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"WITH...AS", "githuburl":"" }, { "uri":"dli_08_0187.html", + "node_id":"dli_08_0187.xml", "product_code":"dli", - "code":"139", + "code":"300", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"CASE...WHEN", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"CASE...WHEN", "githuburl":"" }, { "uri":"dli_08_0188.html", + "node_id":"dli_08_0188.xml", "product_code":"dli", - "code":"140", + "code":"301", "des":"This statement is used to display result_expression according to the joined results of input_expression and when_expression.CASE: Subquery is supported in basic CASE stat", "doc_type":"sqlreference", "kw":"Basic CASE Statement,CASE...WHEN,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Basic CASE Statement", "githuburl":"" }, { "uri":"dli_08_0189.html", + "node_id":"dli_08_0189.xml", "product_code":"dli", - "code":"141", + "code":"302", "des":"This statement is used to obtain the value of boolean_expression for each WHEN statement in a specified order. Then return the first result_expression with the value TRUE", "doc_type":"sqlreference", "kw":"CASE Query Statement,CASE...WHEN,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"CASE Query Statement", "githuburl":"" }, { "uri":"dli_08_0190.html", + "node_id":"dli_08_0190.xml", "product_code":"dli", - "code":"142", + "code":"303", "des":"This statement is used together with the window function. The OVER statement is used to group data and sort the data within the group. 
The window function is used to gene", "doc_type":"sqlreference", "kw":"OVER Clause,Spark SQL Syntax Reference,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"OVER Clause", "githuburl":"" }, { "uri":"dli_08_0370.html", + "node_id":"dli_08_0370.xml", "product_code":"dli", - "code":"143", + "code":"304", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", - "kw":"Flink Opensource SQL 1.12 Syntax Reference", - "title":"Flink Opensource SQL 1.12 Syntax Reference", + "kw":"Flink OpenSource SQL 1.12 Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"Flink OpenSource SQL 1.12 Syntax Reference", "githuburl":"" }, { "uri":"dli_08_0371.html", + "node_id":"dli_08_0371.xml", "product_code":"dli", - "code":"144", + "code":"305", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Constraints and Definitions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Constraints and Definitions", "githuburl":"" }, { "uri":"dli_08_0372.html", + "node_id":"dli_08_0372.xml", "product_code":"dli", - "code":"145", + "code":"306", "des":"STRING, BOOLEAN, BYTES, DECIMAL, TINYINT, SMALLINT, INTEGER, BIGINT, FLOAT, DOUBLE, DATE, TIME, TIMESTAMP, TIMESTAMP WITH LOCAL TIME ZONE, INTERVAL, ARRAY, MULTISET, MAP,", "doc_type":"sqlreference", "kw":"Supported Data Types,Constraints and Definitions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Supported Data Types", "githuburl":"" }, { "uri":"dli_08_0373.html", + "node_id":"dli_08_0373.xml", "product_code":"dli", - "code":"146", + "code":"307", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Syntax", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Syntax", "githuburl":"" }, { "uri":"dli_08_0374.html", + "node_id":"dli_08_0374.xml", "product_code":"dli", - "code":"147", + "code":"308", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Data Definition Language (DDL)", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Data Definition Language (DDL)", "githuburl":"" }, { "uri":"dli_08_0375.html", + "node_id":"dli_08_0375.xml", "product_code":"dli", - "code":"148", + "code":"309", "des":"Create a table with a specified name.COMPUTED COLUMNA computed column is a virtual column generated using column_name AS computed_column_expression. A computed column eva", "doc_type":"sqlreference", "kw":"CREATE TABLE,Data Definition Language (DDL),SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"CREATE TABLE", "githuburl":"" }, { "uri":"dli_08_0376.html", + "node_id":"dli_08_0376.xml", "product_code":"dli", - "code":"149", + "code":"310", "des":"Create a view with multiple layers nested in it to simplify the development process.IF NOT EXISTSIf the view already exists, nothing happens.Create a view named viewName.", "doc_type":"sqlreference", "kw":"CREATE VIEW,Data Definition Language (DDL),SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"CREATE VIEW", "githuburl":"" }, { "uri":"dli_08_0377.html", + "node_id":"dli_08_0377.xml", "product_code":"dli", - "code":"150", + "code":"311", "des":"Create a user-defined function.For details about how to create a user-defined function, see User-Defined Functions (UDFs).IF NOT EXISTSIf the function already exists, not", "doc_type":"sqlreference", "kw":"CREATE FUNCTION,Data Definition Language (DDL),SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"CREATE FUNCTION", "githuburl":"" }, { 
"uri":"dli_08_0378.html", + "node_id":"dli_08_0378.xml", "product_code":"dli", - "code":"151", - "des":"SyntaxPrecautionsFlink SQL uses a lexical policy for identifier (table, attribute, function names) similar to Java:The case of identifiers is preserved whether or not the", + "code":"312", + "des":"SyntaxPrecautionsFlink SQL uses a lexical policy for identifier (table, attribute, function names) similar to Java:The case of identifiers is preserved whether they are q", "doc_type":"sqlreference", "kw":"Data Manipulation Language (DML),Syntax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Data Manipulation Language (DML)", "githuburl":"" }, { "uri":"dli_08_0379.html", + "node_id":"dli_08_0379.xml", "product_code":"dli", - "code":"152", + "code":"313", "des":"This section describes the Flink open source SQL 1.12 syntax supported by DLI. For details about the parameters and examples, see the syntax description.", "doc_type":"sqlreference", - "kw":"Overview,Flink Opensource SQL 1.12 Syntax Reference,SQL Syntax Reference", + "kw":"Overview,Flink OpenSource SQL 1.12 Syntax Reference,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Overview", "githuburl":"" }, { "uri":"dli_08_0380.html", + "node_id":"dli_08_0380.xml", "product_code":"dli", - "code":"153", + "code":"314", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"DDL Syntax", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"DDL Syntax", "githuburl":"" }, { "uri":"dli_08_0381.html", + "node_id":"dli_08_0381.xml", "product_code":"dli", - "code":"154", + "code":"315", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating Source Tables", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating Source Tables", "githuburl":"" }, { "uri":"dli_08_0382.html", + "node_id":"dli_08_0382.xml", "product_code":"dli", - "code":"155", + "code":"316", "des":"DataGen is used to generate random data for debugging and testing.NoneWhen you create a DataGen table, the table field type cannot be Array, Map, or Row. You can use COMP", "doc_type":"sqlreference", "kw":"DataGen Source Table,Creating Source Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"DataGen Source Table", "githuburl":"" }, { "uri":"dli_08_0383.html", + "node_id":"dli_08_0383.xml", "product_code":"dli", - "code":"156", + "code":"317", "des":"DLI reads data of Flink jobs from GaussDB(DWS). GaussDB(DWS) database kernel is compliant with PostgreSQL. 
The PostgreSQL database can store data of more complex types an", "doc_type":"sqlreference", "kw":"GaussDB(DWS) Source Table,Creating Source Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"GaussDB(DWS) Source Table", "githuburl":"" }, { "uri":"dli_08_0384.html", + "node_id":"dli_08_0384.xml", "product_code":"dli", - "code":"157", + "code":"318", "des":"Create a source stream to obtain data from HBase as input for jobs. HBase is a column-oriented distributed cloud storage system that features enhanced reliability, excell", "doc_type":"sqlreference", "kw":"HBase Source Table,Creating Source Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"HBase Source Table", "githuburl":"" }, { "uri":"dli_08_0385.html", + "node_id":"dli_08_0385.xml", "product_code":"dli", - "code":"158", + "code":"319", "des":"The JDBC connector is a Flink's built-in connector to read data from a database.An enhanced datasource connection with the instances has been established, so that you can", "doc_type":"sqlreference", "kw":"JDBC Source Table,Creating Source Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"JDBC Source Table", "githuburl":"" }, { "uri":"dli_08_0386.html", + "node_id":"dli_08_0386.xml", "product_code":"dli", - "code":"159", + "code":"320", "des":"Create a source stream to obtain data from Kafka as input data for jobs.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscripti", "doc_type":"sqlreference", "kw":"Kafka Source Table,Creating Source Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Kafka Source Table", "githuburl":"" }, { "uri":"dli_08_0387.html", + 
"node_id":"dli_08_0387.xml", "product_code":"dli", - "code":"160", + "code":"321", "des":"The MySQL CDC source table, that is, the MySQL streaming source table, reads all historical data in the database first and then smoothly switches data read to the Binlog ", "doc_type":"sqlreference", "kw":"MySQL CDC Source Table,Creating Source Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"MySQL CDC Source Table", "githuburl":"" }, { "uri":"dli_08_0388.html", + "node_id":"dli_08_0388.xml", "product_code":"dli", - "code":"161", + "code":"322", "des":"The Postgres CDC source table, that is, Postgres streaming source table, is used to read the full snapshot data and changed data of the PostgreSQL database in sequence. T", "doc_type":"sqlreference", "kw":"Postgres CDC Source Table,Creating Source Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Postgres CDC Source Table", "githuburl":"" }, { "uri":"dli_08_0389.html", + "node_id":"dli_08_0389.xml", "product_code":"dli", - "code":"162", + "code":"323", "des":"Create a source stream to obtain data from Redis as input for jobs.An enhanced datasource connection has been created for DLI to connect to the Redis database, so that yo", "doc_type":"sqlreference", "kw":"Redis Source Table,Creating Source Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Redis Source Table", "githuburl":"" }, { "uri":"dli_08_0390.html", + "node_id":"dli_08_0390.xml", "product_code":"dli", - "code":"163", + "code":"324", "des":"Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscription system. 
It delivers high throughput and built-in partitions and provi", "doc_type":"sqlreference", "kw":"Upsert Kafka Source Table,Creating Source Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Upsert Kafka Source Table", "githuburl":"" }, { "uri":"dli_08_0391.html", + "node_id":"dli_08_0391.xml", "product_code":"dli", - "code":"164", + "code":"325", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating Result Tables", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating Result Tables", "githuburl":"" }, { "uri":"dli_08_0392.html", + "node_id":"dli_08_0392.xml", "product_code":"dli", - "code":"165", + "code":"326", "des":"The BlackHole connector allows for swallowing all input records. It is designed for high-performance testing and UDF output. It is not a substantive sink. The BlackHole r", "doc_type":"sqlreference", "kw":"BlackHole Result Table,Creating Result Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"BlackHole Result Table", "githuburl":"" }, { "uri":"dli_08_0393.html", + "node_id":"dli_08_0393.xml", "product_code":"dli", - "code":"166", + "code":"327", "des":"DLI can output Flink job data to the ClickHouse database. ClickHouse is a column-based database oriented to online analysis and processing. 
It supports SQL query and prov", "doc_type":"sqlreference", "kw":"ClickHouse Result Table,Creating Result Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"ClickHouse Result Table", "githuburl":"" }, { "uri":"dli_08_0394.html", + "node_id":"dli_08_0394.xml", "product_code":"dli", - "code":"167", + "code":"328", "des":"DLI outputs the Flink job output data to GaussDB(DWS). GaussDB(DWS) database kernel is compliant with PostgreSQL. The PostgreSQL database can store data of more complex t", "doc_type":"sqlreference", "kw":"GaussDB(DWS) Result Table,Creating Result Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"GaussDB(DWS) Result Table", "githuburl":"" }, { "uri":"dli_08_0395.html", + "node_id":"dli_08_0395.xml", "product_code":"dli", - "code":"168", + "code":"329", "des":"DLI outputs Flink job output data to Elasticsearch of Cloud Search Service (CSS). Elasticsearch is a popular enterprise-class Lucene-powered search server and provides th", "doc_type":"sqlreference", "kw":"Elasticsearch Result Table,Creating Result Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Elasticsearch Result Table", "githuburl":"" }, { "uri":"dli_08_0396.html", + "node_id":"dli_08_0396.xml", "product_code":"dli", - "code":"169", + "code":"330", "des":"DLI outputs the job data to HBase. 
HBase is a column-oriented distributed cloud storage system that features enhanced reliability, excellent performance, and elastic scal", "doc_type":"sqlreference", "kw":"HBase Result Table,Creating Result Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"HBase Result Table", "githuburl":"" }, { "uri":"dli_08_0397.html", + "node_id":"dli_08_0397.xml", "product_code":"dli", - "code":"170", + "code":"331", "des":"DLI outputs the Flink job output data to RDS through the JDBC result table.An enhanced datasource connection with the instances has been established, so that you can conf", "doc_type":"sqlreference", "kw":"JDBC Result Table,Creating Result Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"JDBC Result Table", "githuburl":"" }, { "uri":"dli_08_0398.html", + "node_id":"dli_08_0398.xml", "product_code":"dli", - "code":"171", + "code":"332", "des":"DLI outputs the Flink job output data to Kafka through the Kafka result table.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subs", "doc_type":"sqlreference", "kw":"Kafka Result Table,Creating Result Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Kafka Result Table", "githuburl":"" }, { "uri":"dli_08_0399.html", + "node_id":"dli_08_0399.xml", "product_code":"dli", - "code":"172", + "code":"333", "des":"The Print connector is used to print output data to the error file or TaskManager file, making it easier for you to view the result in code debugging.NoneThe Print result", "doc_type":"sqlreference", "kw":"Print Result Table,Creating Result Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Print Result Table", "githuburl":"" 
}, { "uri":"dli_08_0400.html", + "node_id":"dli_08_0400.xml", "product_code":"dli", - "code":"173", + "code":"334", "des":"DLI outputs the Flink job output data to Redis. Redis is a key-value storage system that supports multiple types of data structures. It can be used in scenarios such as c", "doc_type":"sqlreference", "kw":"Redis Result Table,Creating Result Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Redis Result Table", "githuburl":"" }, { "uri":"dli_08_0401.html", + "node_id":"dli_08_0401.xml", "product_code":"dli", - "code":"174", + "code":"335", "des":"Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscription system. It delivers high throughput and built-in partitions and provi", "doc_type":"sqlreference", "kw":"Upsert Kafka Result Table,Creating Result Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Upsert Kafka Result Table", "githuburl":"" }, { - "uri":"dli_08_0402.html", + "uri":"dli_08_0439.html", + "node_id":"dli_08_0439.xml", "product_code":"dli", - "code":"175", + "code":"336", + "des":"The FileSystem result (sink) table is used to export data to the HDFS or OBS file system. It is applicable to scenarios such as data dumping, big data analysis, data back", + "doc_type":"sqlreference", + "kw":"FileSystem Result Table,Creating Result Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], + "title":"FileSystem Result Table", + "githuburl":"" + }, + { + "uri":"dli_08_0402.html", + "node_id":"dli_08_0402.xml", + "product_code":"dli", + "code":"337", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating Dimension Tables", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating Dimension Tables", "githuburl":"" }, { "uri":"dli_08_0403.html", + "node_id":"dli_08_0403.xml", "product_code":"dli", - "code":"176", + "code":"338", "des":"Create a GaussDB(DWS) table to connect to source streams for wide table generation.Ensure that you have created a GaussDB(DWS) cluster using your account.A DWS database t", "doc_type":"sqlreference", "kw":"GaussDB(DWS) Dimension Table,Creating Dimension Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"GaussDB(DWS) Dimension Table", "githuburl":"" }, { "uri":"dli_08_0404.html", + "node_id":"dli_08_0404.xml", "product_code":"dli", - "code":"177", + "code":"339", "des":"Create a Hbase dimension table to connect to the source streams for wide table generation.An enhanced datasource connection has been created for DLI to connect to HBase, ", "doc_type":"sqlreference", "kw":"HBase Dimension Table,Creating Dimension Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"HBase Dimension Table", "githuburl":"" }, { "uri":"dli_08_0405.html", + "node_id":"dli_08_0405.xml", "product_code":"dli", - "code":"178", + "code":"340", "des":"Create a JDBC dimension table to connect to the source stream.You have created a JDBC instance for your account.When you create a Flink OpenSource SQL job, set Flink Vers", "doc_type":"sqlreference", "kw":"JDBC Dimension Table,Creating Dimension Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"JDBC Dimension 
Table", "githuburl":"" }, { "uri":"dli_08_0406.html", + "node_id":"dli_08_0406.xml", "product_code":"dli", - "code":"179", + "code":"341", "des":"Create a Redis table to connect to source streams for wide table generation.An enhanced datasource connection with Redis has been established, so that you can configure s", "doc_type":"sqlreference", "kw":"Redis Dimension Table,Creating Dimension Tables,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Redis Dimension Table", "githuburl":"" }, { "uri":"dli_08_0407.html", + "node_id":"dli_08_0407.xml", "product_code":"dli", - "code":"180", + "code":"342", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Format", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Format", "githuburl":"" }, { "uri":"dli_08_0408.html", + "node_id":"dli_08_0408.xml", "product_code":"dli", - "code":"181", + "code":"343", "des":"Apache Avro is supported for you to read and write Avro data based on an Avro schema with Flink. The Avro schema is derived from the table schema.KafkaUpsert KafkaCurrent", "doc_type":"sqlreference", "kw":"Avro,Format,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Avro", "githuburl":"" }, { "uri":"dli_08_0409.html", + "node_id":"dli_08_0409.xml", "product_code":"dli", - "code":"182", + "code":"344", "des":"Canal is a Changelog Data Capture (CDC) tool that can stream changes in real-time from MySQL into other systems. 
Canal provides a unified format schema for changelog and ", "doc_type":"sqlreference", "kw":"Canal,Format,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Canal", "githuburl":"" }, { "uri":"dli_08_0410.html", + "node_id":"dli_08_0410.xml", "product_code":"dli", - "code":"183", + "code":"345", "des":"The Avro Schema Registry (avro-confluent) format allows you to read records that were serialized by the io.confluent.kafka.serializers.KafkaAvroSerializer and to write re", "doc_type":"sqlreference", "kw":"Confluent Avro,Format,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Confluent Avro", "githuburl":"" }, { "uri":"dli_08_0411.html", + "node_id":"dli_08_0411.xml", "product_code":"dli", - "code":"184", + "code":"346", "des":"The CSV format allows you to read and write CSV data based on a CSV schema. Currently, the CSV schema is derived from table schema.KafkaUpsert KafkaUse Kafka to send data", "doc_type":"sqlreference", "kw":"CSV,Format,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"CSV", "githuburl":"" }, { "uri":"dli_08_0412.html", + "node_id":"dli_08_0412.xml", "product_code":"dli", - "code":"185", + "code":"347", "des":"Debezium is a Changelog Data Capture (CDC) tool that can stream changes in real-time from other databases into Kafka. Debezium provides a unified format schema for change", "doc_type":"sqlreference", "kw":"Debezium,Format,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Debezium", "githuburl":"" }, { "uri":"dli_08_0413.html", + "node_id":"dli_08_0413.xml", "product_code":"dli", - "code":"186", + "code":"348", "des":"The JSON format allows you to read and write JSON data based on a JSON schema. 
Currently, the JSON schema is derived from table schema.KafkaUpsert KafkaElasticsearchIn th", "doc_type":"sqlreference", "kw":"JSON,Format,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"JSON", "githuburl":"" }, { "uri":"dli_08_0414.html", + "node_id":"dli_08_0414.xml", "product_code":"dli", - "code":"187", + "code":"349", "des":"Flink supports to interpret Maxwell JSON messages as INSERT/UPDATE/DELETE messages into Flink SQL system. This is useful in many cases to leverage this feature,such as:Sy", "doc_type":"sqlreference", "kw":"Maxwell,Format,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Maxwell", "githuburl":"" }, { "uri":"dli_08_0415.html", + "node_id":"dli_08_0415.xml", "product_code":"dli", - "code":"188", + "code":"350", "des":"The raw format allows you to read and write raw (byte based) values as a single column.Note: This format encodes null values as null of the byte[] type. This may have lim", "doc_type":"sqlreference", "kw":"Raw,Format,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Raw", "githuburl":"" }, { "uri":"dli_08_0416.html", + "node_id":"dli_08_0416.xml", "product_code":"dli", - "code":"189", + "code":"351", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"DML Snytax", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"DML Snytax", "githuburl":"" }, { "uri":"dli_08_0417.html", + "node_id":"dli_08_0417.xml", "product_code":"dli", - "code":"190", + "code":"352", "des":"SyntaxDescriptionSELECT is used to select data from a table.ALL indicates that all results are returned.DISTINCT indicates that the duplicated results are removed.Precaut", "doc_type":"sqlreference", "kw":"SELECT,DML Snytax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"SELECT", "githuburl":"" }, { "uri":"dli_08_0418.html", + "node_id":"dli_08_0418.xml", "product_code":"dli", - "code":"191", + "code":"353", "des":"SyntaxDescriptionUNION is used to return the union set of multiple query results.INTERSECT is used to return the intersection of multiple query results.EXCEPT is used to ", "doc_type":"sqlreference", "kw":"Set Operations,DML Snytax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Set Operations", "githuburl":"" }, { "uri":"dli_08_0419.html", + "node_id":"dli_08_0419.xml", "product_code":"dli", - "code":"192", + "code":"354", "des":"DescriptionGroup Window is defined in GROUP BY. One record is generated from each group. 
Group Window involves the following functions:Array functionsArray functionsGroup", "doc_type":"sqlreference", "kw":"Window,DML Snytax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Window", "githuburl":"" }, { "uri":"dli_08_0420.html", + "node_id":"dli_08_0420.xml", "product_code":"dli", - "code":"193", + "code":"355", "des":"SyntaxPrecautionsCurrently, only equi-joins are supported, for example, joins that have at least one conjunctive condition with an equality predicate. Arbitrary cross or ", "doc_type":"sqlreference", "kw":"JOIN,DML Snytax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"JOIN", "githuburl":"" }, { "uri":"dli_08_0421.html", + "node_id":"dli_08_0421.xml", "product_code":"dli", - "code":"194", + "code":"356", "des":"FunctionThis clause is used to sort data in ascending order on a time attribute.PrecautionsCurrently, only sorting by time attribute is supported.ExampleSort data in asce", "doc_type":"sqlreference", "kw":"OrderBy & Limit,DML Snytax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"OrderBy & Limit", "githuburl":"" }, { "uri":"dli_08_0422.html", + "node_id":"dli_08_0422.xml", "product_code":"dli", - "code":"195", + "code":"357", "des":"Top-N queries ask for the N smallest or largest values ordered by columns. Both smallest and largest values sets are considered Top-N queries. 
Top-N queries are useful in", "doc_type":"sqlreference", "kw":"Top-N,DML Snytax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Top-N", "githuburl":"" }, { "uri":"dli_08_0423.html", + "node_id":"dli_08_0423.xml", "product_code":"dli", - "code":"196", + "code":"358", "des":"Deduplication removes rows that duplicate over a set of columns, keeping only the first one or the last one.ROW_NUMBER(): Assigns a unique, sequential number to each row,", "doc_type":"sqlreference", "kw":"Deduplication,DML Snytax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Deduplication", "githuburl":"" }, { "uri":"dli_08_0424.html", + "node_id":"dli_08_0424.xml", "product_code":"dli", - "code":"197", + "code":"359", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Functions", "githuburl":"" }, { "uri":"dli_08_0425.html", + "node_id":"dli_08_0425.xml", "product_code":"dli", - "code":"198", + "code":"360", "des":"DLI supports the following three types of user-defined functions (UDFs):Regular UDF: takes in one or more input parameters and returns a single result.User-defined table-", "doc_type":"sqlreference", "kw":"User-Defined Functions (UDFs),Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"User-Defined Functions (UDFs)", "githuburl":"" }, { "uri":"dli_08_0426.html", + "node_id":"dli_08_0426.xml", "product_code":"dli", - "code":"199", + "code":"361", "des":"HUAWEI 
CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Built-In Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Built-In Functions", "githuburl":"" }, { "uri":"dli_08_0427.html", + "node_id":"dli_08_0427.xml", "product_code":"dli", - "code":"200", + "code":"362", "des":"All data types can be compared by using relational operators and the result is returned as a BOOLEAN value.Relationship operators are binary operators. Two compared data ", "doc_type":"sqlreference", "kw":"Mathematical Operation Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Mathematical Operation Functions", "githuburl":"" }, { "uri":"dli_08_0428.html", + "node_id":"dli_08_0428.xml", "product_code":"dli", - "code":"201", + "code":"363", "des":"SyntaxExampleTest input data.Test the data source kafka. 
The message content is as follows:{name:James,age:24,sex:male,grade:{math:95,science:[80,85],english:100}}\n{name:", "doc_type":"sqlreference", "kw":"String Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"String Functions", "githuburl":"" }, { "uri":"dli_08_0429.html", + "node_id":"dli_08_0429.xml", "product_code":"dli", - "code":"202", + "code":"364", "des":"Table 1 lists the time functions supported by Flink OpenSource SQL.FunctionReturns a SQL date parsed from string in form of yyyy-MM-dd.Returns a SQL date parsed from stri", "doc_type":"sqlreference", "kw":"Temporal Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Temporal Functions", "githuburl":"" }, { "uri":"dli_08_0430.html", + "node_id":"dli_08_0430.xml", "product_code":"dli", - "code":"203", + "code":"365", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Conditional Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Conditional Functions", "githuburl":"" }, { "uri":"dli_08_0431.html", + "node_id":"dli_08_0431.xml", "product_code":"dli", - "code":"204", - "des":"This function is used to forcibly convert types.If the input is NULL, NULL is returned.The following example converts the amount value to an integer.Flink jobs do not sup", + "code":"366", + "des":"This function is used to forcibly convert types.If the input is NULL, NULL is returned.The cast function does not support converting a string to the JSON format.The follo", "doc_type":"sqlreference", "kw":"Type Conversion Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Type Conversion Functions", "githuburl":"" }, { "uri":"dli_08_0432.html", + "node_id":"dli_08_0432.xml", "product_code":"dli", - "code":"205", + "code":"367", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Collection Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Collection Functions", "githuburl":"" }, { "uri":"dli_08_0433.html", + "node_id":"dli_08_0433.xml", "product_code":"dli", - "code":"206", + "code":"368", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Value Construction Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Value Construction Functions", "githuburl":"" }, { "uri":"dli_08_0434.html", + "node_id":"dli_08_0434.xml", "product_code":"dli", - "code":"207", + "code":"369", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Value Access Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Value Access Functions", "githuburl":"" }, { "uri":"dli_08_0435.html", + "node_id":"dli_08_0435.xml", "product_code":"dli", - "code":"208", + "code":"370", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Hash Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Hash Functions", "githuburl":"" }, { "uri":"dli_08_0436.html", + "node_id":"dli_08_0436.xml", "product_code":"dli", - "code":"209", + "code":"371", "des":"An aggregate function performs a calculation operation on a set of input values and returns a value. 
For example, the COUNT function counts the number of rows retrieved b", "doc_type":"sqlreference", "kw":"Aggregate Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Aggregate Functions", "githuburl":"" }, { "uri":"dli_08_0437.html", + "node_id":"dli_08_0437.xml", "product_code":"dli", - "code":"210", + "code":"372", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Table-Valued Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Table-Valued Functions", "githuburl":"" }, { "uri":"dli_08_0438.html", + "node_id":"dli_08_0438.xml", "product_code":"dli", - "code":"211", + "code":"373", "des":"The string_split function splits a target string into substrings based on the specified separator and returns a substring list.Create a Flink OpenSource SQL job by referr", "doc_type":"sqlreference", "kw":"string_split,Table-Valued Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"string_split", "githuburl":"" }, { "uri":"dli_08_0289.html", + "node_id":"dli_08_0289.xml", "product_code":"dli", - "code":"212", + "code":"374", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Flink Opensource SQL 1.10 Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Flink Opensource SQL 1.10 Syntax Reference", "githuburl":"" }, { "uri":"dli_08_0290.html", + "node_id":"dli_08_0290.xml", "product_code":"dli", - "code":"213", + "code":"375", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Constraints and Definitions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Constraints and Definitions", "githuburl":"" }, { "uri":"dli_08_0291.html", + "node_id":"dli_08_0291.xml", "product_code":"dli", - "code":"214", + "code":"376", "des":"STRING, BOOLEAN, BYTES, DECIMAL, TINYINT, SMALLINT, INTEGER, BIGINT, FLOAT, DOUBLE, DATE, TIME, TIMESTAMP, TIMESTAMP WITH LOCAL TIME ZONE, INTERVAL, ARRAY, MULTISET, MAP,", "doc_type":"sqlreference", "kw":"Supported Data Types,Constraints and Definitions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Supported Data Types", "githuburl":"" }, { "uri":"dli_08_0292.html", + "node_id":"dli_08_0292.xml", "product_code":"dli", - "code":"215", + "code":"377", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Syntax Definition", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Syntax Definition", "githuburl":"" }, { "uri":"dli_08_0293.html", + "node_id":"dli_08_0293.xml", "product_code":"dli", - "code":"216", + "code":"378", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Data Definition Language (DDL)", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Data Definition Language (DDL)", "githuburl":"" }, { "uri":"dli_08_0294.html", + "node_id":"dli_08_0294.xml", "product_code":"dli", - "code":"217", + "code":"379", "des":"This clause is used to create a table with a specified name.COMPUTED COLUMNA computed column is a virtual column generated using column_name AS computed_column_expression", "doc_type":"sqlreference", "kw":"CREATE TABLE,Data Definition Language (DDL),SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"CREATE TABLE", "githuburl":"" }, { "uri":"dli_08_0295.html", + "node_id":"dli_08_0295.xml", "product_code":"dli", - "code":"218", + "code":"380", "des":"Create a view with multiple layers nested in it to simplify the development process.IF NOT EXISTSIf the view already exists, nothing happens.Create a view named viewName.", "doc_type":"sqlreference", "kw":"CREATE VIEW,Data Definition Language (DDL),SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"CREATE 
VIEW", "githuburl":"" }, { "uri":"dli_08_0296.html", + "node_id":"dli_08_0296.xml", "product_code":"dli", - "code":"219", + "code":"381", "des":"Create a user-defined function.IF NOT EXISTSIf the function already exists, nothing happens.LANGUAGE JAVA|SCALALanguage tag is used to instruct Flink runtime how to execu", "doc_type":"sqlreference", "kw":"CREATE FUNCTION,Data Definition Language (DDL),SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"CREATE FUNCTION", "githuburl":"" }, { "uri":"dli_08_0297.html", + "node_id":"dli_08_0297.xml", "product_code":"dli", - "code":"220", + "code":"382", "des":"SyntaxPrecautionsFlink SQL uses a lexical policy for identifier (table, attribute, function names) similar to Java:The case of identifiers is preserved whether they are q", "doc_type":"sqlreference", "kw":"Data Manipulation Language (DML),Syntax Definition,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Data Manipulation Language (DML)", "githuburl":"" }, { "uri":"dli_08_0298.html", + "node_id":"dli_08_0298.xml", "product_code":"dli", - "code":"221", + "code":"383", "des":"This section describes the Flink OpenSource SQL syntax supported by DLI. For details about the parameters and examples, see the syntax description.", "doc_type":"sqlreference", "kw":"Flink OpenSource SQL 1.10 Syntax,Flink Opensource SQL 1.10 Syntax Reference,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Flink OpenSource SQL 1.10 Syntax", "githuburl":"" }, { "uri":"dli_08_0299.html", + "node_id":"dli_08_0299.xml", "product_code":"dli", - "code":"222", + "code":"384", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Data Definition Language (DDL)", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Data Definition Language (DDL)", "githuburl":"" }, { "uri":"dli_08_0300.html", + "node_id":"dli_08_0300.xml", "product_code":"dli", - "code":"223", + "code":"385", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating a Source Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Source Table", "githuburl":"" }, { "uri":"dli_08_0301.html", + "node_id":"dli_08_0301.xml", "product_code":"dli", - "code":"224", + "code":"386", "des":"Create a source stream to obtain data from Kafka as input data for jobs.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscripti", "doc_type":"sqlreference", "kw":"Kafka Source Table,Creating a Source Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Kafka Source Table", "githuburl":"" }, { "uri":"dli_08_0302.html", + "node_id":"dli_08_0302.xml", "product_code":"dli", - "code":"225", + "code":"387", "des":"Create a source stream to read data from DIS. DIS accesses user data and Flink job reads data from the DIS stream as input data for jobs. 
Flink jobs can quickly remove da", "doc_type":"sqlreference", "kw":"DIS Source Table,Creating a Source Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"DIS Source Table", "githuburl":"" }, { "uri":"dli_08_0303.html", + "node_id":"dli_08_0303.xml", "product_code":"dli", - "code":"226", + "code":"388", "des":"The JDBC connector is a Flink's built-in connector to read data from a database.An enhanced datasource connection with the database has been established, so that you can ", "doc_type":"sqlreference", "kw":"JDBC Source Table,Creating a Source Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"JDBC Source Table", "githuburl":"" }, { "uri":"dli_08_0304.html", + "node_id":"dli_08_0304.xml", "product_code":"dli", - "code":"227", + "code":"389", "des":"DLI reads data of Flink jobs from GaussDB(DWS). GaussDB(DWS) database kernel is compliant with PostgreSQL. 
The PostgreSQL database can store data of more complex types an", "doc_type":"sqlreference", "kw":"GaussDB(DWS) Source Table,Creating a Source Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"GaussDB(DWS) Source Table", "githuburl":"" }, { "uri":"dli_08_0305.html", + "node_id":"dli_08_0305.xml", "product_code":"dli", - "code":"228", + "code":"390", "des":"Create a source stream to obtain data from Redis as input for jobs.An enhanced datasource connection with Redis has been established, so that you can configure security g", "doc_type":"sqlreference", "kw":"Redis Source Table,Creating a Source Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Redis Source Table", "githuburl":"" }, { "uri":"dli_08_0306.html", + "node_id":"dli_08_0306.xml", "product_code":"dli", - "code":"229", + "code":"391", "des":"Create a source stream to obtain data from HBase as input for jobs. 
HBase is a column-oriented distributed cloud storage system that features enhanced reliability, excell", "doc_type":"sqlreference", "kw":"HBase Source Table,Creating a Source Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"HBase Source Table", "githuburl":"" }, { "uri":"dli_08_0358.html", + "node_id":"dli_08_0358.xml", "product_code":"dli", - "code":"230", + "code":"392", "des":"You can call APIs to obtain data from the cloud ecosystem or an open source ecosystem and use the obtained data as input of Flink jobs.The customized source class needs t", "doc_type":"sqlreference", "kw":"userDefined Source Table,Creating a Source Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"userDefined Source Table", "githuburl":"" }, { "uri":"dli_08_0307.html", + "node_id":"dli_08_0307.xml", "product_code":"dli", - "code":"231", + "code":"393", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating a Result Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Result Table", "githuburl":"" }, { "uri":"dli_08_0344.html", + "node_id":"dli_08_0344.xml", "product_code":"dli", - "code":"232", + "code":"394", "des":"DLI exports Flink job data to ClickHouse result tables.ClickHouse is a column-based database oriented to online analysis and processing. 
It supports SQL query and provide", "doc_type":"sqlreference", "kw":"ClickHouse Result Table,Creating a Result Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"ClickHouse Result Table", "githuburl":"" }, { "uri":"dli_08_0308.html", + "node_id":"dli_08_0308.xml", "product_code":"dli", - "code":"233", + "code":"395", "des":"DLI exports the output data of the Flink job to Kafka.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscription system. It deli", "doc_type":"sqlreference", "kw":"Kafka Result Table,Creating a Result Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Kafka Result Table", "githuburl":"" }, { "uri":"dli_08_0309.html", + "node_id":"dli_08_0309.xml", "product_code":"dli", - "code":"234", + "code":"396", "des":"DLI exports the output data of the Flink job to Kafka in upsert mode.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscription ", "doc_type":"sqlreference", "kw":"Upsert Kafka Result Table,Creating a Result Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Upsert Kafka Result Table", "githuburl":"" }, { "uri":"dli_08_0310.html", + "node_id":"dli_08_0310.xml", "product_code":"dli", - "code":"235", + "code":"397", "des":"DLI writes the Flink job output data into DIS. 
The data is filtered and imported to the DIS stream for future processing.DIS addresses the challenge of transmitting data ", "doc_type":"sqlreference", "kw":"DIS Result Table,Creating a Result Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"DIS Result Table", "githuburl":"" }, { "uri":"dli_08_0311.html", + "node_id":"dli_08_0311.xml", "product_code":"dli", - "code":"236", + "code":"398", "des":"DLI exports the output data of the Flink job to RDS.An enhanced datasource connection with the database has been established, so that you can configure security group rul", "doc_type":"sqlreference", "kw":"JDBC Result Table,Creating a Result Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"JDBC Result Table", "githuburl":"" }, { "uri":"dli_08_0312.html", + "node_id":"dli_08_0312.xml", "product_code":"dli", - "code":"237", + "code":"399", "des":"DLI outputs the Flink job output data to GaussDB(DWS). GaussDB(DWS) database kernel is compliant with PostgreSQL. The PostgreSQL database can store data of more complex t", "doc_type":"sqlreference", "kw":"GaussDB(DWS) Result Table,Creating a Result Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"GaussDB(DWS) Result Table", "githuburl":"" }, { "uri":"dli_08_0313.html", + "node_id":"dli_08_0313.xml", "product_code":"dli", - "code":"238", + "code":"400", "des":"DLI exports the output data of the Flink job to Redis. Redis is a storage system that supports multiple types of data structures such as key-value. 
It can be used in scen", "doc_type":"sqlreference", "kw":"Redis Result Table,Creating a Result Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Redis Result Table", "githuburl":"" }, { "uri":"dli_08_0314.html", + "node_id":"dli_08_0314.xml", "product_code":"dli", - "code":"239", + "code":"401", "des":"DLI exports Flink job output data to SMN.SMN provides reliable and flexible large-scale message notification services to DLI. It significantly simplifies system coupling ", "doc_type":"sqlreference", "kw":"SMN Result Table,Creating a Result Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"SMN Result Table", "githuburl":"" }, { "uri":"dli_08_0315.html", + "node_id":"dli_08_0315.xml", "product_code":"dli", - "code":"240", + "code":"402", "des":"DLI outputs the job data to HBase. HBase is a column-oriented distributed cloud storage system that features enhanced reliability, excellent performance, and elastic scal", "doc_type":"sqlreference", "kw":"HBase Result Table,Creating a Result Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"HBase Result Table", "githuburl":"" }, { "uri":"dli_08_0316.html", + "node_id":"dli_08_0316.xml", "product_code":"dli", - "code":"241", + "code":"403", "des":"DLI exports Flink job output data to Elasticsearch of Cloud Search Service (CSS). 
Elasticsearch is a popular enterprise-class Lucene-powered search server and provides th", "doc_type":"sqlreference", "kw":"Elasticsearch Result Table,Creating a Result Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Elasticsearch Result Table", "githuburl":"" }, { "uri":"dli_08_0348.html", + "node_id":"dli_08_0348.xml", "product_code":"dli", - "code":"242", + "code":"404", "des":"OpenTSDB is a distributed, scalable time series database based on HBase. OpenTSDB is designed to collect monitoring information of a large-scale cluster and query data in", "doc_type":"sqlreference", "kw":"OpenTSDB Result Table,Creating a Result Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"OpenTSDB Result Table", "githuburl":"" }, { "uri":"dli_08_0347.html", + "node_id":"dli_08_0347.xml", "product_code":"dli", - "code":"243", + "code":"405", "des":"Write your Java code to insert the processed data into a specified database supported by your cloud service.Implement the custom sink class :The custom sink class is inhe", "doc_type":"sqlreference", "kw":"User-defined Result Table,Creating a Result Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"User-defined Result Table", "githuburl":"" }, { "uri":"dli_08_0345.html", + "node_id":"dli_08_0345.xml", "product_code":"dli", - "code":"244", + "code":"406", "des":"The print connector exports your data output to the error file or the out file of TaskManager. 
It is mainly used for code debugging and output viewing.Read data from Kafk", "doc_type":"sqlreference", "kw":"Print Result Table,Creating a Result Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Print Result Table", "githuburl":"" }, { "uri":"dli_08_0346.html", + "node_id":"dli_08_0346.xml", "product_code":"dli", - "code":"245", + "code":"407", "des":"You can create a file system result table to export data to a file system such as HDFS or OBS. After the data is generated, a non-DLI table can be created directly accord", "doc_type":"sqlreference", "kw":"File System Result Table,Creating a Result Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"File System Result Table", "githuburl":"" }, { "uri":"dli_08_0317.html", + "node_id":"dli_08_0317.xml", "product_code":"dli", - "code":"246", + "code":"408", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating a Dimension Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Dimension Table", "githuburl":"" }, { "uri":"dli_08_0318.html", + "node_id":"dli_08_0318.xml", "product_code":"dli", - "code":"247", + "code":"409", "des":"Create a JDBC dimension table to connect to the source stream.You have created a JDBC instance for your account.The RDS table is used to connect to the source stream.CREA", "doc_type":"sqlreference", "kw":"JDBC Dimension Table,Creating a Dimension Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"JDBC Dimension Table", "githuburl":"" }, { "uri":"dli_08_0319.html", + "node_id":"dli_08_0319.xml", "product_code":"dli", - "code":"248", + "code":"410", "des":"Create a GaussDB(DWS) dimension table to connect to the input stream.You have created a GaussDB(DWS) instance for your account.Use an RDS table to connect to the source s", "doc_type":"sqlreference", "kw":"GaussDB(DWS) Dimension Table,Creating a Dimension Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"GaussDB(DWS) Dimension Table", "githuburl":"" }, { "uri":"dli_08_0320.html", + "node_id":"dli_08_0320.xml", "product_code":"dli", - "code":"249", + "code":"411", "des":"Create a Hbase dimension table to connect to the source stream.An enhanced datasource connection has been created for DLI to connect to HBase, so that jobs can run on the", "doc_type":"sqlreference", "kw":"HBase Dimension Table,Creating a Dimension Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"HBase Dimension 
Table", "githuburl":"" }, { "uri":"dli_08_0321.html", + "node_id":"dli_08_0321.xml", "product_code":"dli", - "code":"250", + "code":"412", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Data Manipulation Language (DML)", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Data Manipulation Language (DML)", "githuburl":"" }, { "uri":"dli_08_0322.html", + "node_id":"dli_08_0322.xml", "product_code":"dli", - "code":"251", + "code":"413", "des":"SyntaxDescriptionThis clause is used to select data from a table.ALL indicates that all results are returned.DISTINCT indicates that the duplicated results are removed.Pr", "doc_type":"sqlreference", "kw":"SELECT,Data Manipulation Language (DML),SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"SELECT", "githuburl":"" }, { "uri":"dli_08_0323.html", + "node_id":"dli_08_0323.xml", "product_code":"dli", - "code":"252", + "code":"414", "des":"SyntaxDescriptionUNION is used to return the union set of multiple query results.INTERSECT is used to return the intersection of multiple query results.EXCEPT is used to ", "doc_type":"sqlreference", "kw":"Set Operations,Data Manipulation Language (DML),SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Set Operations", "githuburl":"" }, { "uri":"dli_08_0324.html", + "node_id":"dli_08_0324.xml", "product_code":"dli", - "code":"253", + "code":"415", "des":"DescriptionGroup Window is defined in GROUP BY. One record is generated from each group. 
Group Window involves the following functions:Array functionsArray functionsGroup", "doc_type":"sqlreference", "kw":"Window,Data Manipulation Language (DML),SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Window", "githuburl":"" }, { "uri":"dli_08_0325.html", + "node_id":"dli_08_0325.xml", "product_code":"dli", - "code":"254", + "code":"416", "des":"SyntaxPrecautionsCurrently, only equi-joins are supported, for example, joins that have at least one conjunctive condition with an equality predicate. Arbitrary cross or ", "doc_type":"sqlreference", "kw":"JOIN,Data Manipulation Language (DML),SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"JOIN", "githuburl":"" }, { "uri":"dli_08_0326.html", + "node_id":"dli_08_0326.xml", "product_code":"dli", - "code":"255", + "code":"417", "des":"FunctionThis clause is used to sort data in ascending order on a time attribute.PrecautionsCurrently, only sorting by time attribute is supported.ExampleSort data in asce", "doc_type":"sqlreference", "kw":"OrderBy & Limit,Data Manipulation Language (DML),SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"OrderBy & Limit", "githuburl":"" }, { "uri":"dli_08_0327.html", + "node_id":"dli_08_0327.xml", "product_code":"dli", - "code":"256", + "code":"418", "des":"Top-N queries ask for the N smallest or largest values ordered by columns. Both smallest and largest values sets are considered Top-N queries. 
Top-N queries are useful in", "doc_type":"sqlreference", "kw":"Top-N,Data Manipulation Language (DML),SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Top-N", "githuburl":"" }, { "uri":"dli_08_0328.html", + "node_id":"dli_08_0328.xml", "product_code":"dli", - "code":"257", + "code":"419", "des":"Deduplication removes rows that duplicate over a set of columns, keeping only the first one or the last one.ROW_NUMBER(): Assigns a unique, sequential number to each row,", "doc_type":"sqlreference", "kw":"Deduplication,Data Manipulation Language (DML),SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Deduplication", "githuburl":"" }, { "uri":"dli_08_0329.html", + "node_id":"dli_08_0329.xml", "product_code":"dli", - "code":"258", + "code":"420", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Functions", "githuburl":"" }, { "uri":"dli_08_0330.html", + "node_id":"dli_08_0330.xml", "product_code":"dli", - "code":"259", + "code":"421", "des":"DLI supports the following three types of user-defined functions (UDFs):Regular UDF: takes in one or more input parameters and returns a single result.User-defined table-", "doc_type":"sqlreference", "kw":"User-Defined Functions,Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"User-Defined Functions", "githuburl":"" }, { "uri":"dli_08_0331.html", + "node_id":"dli_08_0331.xml", "product_code":"dli", - "code":"260", + "code":"422", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Built-In Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Built-In Functions", "githuburl":"" }, { "uri":"dli_08_0332.html", + "node_id":"dli_08_0332.xml", "product_code":"dli", - "code":"261", + "code":"423", "des":"All data types can be compared by using relational operators and the result is returned as a BOOLEAN value.Relationship operators are binary operators. 
Two compared data ", "doc_type":"sqlreference", "kw":"Mathematical Operation Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Mathematical Operation Functions", "githuburl":"" }, { "uri":"dli_08_0333.html", + "node_id":"dli_08_0333.xml", "product_code":"dli", - "code":"262", + "code":"424", "des":"SyntaxExampleTest input data.Test the data source kafka. The message content is as follows:\"{name:James,age:24,sex:male,grade:{math:95,science:[80,85],english:100}}\"\n\"{na", "doc_type":"sqlreference", "kw":"String Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"String Functions", "githuburl":"" }, { "uri":"dli_08_0334.html", + "node_id":"dli_08_0334.xml", "product_code":"dli", - "code":"263", + "code":"425", "des":"Table 1 lists the temporal functions supported by Flink OpenSource SQL.FunctionReturns a date parsed from string in form of yyyy-MM-dd.Returns a date parsed from string i", "doc_type":"sqlreference", "kw":"Temporal Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Temporal Functions", "githuburl":"" }, { "uri":"dli_08_0335.html", + "node_id":"dli_08_0335.xml", "product_code":"dli", - "code":"264", + "code":"426", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Conditional Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Conditional Functions", "githuburl":"" }, { "uri":"dli_08_0336.html", + "node_id":"dli_08_0336.xml", "product_code":"dli", - "code":"265", + "code":"427", "des":"This function is used to forcibly convert types.If the input is NULL, NULL is returned.The following example converts the amount value to an integer.Flink jobs do not sup", "doc_type":"sqlreference", "kw":"Type Conversion Function,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Type Conversion Function", "githuburl":"" }, { "uri":"dli_08_0337.html", + "node_id":"dli_08_0337.xml", "product_code":"dli", - "code":"266", + "code":"428", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Collection Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Collection Functions", "githuburl":"" }, { "uri":"dli_08_0338.html", + "node_id":"dli_08_0338.xml", "product_code":"dli", - "code":"267", + "code":"429", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Value Construction Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Value Construction Functions", "githuburl":"" }, { "uri":"dli_08_0339.html", + "node_id":"dli_08_0339.xml", "product_code":"dli", - "code":"268", + "code":"430", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Value Access Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Value Access Functions", "githuburl":"" }, { "uri":"dli_08_0340.html", + "node_id":"dli_08_0340.xml", "product_code":"dli", - "code":"269", + "code":"431", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Hash Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Hash Functions", "githuburl":"" }, { "uri":"dli_08_0341.html", + "node_id":"dli_08_0341.xml", "product_code":"dli", - "code":"270", + "code":"432", "des":"An aggregate function performs a calculation operation on a set of input values and returns a value. 
For example, the COUNT function counts the number of rows retrieved b", "doc_type":"sqlreference", "kw":"Aggregate Function,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Aggregate Function", "githuburl":"" }, { "uri":"dli_08_0342.html", + "node_id":"dli_08_0342.xml", "product_code":"dli", - "code":"271", + "code":"433", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Table-Valued Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Table-Valued Functions", "githuburl":"" }, { "uri":"dli_08_0357.html", + "node_id":"dli_08_0357.xml", "product_code":"dli", - "code":"272", + "code":"434", "des":"The split_cursor function can convert one row of records into multiple rows or convert one column of records into multiple columns. 
Table-valued functions can only be use", "doc_type":"sqlreference", "kw":"split_cursor,Table-Valued Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"split_cursor", "githuburl":"" }, { "uri":"dli_08_0356.html", + "node_id":"dli_08_0356.xml", "product_code":"dli", - "code":"273", + "code":"435", "des":"The string_split function splits a target string into substrings based on the specified separator and returns a substring list.Prepare test input data.Source table disSou", "doc_type":"sqlreference", "kw":"string_split,Table-Valued Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"string_split", "githuburl":"" }, { "uri":"dli_08_0450.html", + "node_id":"dli_08_0450.xml", "product_code":"dli", - "code":"274", + "code":"436", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Historical Versions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Historical Versions", "githuburl":"" }, { "uri":"dli_08_0233.html", + "node_id":"dli_08_0233.xml", "product_code":"dli", - "code":"275", + "code":"437", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Flink SQL Syntax", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Flink SQL Syntax", "githuburl":"" }, { "uri":"dli_08_0075.html", + "node_id":"dli_08_0075.xml", "product_code":"dli", - "code":"276", + "code":"438", "des":"Currently, Flink SQL only supports the following operations: SELECT, FROM, WHERE, UNION, aggregation, window, JOIN between stream and table data, and JOIN between streams", "doc_type":"sqlreference", "kw":"SQL Syntax Constraints and Definitions,Flink SQL Syntax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"SQL Syntax Constraints and Definitions", "githuburl":"" }, { "uri":"dli_08_0275.html", + "node_id":"dli_08_0275.xml", "product_code":"dli", - "code":"277", + "code":"439", "des":"This section describes the Flink SQL syntax list provided by DLI. For details about the parameters and examples, see the syntax description.", "doc_type":"sqlreference", "kw":"SQL Syntax Overview of Stream Jobs,Flink SQL Syntax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"SQL Syntax Overview of Stream Jobs", "githuburl":"" }, { "uri":"dli_08_0234.html", + "node_id":"dli_08_0234.xml", "product_code":"dli", - "code":"278", + "code":"440", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating a Source Stream", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Source Stream", "githuburl":"" }, { "uri":"dli_08_0237.html", + "node_id":"dli_08_0237.xml", "product_code":"dli", - "code":"279", + "code":"441", "des":"Create a source stream to obtain data from HBase of CloudTable as input data of the job. HBase is a column-oriented distributed cloud storage system that features enhance", "doc_type":"sqlreference", "kw":"CloudTable HBase Source Stream,Creating a Source Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"CloudTable HBase Source Stream", "githuburl":"" }, { "uri":"dli_08_0235.html", + "node_id":"dli_08_0235.xml", "product_code":"dli", - "code":"280", + "code":"442", "des":"Create a source stream to read data from DIS. DIS accesses user data and Flink job reads data from the DIS stream as input data for jobs. Flink jobs can quickly remove da", "doc_type":"sqlreference", "kw":"DIS Source Stream,Creating a Source Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"DIS Source Stream", "githuburl":"" }, { "uri":"dli_08_0270.html", + "node_id":"dli_08_0270.xml", "product_code":"dli", - "code":"281", + "code":"443", "des":"DMS (Distributed Message Service) is a message middleware service based on distributed, high-availability clustering technology. 
It provides reliable, scalable, fully man", "doc_type":"sqlreference", "kw":"DMS Source Stream,Creating a Source Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"DMS Source Stream", "githuburl":"" }, { "uri":"dli_08_0238.html", + "node_id":"dli_08_0238.xml", "product_code":"dli", - "code":"282", + "code":"444", "des":"Create a source stream to obtain data from Kafka as input data for jobs.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscripti", "doc_type":"sqlreference", "kw":"MRS Kafka Source Stream,Creating a Source Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"MRS Kafka Source Stream", "githuburl":"" }, { "uri":"dli_08_0239.html", + "node_id":"dli_08_0239.xml", "product_code":"dli", - "code":"283", + "code":"445", "des":"Create a source stream to obtain data from Kafka as input data for jobs.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscripti", "doc_type":"sqlreference", "kw":"Open-Source Kafka Source Stream,Creating a Source Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Open-Source Kafka Source Stream", "githuburl":"" }, { "uri":"dli_08_0236.html", + "node_id":"dli_08_0236.xml", "product_code":"dli", - "code":"284", + "code":"446", "des":"Create a source stream to obtain data from OBS. DLI reads data stored by users in OBS as input data for jobs. 
OBS applies to various scenarios, such as big data analysis,", "doc_type":"sqlreference", "kw":"OBS Source Stream,Creating a Source Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"OBS Source Stream", "githuburl":"" }, { "uri":"dli_08_0240.html", + "node_id":"dli_08_0240.xml", "product_code":"dli", - "code":"285", + "code":"447", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating a Sink Stream", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Sink Stream", "githuburl":"" }, { "uri":"dli_08_0243.html", + "node_id":"dli_08_0243.xml", "product_code":"dli", - "code":"286", + "code":"448", "des":"DLI exports the job output data to HBase of CloudTable. HBase is a column-oriented distributed cloud storage system that features enhanced reliability, excellent performa", "doc_type":"sqlreference", "kw":"CloudTable HBase Sink Stream,Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"CloudTable HBase Sink Stream", "githuburl":"" }, { "uri":"dli_08_0244.html", + "node_id":"dli_08_0244.xml", "product_code":"dli", - "code":"287", + "code":"449", "des":"DLI exports the job output data to OpenTSDB of CloudTable. OpenTSDB is a distributed, scalable time series database based on HBase. It stores time series data. 
Time serie", "doc_type":"sqlreference", "kw":"CloudTable OpenTSDB Sink Stream,Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"CloudTable OpenTSDB Sink Stream", "githuburl":"" }, { "uri":"dli_08_0286.html", + "node_id":"dli_08_0286.xml", "product_code":"dli", - "code":"288", + "code":"450", "des":"DLI exports the output data of the Flink job to OpenTSDB of MRS.OpenTSDB has been installed in the MRS cluster.In this scenario, jobs must run on the dedicated queue of D", "doc_type":"sqlreference", "kw":"MRS OpenTSDB Sink Stream,Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"MRS OpenTSDB Sink Stream", "githuburl":"" }, { "uri":"dli_08_0252.html", + "node_id":"dli_08_0252.xml", "product_code":"dli", - "code":"289", + "code":"451", "des":"DLI exports Flink job output data to Elasticsearch of Cloud Search Service (CSS). Elasticsearch is a popular enterprise-class Lucene-powered search server and provides th", "doc_type":"sqlreference", "kw":"CSS Elasticsearch Sink Stream,Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"CSS Elasticsearch Sink Stream", "githuburl":"" }, { "uri":"dli_08_0253.html", + "node_id":"dli_08_0253.xml", "product_code":"dli", - "code":"290", + "code":"452", "des":"DLI exports the Flink job output data to Redis of DCS. Redis is a storage system that supports multiple types of data structures such as key-value. 
It can be used in scen", "doc_type":"sqlreference", "kw":"DCS Sink Stream,Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"DCS Sink Stream", "githuburl":"" }, { "uri":"dli_08_0249.html", + "node_id":"dli_08_0249.xml", "product_code":"dli", - "code":"291", + "code":"453", "des":"DLI outputs the job output data to Document Database Service (DDS).DDS is compatible with the MongoDB protocol and is secure, highly available, reliable, scalable, and ea", "doc_type":"sqlreference", "kw":"DDS Sink Stream,Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"DDS Sink Stream", "githuburl":"" }, { "uri":"dli_08_0241.html", + "node_id":"dli_08_0241.xml", "product_code":"dli", - "code":"292", + "code":"454", "des":"DLI writes the Flink job output data into DIS. This cloud ecosystem is applicable to scenarios where data is filtered and imported to the DIS stream for future processing", "doc_type":"sqlreference", "kw":"DIS Sink Stream,Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"DIS Sink Stream", "githuburl":"" }, { "uri":"dli_08_0271.html", + "node_id":"dli_08_0271.xml", "product_code":"dli", - "code":"293", + "code":"455", "des":"DMS (Distributed Message Service) is a message middleware service based on distributed, high-availability clustering technology. 
It provides reliable, scalable, fully man", "doc_type":"sqlreference", "kw":"DMS Sink Stream,Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"DMS Sink Stream", "githuburl":"" }, { "uri":"dli_08_0247.html", + "node_id":"dli_08_0247.xml", "product_code":"dli", - "code":"294", + "code":"456", "des":"DLI outputs the Flink job output data to Data Warehouse Service (DWS). DWS database kernel is compliant with PostgreSQL. The PostgreSQL database can store data of more co", "doc_type":"sqlreference", "kw":"DWS Sink Stream (JDBC Mode),Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"DWS Sink Stream (JDBC Mode)", "githuburl":"" }, { "uri":"dli_08_0248.html", + "node_id":"dli_08_0248.xml", "product_code":"dli", - "code":"295", + "code":"457", "des":"Create a sink stream to export Flink job data to DWS through OBS-based dumping, specifically, output Flink job data to OBS and then import data from OBS to DWS. For detai", "doc_type":"sqlreference", "kw":"DWS Sink Stream (OBS-based Dumping),Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"DWS Sink Stream (OBS-based Dumping)", "githuburl":"" }, { "uri":"dli_08_0255.html", + "node_id":"dli_08_0255.xml", "product_code":"dli", - "code":"296", + "code":"458", "des":"DLI exports the output data of the Flink job to HBase of MRS.An MRS cluster has been created by using your account. 
DLI can interconnect with HBase clusters with Kerberos", "doc_type":"sqlreference", "kw":"MRS HBase Sink Stream,Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"MRS HBase Sink Stream", "githuburl":"" }, { "uri":"dli_08_0254.html", + "node_id":"dli_08_0254.xml", "product_code":"dli", - "code":"297", + "code":"459", "des":"DLI exports the output data of the Flink job to Kafka.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscription system. It deli", "doc_type":"sqlreference", "kw":"MRS Kafka Sink Stream,Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"MRS Kafka Sink Stream", "githuburl":"" }, { "uri":"dli_08_0257.html", + "node_id":"dli_08_0257.xml", "product_code":"dli", - "code":"298", + "code":"460", "des":"DLI exports the output data of the Flink job to Kafka.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscription system. It deli", "doc_type":"sqlreference", "kw":"Open-Source Kafka Sink Stream,Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Open-Source Kafka Sink Stream", "githuburl":"" }, { "uri":"dli_08_0267.html", + "node_id":"dli_08_0267.xml", "product_code":"dli", - "code":"299", + "code":"461", "des":"You can create a sink stream to export data to a file system such as HDFS or OBS. 
After the data is generated, a non-DLI table can be created directly according to the ge", "doc_type":"sqlreference", "kw":"File System Sink Stream (Recommended),Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"File System Sink Stream (Recommended)", "githuburl":"" }, { "uri":"dli_08_0242.html", + "node_id":"dli_08_0242.xml", "product_code":"dli", - "code":"300", + "code":"462", "des":"Create a sink stream to export DLI data to OBS. DLI can export the job analysis results to OBS. OBS applies to various scenarios, such as big data analysis, cloud-native ", "doc_type":"sqlreference", "kw":"OBS Sink Stream,Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"OBS Sink Stream", "githuburl":"" }, { "uri":"dli_08_0245.html", + "node_id":"dli_08_0245.xml", "product_code":"dli", - "code":"301", + "code":"463", "des":"DLI outputs the Flink job output data to RDS. Currently, PostgreSQL and MySQL databases are supported. The PostgreSQL database can store data of more complex types and de", "doc_type":"sqlreference", "kw":"RDS Sink Stream,Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"RDS Sink Stream", "githuburl":"" }, { "uri":"dli_08_0251.html", + "node_id":"dli_08_0251.xml", "product_code":"dli", - "code":"302", + "code":"464", "des":"DLI exports Flink job output data to SMN.SMN provides reliable and flexible large-scale message notification services to DLI. 
It significantly simplifies system coupling ", "doc_type":"sqlreference", "kw":"SMN Sink Stream,Creating a Sink Stream,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"SMN Sink Stream", "githuburl":"" }, { "uri":"dli_08_0258.html", + "node_id":"dli_08_0258.xml", "product_code":"dli", - "code":"303", + "code":"465", "des":"The temporary stream is used to simplify SQL logic. If complex SQL logic is followed, write SQL statements concatenated with temporary streams. The temporary stream is ju", "doc_type":"sqlreference", "kw":"Creating a Temporary Stream,Flink SQL Syntax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Temporary Stream", "githuburl":"" }, { "uri":"dli_08_0259.html", + "node_id":"dli_08_0259.xml", "product_code":"dli", - "code":"304", + "code":"466", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Creating a Dimension Table", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Dimension Table", "githuburl":"" }, { "uri":"dli_08_0260.html", + "node_id":"dli_08_0260.xml", "product_code":"dli", - "code":"305", + "code":"467", "des":"Create a Redis table to connect to the source stream.For details about the JOIN syntax, see JOIN Between Stream Data and Table Data.Redis clusters are not supported.Ensur", "doc_type":"sqlreference", "kw":"Creating a Redis Table,Creating a Dimension Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating a Redis Table", "githuburl":"" }, { "uri":"dli_08_0261.html", + "node_id":"dli_08_0261.xml", "product_code":"dli", - "code":"306", + "code":"468", "des":"Create an RDS/DWS table to connect to the source stream.For details about the JOIN syntax, see JOIN.Ensure that you have created a PostgreSQL or MySQL RDS instance in RDS", "doc_type":"sqlreference", "kw":"Creating an RDS Table,Creating a Dimension Table,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Creating an RDS Table", "githuburl":"" }, { "uri":"dli_08_0272.html", + "node_id":"dli_08_0272.xml", "product_code":"dli", - "code":"307", + "code":"469", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Custom Stream Ecosystem", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Custom Stream Ecosystem", "githuburl":"" }, { "uri":"dli_08_0273.html", + "node_id":"dli_08_0273.xml", "product_code":"dli", - "code":"308", + "code":"470", "des":"Compile code to obtain data from the desired cloud ecosystem or open-source ecosystem as the input data of Flink jobs.The user-defined source class needs to inherit the R", "doc_type":"sqlreference", "kw":"Custom Source Stream,Custom Stream Ecosystem,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Custom Source Stream", "githuburl":"" }, { "uri":"dli_08_0274.html", + "node_id":"dli_08_0274.xml", "product_code":"dli", - "code":"309", + "code":"471", "des":"Compile code to write the data processed by DLI to a specified cloud ecosystem or open-source ecosystem.The user-defined sink class needs to inherit the RichSinkFunction ", "doc_type":"sqlreference", "kw":"Custom Sink Stream,Custom Stream Ecosystem,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Custom Sink Stream", "githuburl":"" }, { "uri":"dli_08_0207.html", + "node_id":"dli_08_0207.xml", "product_code":"dli", - "code":"310", + "code":"472", "des":"Data type is a basic attribute of data and used to distinguish different types of data. 
Different data types occupy different storage space and support different operatio", "doc_type":"sqlreference", "kw":"Data Type,Flink SQL Syntax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Data Type", "githuburl":"" }, { "uri":"dli_08_0086.html", + "node_id":"dli_08_0086.xml", "product_code":"dli", - "code":"311", + "code":"473", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Built-In Functions", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Built-In Functions", "githuburl":"" }, { "uri":"dli_08_0191.html", + "node_id":"dli_08_0191.xml", "product_code":"dli", - "code":"312", + "code":"474", "des":"All data types can be compared by using relational operators and the result is returned as a BOOLEAN value.Relationship operators are binary operators. 
Two compared data ", "doc_type":"sqlreference", "kw":"Mathematical Operation Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Mathematical Operation Functions", "githuburl":"" }, { "uri":"dli_08_0096.html", + "node_id":"dli_08_0096.xml", "product_code":"dli", - "code":"313", - "des":"The common character string functions of DLI are as follows:FunctionConcatenates two character strings.Concatenates two character strings.SyntaxVARCHAR VARCHAR a || VARCH", + "code":"475", + "des":"The common string functions of DLI are as follows:FunctionConcatenates two strings.Concatenates two strings.SyntaxVARCHAR VARCHAR a || VARCHAR bParametersa: string.b: str", "doc_type":"sqlreference", "kw":"String Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"String Functions", "githuburl":"" }, { "uri":"dli_08_0097.html", + "node_id":"dli_08_0097.xml", "product_code":"dli", - "code":"314", + "code":"476", "des":"Table 1 lists the time functions supported by Flink SQL.None", "doc_type":"sqlreference", "kw":"Temporal Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Temporal Functions", "githuburl":"" }, { "uri":"dli_08_0112.html", + "node_id":"dli_08_0112.xml", "product_code":"dli", - "code":"315", + "code":"477", "des":"This function is used to forcibly convert types.If the input is NULL, NULL is returned.Flink jobs do not support the conversion of bigint to timestamp using CAST. 
You can", "doc_type":"sqlreference", "kw":"Type Conversion Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Type Conversion Functions", "githuburl":"" }, { "uri":"dli_08_0104.html", + "node_id":"dli_08_0104.xml", "product_code":"dli", - "code":"316", + "code":"478", "des":"An aggregate function performs a calculation operation on a set of input values and returns a value. For example, the COUNT function counts the number of rows retrieved b", "doc_type":"sqlreference", "kw":"Aggregate Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Aggregate Functions", "githuburl":"" }, { "uri":"dli_08_0206.html", + "node_id":"dli_08_0206.xml", "product_code":"dli", - "code":"317", + "code":"479", "des":"Table-valued functions can convert one row of records into multiple rows or convert one column of records into multiple columns. 
Table-valued functions can only be used i", "doc_type":"sqlreference", "kw":"Table-Valued Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Table-Valued Functions", "githuburl":"" }, { "uri":"dli_08_0101.html", + "node_id":"dli_08_0101.xml", "product_code":"dli", - "code":"318", + "code":"480", "des":"Example:The returned number of elements in the array is 3.HELLO WORLD is returned.", "doc_type":"sqlreference", "kw":"Other Functions,Built-In Functions,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Other Functions", "githuburl":"" }, { "uri":"dli_08_0099.html", + "node_id":"dli_08_0099.xml", "product_code":"dli", - "code":"319", + "code":"481", "des":"DLI supports the following three types of user-defined functions (UDFs):Regular UDF: takes in one or more input parameters and returns a single result.User-defined table-", "doc_type":"sqlreference", "kw":"User-Defined Functions,Flink SQL Syntax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"User-Defined Functions", "githuburl":"" }, { "uri":"dli_08_0209.html", + "node_id":"dli_08_0209.xml", "product_code":"dli", - "code":"320", + "code":"482", "des":"Table 1 describes the basic geospatial geometric elements.You can build complex geospatial geometries based on basic geospatial geometric elements. 
Table 2 describes the ", "doc_type":"sqlreference", "kw":"Geographical Functions,Flink SQL Syntax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Geographical Functions", "githuburl":"" }, { "uri":"dli_08_0102.html", + "node_id":"dli_08_0102.xml", "product_code":"dli", - "code":"321", + "code":"483", "des":"SyntaxDescriptionThe SELECT statement is used to select data from a table or insert constant data into a table.PrecautionsThe table to be queried must exist. Otherwise, a", "doc_type":"sqlreference", "kw":"SELECT,Flink SQL Syntax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"SELECT", "githuburl":"" }, { "uri":"dli_08_0103.html", + "node_id":"dli_08_0103.xml", "product_code":"dli", - "code":"322", + "code":"484", "des":"SyntaxorDescriptionIf the value of value is value1, result1 is returned. If the value is not any of the values listed in the clause, resultZ is returned. If no else state", "doc_type":"sqlreference", "kw":"Condition Expression,Flink SQL Syntax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Condition Expression", "githuburl":"" }, { "uri":"dli_08_0218.html", + "node_id":"dli_08_0218.xml", "product_code":"dli", - "code":"323", + "code":"485", "des":"DescriptionGroup Window is defined in GROUP BY. One record is generated from each group. 
Group Window involves the following functions:time_attr can be processing-time or", "doc_type":"sqlreference", "kw":"Window,Flink SQL Syntax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Window", "githuburl":"" }, { "uri":"dli_08_0106.html", + "node_id":"dli_08_0106.xml", "product_code":"dli", - "code":"324", + "code":"486", "des":"The JOIN operation allows you to query data from a table and write the query result to the sink stream. Currently, only RDSs and DCS Redis tables are supported. The ON ke", "doc_type":"sqlreference", "kw":"JOIN Between Stream Data and Table Data,Flink SQL Syntax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"JOIN Between Stream Data and Table Data", "githuburl":"" }, { "uri":"dli_08_0107.html", + "node_id":"dli_08_0107.xml", "product_code":"dli", - "code":"325", + "code":"487", "des":"Flink provides two time models: processing time and event time.DLI allows you to specify the time model during creation of the source stream and temporary stream.Processi", "doc_type":"sqlreference", "kw":"Configuring Time Models,Flink SQL Syntax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Configuring Time Models", "githuburl":"" }, { "uri":"dli_08_0108.html", + "node_id":"dli_08_0108.xml", "product_code":"dli", - "code":"326", + "code":"488", "des":"Complex event processing (CEP) is used to detect complex patterns in endless data streams so as to identify and search patterns in various data rows. 
Pattern matching is ", "doc_type":"sqlreference", "kw":"Pattern Matching,Flink SQL Syntax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Pattern Matching", "githuburl":"" }, { "uri":"dli_08_0109.html", + "node_id":"dli_08_0109.xml", "product_code":"dli", - "code":"327", + "code":"489", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"StreamingML", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"StreamingML", "githuburl":"" }, { "uri":"dli_08_0110.html", + "node_id":"dli_08_0110.xml", "product_code":"dli", - "code":"328", + "code":"490", "des":"Anomaly detection applies to various scenarios, including intrusion detection, financial fraud detection, sensor data monitoring, medical diagnosis, natural data detectio", "doc_type":"sqlreference", "kw":"Anomaly Detection,StreamingML,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Anomaly Detection", "githuburl":"" }, { "uri":"dli_08_0111.html", + "node_id":"dli_08_0111.xml", "product_code":"dli", - "code":"329", + "code":"491", "des":"Modeling and forecasting time series is a common task in many business verticals. 
Modeling is used to extract meaningful statistics and other characteristics of the data.", "doc_type":"sqlreference", "kw":"Time Series Forecasting,StreamingML,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Time Series Forecasting", "githuburl":"" }, { "uri":"dli_08_0216.html", + "node_id":"dli_08_0216.xml", "product_code":"dli", - "code":"330", + "code":"492", "des":"Clustering algorithms belong to unsupervised algorithms. K-Means, a clustering algorithm, partitions data points into related clusters by calculating the distance between", "doc_type":"sqlreference", "kw":"Real-Time Clustering,StreamingML,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Real-Time Clustering", "githuburl":"" }, { "uri":"dli_08_0088.html", + "node_id":"dli_08_0088.xml", "product_code":"dli", - "code":"331", + "code":"493", "des":"Deep learning has a wide range of applications in many industries, such as image classification, image recognition, and speech recognition. DLI provides several functions", "doc_type":"sqlreference", "kw":"Deep Learning Model Prediction,StreamingML,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Deep Learning Model Prediction", "githuburl":"" }, { "uri":"dli_08_0125.html", + "node_id":"dli_08_0125.xml", "product_code":"dli", - "code":"332", - "des":"Flink SQL reserves some strings as keywords. If you want to use the following character strings as field names, ensure that they are enclosed by back quotes, for example,", + "code":"494", + "des":"Flink SQL reserves some strings as keywords. 
If you want to use the following strings as field names, ensure that they are enclosed by back quotes, for example, `value` a", "doc_type":"sqlreference", "kw":"Reserved Keywords,Flink SQL Syntax,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Reserved Keywords", "githuburl":"" }, { "uri":"dli_08_0001.html", + "node_id":"dli_08_0001.xml", "product_code":"dli", - "code":"333", + "code":"495", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Identifiers", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Identifiers", "githuburl":"" }, { "uri":"dli_08_0002.html", + "node_id":"dli_08_0002.xml", "product_code":"dli", - "code":"334", + "code":"496", "des":"None.Aggregate function.", "doc_type":"sqlreference", "kw":"aggregate_func,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"aggregate_func", "githuburl":"" }, { "uri":"dli_08_0003.html", + "node_id":"dli_08_0003.xml", "product_code":"dli", - "code":"335", + "code":"497", "des":"None.Alias, which must be STRING type. It can be assigned to a field, table, view, or subquery.", "doc_type":"sqlreference", "kw":"alias,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"alias", "githuburl":"" }, { "uri":"dli_08_0004.html", + "node_id":"dli_08_0004.xml", "product_code":"dli", - "code":"336", + "code":"498", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"attr_expr,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"attr_expr", "githuburl":"" }, { "uri":"dli_08_0005.html", + "node_id":"dli_08_0005.xml", "product_code":"dli", - "code":"337", + "code":"499", "des":"None.List of attr_expr, which is separated by commas (,).", "doc_type":"sqlreference", "kw":"attr_expr_list,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"attr_expr_list", "githuburl":"" }, { "uri":"dli_08_0006.html", + "node_id":"dli_08_0006.xml", "product_code":"dli", - "code":"338", + "code":"500", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"attrs_value_set_expr,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"attrs_value_set_expr", "githuburl":"" }, { "uri":"dli_08_0007.html", + "node_id":"dli_08_0007.xml", "product_code":"dli", - "code":"339", + "code":"501", "des":"None.Return a boolean expression.", "doc_type":"sqlreference", "kw":"boolean_expression,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"boolean_expression", "githuburl":"" }, { "uri":"dli_08_0009.html", + "node_id":"dli_08_0009.xml", "product_code":"dli", - "code":"340", + "code":"502", "des":"None.Formal parameter for function call. 
It is usually a field name, which is the same as col_name.", "doc_type":"sqlreference", "kw":"col,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"col", "githuburl":"" }, { "uri":"dli_08_0010.html", + "node_id":"dli_08_0010.xml", "product_code":"dli", - "code":"341", + "code":"503", "des":"None.Column (field) description, which must be STRING type and cannot exceed 256 bytes.", "doc_type":"sqlreference", "kw":"col_comment,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"col_comment", "githuburl":"" }, { "uri":"dli_08_0011.html", + "node_id":"dli_08_0011.xml", "product_code":"dli", - "code":"342", + "code":"504", "des":"None.Column name, which must be STRING type and cannot exceed 128 bytes.", "doc_type":"sqlreference", "kw":"col_name,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"col_name", "githuburl":"" }, { "uri":"dli_08_0012.html", + "node_id":"dli_08_0012.xml", "product_code":"dli", - "code":"343", + "code":"505", "des":"None.Field list, which consists of one col_name or more. If there is more than one col_name, separate them by using a comma (,).", "doc_type":"sqlreference", "kw":"col_name_list,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"col_name_list", "githuburl":"" }, { "uri":"dli_08_0013.html", + "node_id":"dli_08_0013.xml", "product_code":"dli", - "code":"344", + "code":"506", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"condition,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"condition", "githuburl":"" }, { "uri":"dli_08_0014.html", + "node_id":"dli_08_0014.xml", "product_code":"dli", - "code":"345", + "code":"507", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"condition_list,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"condition_list", "githuburl":"" }, { "uri":"dli_08_0015.html", + "node_id":"dli_08_0015.xml", "product_code":"dli", - "code":"346", + "code":"508", "des":"None.Common expression name.", "doc_type":"sqlreference", "kw":"cte_name,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"cte_name", "githuburl":"" }, { "uri":"dli_08_0016.html", + "node_id":"dli_08_0016.xml", "product_code":"dli", - "code":"347", + "code":"509", "des":"None.Data type. 
Currently, only the primitive data types are supported.", "doc_type":"sqlreference", "kw":"data_type,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"data_type", "githuburl":"" }, { "uri":"dli_08_0017.html", + "node_id":"dli_08_0017.xml", "product_code":"dli", - "code":"348", + "code":"510", "des":"None.Database description, which must be STRING type and cannot exceed 256 characters.", "doc_type":"sqlreference", "kw":"db_comment,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"db_comment", "githuburl":"" }, { "uri":"dli_08_0018.html", + "node_id":"dli_08_0018.xml", "product_code":"dli", - "code":"349", + "code":"511", "des":"None.Database name, which must be STRING type and cannot exceed 128 bytes.", "doc_type":"sqlreference", "kw":"db_name,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"db_name", "githuburl":"" }, { "uri":"dli_08_0019.html", + "node_id":"dli_08_0019.xml", "product_code":"dli", - "code":"350", + "code":"512", "des":"None.Returned result for the ELSE clause of the CASE WHEN statement.", "doc_type":"sqlreference", "kw":"else_result_expression,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"else_result_expression", "githuburl":"" }, { "uri":"dli_08_0020.html", + "node_id":"dli_08_0020.xml", "product_code":"dli", - "code":"351", + "code":"513", "des":"| AVRO| CSV| JSON| ORC| PARQUETCurrently, the preceding formats are supported.Both USING and STORED AS can be used for specifying the data format. 
You can specify the pre", "doc_type":"sqlreference", "kw":"file_format,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"file_format", "githuburl":"" }, { "uri":"dli_08_0021.html", + "node_id":"dli_08_0021.xml", "product_code":"dli", - "code":"352", + "code":"514", "des":"None.File path, which is the OBS path", "doc_type":"sqlreference", "kw":"file_path,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"file_path", "githuburl":"" }, { "uri":"dli_08_0022.html", + "node_id":"dli_08_0022.xml", "product_code":"dli", - "code":"353", + "code":"515", "des":"None.Function name, which must be STRING type.", "doc_type":"sqlreference", "kw":"function_name,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"function_name", "githuburl":"" }, { "uri":"dli_08_0023.html", + "node_id":"dli_08_0023.xml", "product_code":"dli", - "code":"354", + "code":"516", "des":"None.Expression that includes GROUP BY.", "doc_type":"sqlreference", "kw":"groupby_expression,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"groupby_expression", "githuburl":"" }, { "uri":"dli_08_0024.html", + "node_id":"dli_08_0024.xml", "product_code":"dli", - "code":"355", + "code":"517", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"having_condition,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"having_condition", "githuburl":"" }, { "uri":"dli_08_0026.html", + "node_id":"dli_08_0026.xml", "product_code":"dli", - "code":"356", + "code":"518", "des":"None.Input expression of the CASE WHEN statement.", "doc_type":"sqlreference", "kw":"input_expression,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"input_expression", "githuburl":"" }, { "uri":"dli_08_0029.html", + "node_id":"dli_08_0029.xml", "product_code":"dli", - "code":"357", + "code":"519", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"join_condition,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"join_condition", "githuburl":"" }, { "uri":"dli_08_0030.html", + "node_id":"dli_08_0030.xml", "product_code":"dli", - "code":"358", + "code":"520", "des":"None.The condition of an inequality join.", "doc_type":"sqlreference", "kw":"non_equi_join_condition,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"non_equi_join_condition", "githuburl":"" }, { "uri":"dli_08_0031.html", + "node_id":"dli_08_0031.xml", "product_code":"dli", - "code":"359", + "code":"521", "des":"None.Maximum number of output lines specified by LIMIT. 
Which must be INT type.", "doc_type":"sqlreference", "kw":"number,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"number", "githuburl":"" }, { "uri":"dli_08_0034.html", + "node_id":"dli_08_0034.xml", "product_code":"dli", - "code":"360", + "code":"522", "des":"None.Partition column name, that is, partition field name, which must be STRING type.", "doc_type":"sqlreference", "kw":"partition_col_name,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"partition_col_name", "githuburl":"" }, { "uri":"dli_08_0035.html", + "node_id":"dli_08_0035.xml", "product_code":"dli", - "code":"361", + "code":"523", "des":"None.Partition column value, that is, partition field value.", "doc_type":"sqlreference", "kw":"partition_col_value,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"partition_col_value", "githuburl":"" }, { "uri":"dli_08_0036.html", + "node_id":"dli_08_0036.xml", "product_code":"dli", - "code":"362", + "code":"524", "des":"partition_specs : (partition_col_name = partition_col_value, partition_col_name = partition_col_value, ...);Table partition list, which is expressed by using key=value pa", "doc_type":"sqlreference", "kw":"partition_specs,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"partition_specs", "githuburl":"" }, { "uri":"dli_08_0037.html", + "node_id":"dli_08_0037.xml", "product_code":"dli", - "code":"363", + "code":"525", "des":"None.Property name, which must be STRING type.", "doc_type":"sqlreference", "kw":"property_name,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], 
"title":"property_name", "githuburl":"" }, { "uri":"dli_08_0038.html", + "node_id":"dli_08_0038.xml", "product_code":"dli", - "code":"364", + "code":"526", "des":"None.Property value, which must be STRING type.", "doc_type":"sqlreference", "kw":"property_value,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"property_value", "githuburl":"" }, { "uri":"dli_08_0039.html", + "node_id":"dli_08_0039.xml", "product_code":"dli", - "code":"365", + "code":"527", "des":"None.Pattern matching string, which supports wildcard matching.", "doc_type":"sqlreference", "kw":"regex_expression,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"regex_expression", "githuburl":"" }, { "uri":"dli_08_0040.html", + "node_id":"dli_08_0040.xml", "product_code":"dli", - "code":"366", + "code":"528", "des":"None.Returned result for the THEN clause of the CASE WHEN statement.", "doc_type":"sqlreference", "kw":"result_expression,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"result_expression", "githuburl":"" }, { "uri":"dli_08_0042.html", + "node_id":"dli_08_0042.xml", "product_code":"dli", - "code":"367", + "code":"529", "des":"None.Query clause for the basic SELECT statement.", "doc_type":"sqlreference", "kw":"select_statement,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"select_statement", "githuburl":"" }, { "uri":"dli_08_0043.html", + "node_id":"dli_08_0043.xml", "product_code":"dli", - "code":"368", + "code":"530", "des":"None.Separator, which can be customized by users, for example, comma (,), semicolon (;), and colon (:). 
Which must be CHAR type.", "doc_type":"sqlreference", "kw":"separator,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"separator", "githuburl":"" }, { "uri":"dli_08_0045.html", + "node_id":"dli_08_0045.xml", "product_code":"dli", - "code":"369", + "code":"531", "des":"None.SQL statement containing the common expression defined by cte_name.", "doc_type":"sqlreference", "kw":"sql_containing_cte_name,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"sql_containing_cte_name", "githuburl":"" }, { "uri":"dli_08_0046.html", + "node_id":"dli_08_0046.xml", "product_code":"dli", - "code":"370", + "code":"532", "des":"None.Subquery.", "doc_type":"sqlreference", "kw":"sub_query,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"sub_query", "githuburl":"" }, { "uri":"dli_08_0047.html", + "node_id":"dli_08_0047.xml", "product_code":"dli", - "code":"371", + "code":"533", "des":"None.Table description, which must be STRING type and cannot exceed 256 bytes.", "doc_type":"sqlreference", "kw":"table_comment,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"table_comment", "githuburl":"" }, { "uri":"dli_08_0048.html", + "node_id":"dli_08_0048.xml", "product_code":"dli", - "code":"372", + "code":"534", "des":"NoneTable name, which cannot exceed 128 bytes. 
The string type and \"$\" symbol are supported.", "doc_type":"sqlreference", "kw":"table_name,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"table_name", "githuburl":"" }, { "uri":"dli_08_0049.html", + "node_id":"dli_08_0049.xml", "product_code":"dli", - "code":"373", + "code":"535", "des":"None.Table property list, which is expressed by using key=value pairs. key represents property_name, and value represents property_value. If there is more than one key=va", "doc_type":"sqlreference", "kw":"table_properties,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"table_properties", "githuburl":"" }, { "uri":"dli_08_0050.html", + "node_id":"dli_08_0050.xml", "product_code":"dli", - "code":"374", + "code":"536", "des":"None.Table or view name, which must be STRING type. It can also be a subquery. If it is subquery, an alias must also be provided.", "doc_type":"sqlreference", "kw":"table_reference,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"table_reference", "githuburl":"" }, { "uri":"dli_08_0053.html", + "node_id":"dli_08_0053.xml", "product_code":"dli", - "code":"375", + "code":"537", "des":"None.When expression of the CASE WHEN statement. It is used for matching with the input expression.", "doc_type":"sqlreference", "kw":"when_expression,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"when_expression", "githuburl":"" }, { "uri":"dli_08_0054.html", + "node_id":"dli_08_0054.xml", "product_code":"dli", - "code":"376", + "code":"538", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"where_condition,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"where_condition", "githuburl":"" }, { "uri":"dli_08_0055.html", + "node_id":"dli_08_0055.xml", "product_code":"dli", - "code":"377", - "des":"None.Analysis window function. For details, see Window Functions.", + "code":"539", + "des":"NoneAnalysis window function.", "doc_type":"sqlreference", "kw":"window_function,Identifiers,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"window_function", "githuburl":"" }, { "uri":"dli_08_0060.html", + "node_id":"dli_08_0060.xml", "product_code":"dli", - "code":"378", + "code":"540", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Operators", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Operators", "githuburl":"" }, { "uri":"dli_08_0061.html", + "node_id":"dli_08_0061.xml", "product_code":"dli", - "code":"379", + "code":"541", "des":"All data types can be compared by using relational operators and the result is returned as a BOOLEAN value.Relationship operators are binary operators. 
Two compared data ", "doc_type":"sqlreference", "kw":"Relational Operators,Operators,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Relational Operators", "githuburl":"" }, { "uri":"dli_08_0062.html", + "node_id":"dli_08_0062.xml", "product_code":"dli", - "code":"380", + "code":"542", "des":"Arithmetic operators include binary operators and unary operators. For both types of operators, the returned results are numbers. Table 1 lists the arithmetic operators s", "doc_type":"sqlreference", "kw":"Arithmetic Operators,Operators,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Arithmetic Operators", "githuburl":"" }, { "uri":"dli_08_0063.html", + "node_id":"dli_08_0063.xml", "product_code":"dli", - "code":"381", + "code":"543", "des":"Common logical operators include AND, OR, and NOT. The operation result can be TRUE, FALSE, or NULL (which means unknown). The priorities of the operators are as follows:", "doc_type":"sqlreference", "kw":"Logical Operators,Operators,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference" + } + ], "title":"Logical Operators", "githuburl":"" }, { "uri":"dli_08_00005.html", + "node_id":"dli_08_00005.xml", "product_code":"dli", - "code":"382", + "code":"544", "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "doc_type":"sqlreference", "kw":"Change History,SQL Syntax Reference", + "search_title":"", + "metedata":[ + { + "prodname":"dli", + "documenttype":"sqlreference", + "IsBot":"Yes", + "opensource":"true" + } + ], "title":"Change History", "githuburl":"" } diff --git a/docs/dli/sqlreference/CLASS.TXT.json b/docs/dli/sqlreference/CLASS.TXT.json index 2925ad5d..4116898a 100644 --- a/docs/dli/sqlreference/CLASS.TXT.json +++ b/docs/dli/sqlreference/CLASS.TXT.json @@ -26,6 +26,15 @@ "p_code":"1", "code":"3" }, + { + "desc":"This section describes the open source Spark SQL syntax supported by DLI. For details about the syntax, parameters, and examples, see Spark SQL Syntax.", + "product_code":"dli", + "title":"Spark Open Source Commands", + "uri":"dli_08_0477.html", + "doc_type":"sqlreference", + "p_code":"1", + "code":"4" + }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "product_code":"dli", @@ -33,7 +42,7 @@ "uri":"dli_08_0070.html", "doc_type":"sqlreference", "p_code":"1", - "code":"4" + "code":"5" }, { "desc":"This statement is used to create a database.IF NOT EXISTS: Prevents system errors if the database to be created exists.COMMENT: Describes a database.DBPROPERTIES: Specifi", @@ -41,8 +50,8 @@ "title":"Creating a Database", "uri":"dli_08_0071.html", "doc_type":"sqlreference", - "p_code":"4", - "code":"5" + "p_code":"5", + "code":"6" }, { "desc":"This statement is used to delete a database.IF EXISTS: Prevents system errors if the database to be deleted does not exist.DATABASE and SCHEMA can be used interchangeably", @@ -50,8 +59,8 @@ "title":"Deleting a Database", "uri":"dli_08_0072.html", "doc_type":"sqlreference", - "p_code":"4", - "code":"6" + "p_code":"5", + "code":"7" }, { "desc":"This syntax is used to view the information about a specified database, including the database name and database description.EXTENDED: Displays the database properties.If", @@ -59,8 +68,8 @@ "title":"Viewing a Specified Database", "uri":"dli_08_0073.html", "doc_type":"sqlreference", - "p_code":"4", - "code":"7" + "p_code":"5", + "code":"8" }, { "desc":"This syntax is used to query all current databases.NoneKeyword DATABASES is equivalent to SCHEMAS. You can use either of them in this statement.View all the current datab", @@ -68,8 +77,8 @@ "title":"Viewing All Databases", "uri":"dli_08_0074.html", "doc_type":"sqlreference", - "p_code":"4", - "code":"8" + "p_code":"5", + "code":"9" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -78,7 +87,7 @@ "uri":"dli_08_0223.html", "doc_type":"sqlreference", "p_code":"1", - "code":"9" + "code":"10" }, { "desc":"Create an OBS table using the DataSource syntax.The main differences between the DataSource and the Hive syntax lie in the supported data formats and the number of suppor", @@ -86,8 +95,8 @@ "title":"Creating an OBS Table Using the DataSource Syntax", "uri":"dli_08_0076.html", "doc_type":"sqlreference", - "p_code":"9", - "code":"10" + "p_code":"10", + "code":"11" }, { "desc":"This statement is used to create an OBS table using the Hive syntax. The main differences between the DataSource and the Hive syntax lie in the supported data formats and", @@ -95,8 +104,8 @@ "title":"Creating an OBS Table Using the Hive Syntax", "uri":"dli_08_0077.html", "doc_type":"sqlreference", - "p_code":"9", - "code":"11" + "p_code":"10", + "code":"12" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -105,7 +114,7 @@ "uri":"dli_08_0224.html", "doc_type":"sqlreference", "p_code":"1", - "code":"12" + "code":"13" }, { "desc":"This DataSource syntax can be used to create a DLI table. The main differences between the DataSource and the Hive syntax lie in the supported data formats and the number", @@ -113,8 +122,8 @@ "title":"Creating a DLI Table Using the DataSource Syntax", "uri":"dli_08_0098.html", "doc_type":"sqlreference", - "p_code":"12", - "code":"13" + "p_code":"13", + "code":"14" }, { "desc":"This Hive syntax is used to create a DLI table. 
The main differences between the DataSource and the Hive syntax lie in the supported data formats and the number of suppor", @@ -122,8 +131,8 @@ "title":"Creating a DLI Table Using the Hive Syntax", "uri":"dli_08_0204.html", "doc_type":"sqlreference", - "p_code":"12", - "code":"14" + "p_code":"13", + "code":"15" }, { "desc":"This statement is used to delete tables.If the table is stored in OBS, only the metadata is deleted. The data stored on OBS is not deleted.If the table is stored in DLI, ", @@ -132,7 +141,7 @@ "uri":"dli_08_0087.html", "doc_type":"sqlreference", "p_code":"1", - "code":"15" + "code":"16" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -141,7 +150,7 @@ "uri":"dli_08_0089.html", "doc_type":"sqlreference", "p_code":"1", - "code":"16" + "code":"17" }, { "desc":"This statement is used to view all tables and views in the current database.FROM/IN: followed by the name of a database whose tables and views will be displayed.NoneCreat", @@ -149,8 +158,8 @@ "title":"Viewing All Tables", "uri":"dli_08_0090.html", "doc_type":"sqlreference", - "p_code":"16", - "code":"17" + "p_code":"17", + "code":"18" }, { "desc":"This statement is used to show the statements for creating a table.CREATE TABLE: statement for creating a tableThe table specified in this statement must exist. Otherwise", @@ -158,8 +167,8 @@ "title":"Viewing Table Creation Statements", "uri":"dli_08_0091.html", "doc_type":"sqlreference", - "p_code":"16", - "code":"18" + "p_code":"17", + "code":"19" }, { "desc":"Check the properties of a table.TBLPROPERTIES: This statement allows you to add a key/value property to a table.property_name is case sensitive. 
You cannot specify multip", @@ -167,8 +176,8 @@ "title":"Viewing Table Properties", "uri":"dli_08_0092.html", "doc_type":"sqlreference", - "p_code":"16", - "code":"19" + "p_code":"17", + "code":"20" }, { "desc":"This statement is used to query all columns in a specified table.COLUMNS: columns in the current tableFROM/IN: followed by the name of a database whose tables and views w", @@ -176,8 +185,8 @@ "title":"Viewing All Columns in a Specified Table", "uri":"dli_08_0093.html", "doc_type":"sqlreference", - "p_code":"16", - "code":"20" + "p_code":"17", + "code":"21" }, { "desc":"This statement is used to view all partitions in a specified table.PARTITIONS: partitions in a specified tablePARTITION: a specified partitionThe table specified in this ", @@ -185,8 +194,8 @@ "title":"Viewing All Partitions in a Specified Table", "uri":"dli_08_0094.html", "doc_type":"sqlreference", - "p_code":"16", - "code":"21" + "p_code":"17", + "code":"22" }, { "desc":"This statement is used to view the table statistics. The names and data types of all columns in a specified table will be returned.EXTENDED: displays all metadata of the ", @@ -194,8 +203,8 @@ "title":"Viewing Table Statistics", "uri":"dli_08_0105.html", "doc_type":"sqlreference", - "p_code":"16", - "code":"22" + "p_code":"17", + "code":"23" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -204,7 +213,7 @@ "uri":"dli_08_0262.html", "doc_type":"sqlreference", "p_code":"1", - "code":"23" + "code":"24" }, { "desc":"This statement is used to add one or more new columns to a table.ADD COLUMNS: columns to addCOMMENT: column descriptionDo not run this SQL statement concurrently. 
Otherwi", @@ -212,8 +221,17 @@ "title":"Adding a Column", "uri":"dli_08_0263.html", "doc_type":"sqlreference", - "p_code":"23", - "code":"24" + "p_code":"24", + "code":"25" + }, + { + "desc":"You can modify the column comments of non-partitioned or partitioned tables.CHANGE COLUMN: Modify a column.COMMENT: column descriptionChange the comment of the c1 column ", + "product_code":"dli", + "title":"Modifying Column Comments", + "uri":"dli_08_0470.html", + "doc_type":"sqlreference", + "p_code":"24", + "code":"26" }, { "desc":"DLI controls multiple versions of backup data for restoration. After the multiversion function is enabled, the system automatically backs up table data when you delete or", @@ -221,8 +239,8 @@ "title":"Enabling or Disabling Multiversion Backup", "uri":"dli_08_0354.html", "doc_type":"sqlreference", - "p_code":"23", - "code":"25" + "p_code":"24", + "code":"27" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -231,7 +249,7 @@ "uri":"dli_08_0080.html", "doc_type":"sqlreference", "p_code":"1", - "code":"26" + "code":"28" }, { "desc":"After an OBS partitioned table is created, no partition information is generated for the table. 
Partition information is generated only after you:Insert data to the OBS p", @@ -239,8 +257,8 @@ "title":"Adding Partition Data (Only OBS Tables Supported)", "uri":"dli_08_0081.html", "doc_type":"sqlreference", - "p_code":"26", - "code":"27" + "p_code":"28", + "code":"29" }, { "desc":"This statement is used to rename partitions.PARTITION: a specified partitionRENAME: new name of the partitionThis statement is used for OBS table operations.The table and", @@ -248,26 +266,26 @@ "title":"Renaming a Partition (Only OBS Tables Supported)", "uri":"dli_08_0082.html", "doc_type":"sqlreference", - "p_code":"26", - "code":"28" + "p_code":"28", + "code":"30" }, { - "desc":"Deletes one or more partitions from a partitioned table.The table in which partitions are to be deleted must exist. Otherwise, an error is reported.The to-be-deleted part", + "desc":"This statement is used to delete one or more partitions from a partitioned table.Partitioned tables are classified into OBS tables and DLI tables. You can delete one or m", "product_code":"dli", "title":"Deleting a Partition", "uri":"dli_08_0083.html", "doc_type":"sqlreference", - "p_code":"26", - "code":"29" + "p_code":"28", + "code":"31" }, { - "desc":"This statement is used to delete one or more partitions based on specified conditions.This statement is used for OBS table operations only.The table in which partitions a", + "desc":"This statement is used to delete one or more partitions based on specified conditions.This statement is only used for OBS tables.The table in which partitions are to be d", "product_code":"dli", - "title":"Deleting Partitions by Specifying Filter Criteria (Only OBS Tables Supported)", + "title":"Deleting Partitions by Specifying Filter Criteria (Only Supported on OBS Tables)", "uri":"dli_08_0343.html", "doc_type":"sqlreference", - "p_code":"26", - "code":"30" + "p_code":"28", + "code":"32" }, { "desc":"This statement is used to modify the positions of table partitions.PARTITION: a specified 
partitionLOCATION: path of the partitionFor a table partition whose position is ", @@ -275,8 +293,8 @@ "title":"Altering the Partition Location of a Table (Only OBS Tables Supported)", "uri":"dli_08_0084.html", "doc_type":"sqlreference", - "p_code":"26", - "code":"31" + "p_code":"28", + "code":"33" }, { "desc":"This statement is used to update the partition information about a table in the Metastore.OrPARTITIONS: partition informationSERDEPROPERTIES: Serde attributeThis statemen", @@ -284,8 +302,8 @@ "title":"Updating Partitioned Table Data (Only OBS Tables Supported)", "uri":"dli_08_0079.html", "doc_type":"sqlreference", - "p_code":"26", - "code":"32" + "p_code":"28", + "code":"34" }, { "desc":"Spark caches Parquet metadata to improve performance. If you update a Parquet table, the cached metadata is not updated. Spark SQL cannot find the newly inserted data and", @@ -293,8 +311,8 @@ "title":"Updating Table Metadata with REFRESH TABLE", "uri":"dli_08_0359.html", "doc_type":"sqlreference", - "p_code":"26", - "code":"33" + "p_code":"28", + "code":"35" }, { "desc":"The LOAD DATA function can be used to import data in CSV, Parquet, ORC, JSON, and Avro formats. 
The data is converted into the Parquet data format for storage.INPATH: pat", @@ -303,7 +321,7 @@ "uri":"dli_08_0100.html", "doc_type":"sqlreference", "p_code":"1", - "code":"34" + "code":"36" }, { "desc":"This statement is used to insert the SELECT query result or a certain data record into a table.Insert the SELECT query result into a table.INSERT INTO [TABLE] [db_name.]t", @@ -312,7 +330,7 @@ "uri":"dli_08_0095.html", "doc_type":"sqlreference", "p_code":"1", - "code":"35" + "code":"37" }, { "desc":"This statement is used to delete data from the DLI or OBS table.Only data in the DLI or OBS table can be deleted.", @@ -321,7 +339,7 @@ "uri":"dli_08_0217.html", "doc_type":"sqlreference", "p_code":"1", - "code":"36" + "code":"38" }, { "desc":"This statement is used to directly write query results to a specified directory. The query results can be stored in CSV, Parquet, ORC, JSON, or Avro format.USING: Specifi", @@ -330,7 +348,7 @@ "uri":"dli_08_0205.html", "doc_type":"sqlreference", "p_code":"1", - "code":"37" + "code":"39" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -339,7 +357,7 @@ "uri":"dli_08_0349.html", "doc_type":"sqlreference", "p_code":"1", - "code":"38" + "code":"40" }, { "desc":"After multiversion is enabled, backup data is retained for seven days by default. You can change the retention period by setting system parameterdli.multi.version.retenti", @@ -347,8 +365,8 @@ "title":"Setting the Retention Period for Multiversion Backup Data", "uri":"dli_08_0350.html", "doc_type":"sqlreference", - "p_code":"38", - "code":"39" + "p_code":"40", + "code":"41" }, { "desc":"After the multiversion function is enabled, you can run the SHOW HISTORY command to view the backup data of a table. 
For details about the syntax for enabling or disablin", @@ -356,8 +374,8 @@ "title":"Viewing Multiversion Backup Data", "uri":"dli_08_0351.html", "doc_type":"sqlreference", - "p_code":"38", - "code":"40" + "p_code":"40", + "code":"42" }, { "desc":"After the multiversion function is enabled, you can run the RESTORE TABLE statement to restore a table or partition of a specified version. For details about the syntax f", @@ -365,8 +383,8 @@ "title":"Restoring Multiversion Backup Data", "uri":"dli_08_0352.html", "doc_type":"sqlreference", - "p_code":"38", - "code":"41" + "p_code":"40", + "code":"43" }, { "desc":"After the multiversion function is enabled, expired backup data will be directly deleted by the system when theinsert overwrite or truncate statement is executed. You can", @@ -374,8 +392,8 @@ "title":"Configuring the Trash Bin for Expired Multiversion Data", "uri":"dli_08_0353.html", "doc_type":"sqlreference", - "p_code":"38", - "code":"42" + "p_code":"40", + "code":"44" }, { "desc":"The retention period of multiversion backup data takes effect each time the insert overwrite or truncate statement is executed. If neither statement is executed for the t", @@ -383,8 +401,44 @@ "title":"Deleting Multiversion Backup Data", "uri":"dli_08_0355.html", "doc_type":"sqlreference", - "p_code":"38", - "code":"43" + "p_code":"40", + "code":"45" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Table Lifecycle Management", + "uri":"en-us_topic_0000001571023676.html", + "doc_type":"sqlreference", + "p_code":"1", + "code":"46" + }, + { + "desc":"DLI provides table lifecycle management to allow you to specify the lifecycle of a table when creating the table. 
DLI determines whether to reclaim a table based on the t", + "product_code":"dli", + "title":"Specifying the Lifecycle of a Table When Creating the Table", + "uri":"en-us_topic_0000001621263317.html", + "doc_type":"sqlreference", + "p_code":"46", + "code":"47" + }, + { + "desc":"This section describes how to modify the lifecycle of an existing partitioned or non-partitioned table.When the lifecycle function is enabled for the first time, the syst", + "product_code":"dli", + "title":"Modifying the Lifecycle of a Table", + "uri":"en-us_topic_0000001621382957.html", + "doc_type":"sqlreference", + "p_code":"46", + "code":"48" + }, + { + "desc":"This section describes how to disable or restore the lifecycle of a specified table or partition.You can disable or restore the lifecycle of a table in either of the foll", + "product_code":"dli", + "title":"Disabling or Restoring the Lifecycle of a Table", + "uri":"en-us_topic_0000001621542965.html", + "doc_type":"sqlreference", + "p_code":"46", + "code":"49" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -393,16 +447,16 @@ "uri":"dli_08_0118.html", "doc_type":"sqlreference", "p_code":"1", - "code":"44" + "code":"50" }, { - "desc":"This statement is used to create a DLI table and associate it with an existing HBase table.Before creating a DLI table and associating it with HBase, you need to create a", + "desc":"This statement is used to create a DLI table and associate it with an existing HBase table.In Spark cross-source development scenarios, there is a risk of password leakag", "product_code":"dli", "title":"Creating a DLI Table and Associating It with HBase", "uri":"dli_08_0119.html", "doc_type":"sqlreference", - "p_code":"44", - "code":"45" + "p_code":"50", + "code":"51" }, { "desc":"This statement is used to insert data in a DLI table to the associated HBase table.Insert the SELECT query result into a table.INSERT INTO DLI_TABLE\n SELECT field1,field", @@ -410,8 +464,8 @@ "title":"Inserting Data to an HBase Table", "uri":"dli_08_0120.html", "doc_type":"sqlreference", - "p_code":"44", - "code":"46" + "p_code":"50", + "code":"52" }, { "desc":"This statement is used to query data in an HBase table.LIMIT is used to limit the query results. Only INT type is supported by the number parameter.The table to be querie", @@ -419,8 +473,8 @@ "title":"Querying an HBase Table", "uri":"dli_08_0121.html", "doc_type":"sqlreference", - "p_code":"44", - "code":"47" + "p_code":"50", + "code":"53" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -429,7 +483,7 @@ "uri":"dli_08_0220.html", "doc_type":"sqlreference", "p_code":"1", - "code":"48" + "code":"54" }, { "desc":"Run the CREATE TABLE statement to create the DLI table and associate it with the existing metric in OpenTSDB. This syntax supports the OpenTSDB of CloudTable and MRS.Befo", @@ -437,8 +491,8 @@ "title":"Creating a DLI Table and Associating It with OpenTSDB", "uri":"dli_08_0122.html", "doc_type":"sqlreference", - "p_code":"48", - "code":"49" + "p_code":"54", + "code":"55" }, { "desc":"Run the INSERT INTO statement to insert the data in the DLI table to the associated OpenTSDB metric.If no metric exists on the OpenTSDB, a new metric is automatically cre", @@ -446,8 +500,8 @@ "title":"Inserting Data to the OpenTSDB Table", "uri":"dli_08_0123.html", "doc_type":"sqlreference", - "p_code":"48", - "code":"50" + "p_code":"54", + "code":"56" }, { "desc":"This SELECT command is used to query data in an OpenTSDB table.If no metric exists in OpenTSDB, an error will be reported when the corresponding DLI table is queried.If t", @@ -455,8 +509,8 @@ "title":"Querying an OpenTSDB Table", "uri":"dli_08_0124.html", "doc_type":"sqlreference", - "p_code":"48", - "code":"51" + "p_code":"54", + "code":"57" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -465,16 +519,16 @@ "uri":"dli_08_0192.html", "doc_type":"sqlreference", "p_code":"1", - "code":"52" + "code":"58" }, { - "desc":"This statement is used to create a DLI table and associate it with an existing DWS table.Before creating a DLI table and associating it with DWS, you need to create a dat", + "desc":"This statement is used to create a DLI table and associate it with an existing DWS table.In Spark cross-source development scenarios, there is a risk of password leakage ", "product_code":"dli", "title":"Creating a DLI Table and Associating It with DWS", "uri":"dli_08_0193.html", "doc_type":"sqlreference", - "p_code":"52", - "code":"53" + "p_code":"58", + "code":"59" }, { "desc":"This statement is used to insert data in a DLI table to the associated DWS table.Insert the SELECT query result into a table.INSERT INTO DLI_TABLE\n SELECT field1,field2.", @@ -482,8 +536,8 @@ "title":"Inserting Data to the DWS Table", "uri":"dli_08_0194.html", "doc_type":"sqlreference", - "p_code":"52", - "code":"54" + "p_code":"58", + "code":"60" }, { "desc":"This statement is used to query data in a DWS table.LIMIT is used to limit the query results. Only INT type is supported by the number parameter.The table to be queried m", @@ -491,8 +545,8 @@ "title":"Querying the DWS Table", "uri":"dli_08_0195.html", "doc_type":"sqlreference", - "p_code":"52", - "code":"55" + "p_code":"58", + "code":"61" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -501,16 +555,16 @@ "uri":"dli_08_0196.html", "doc_type":"sqlreference", "p_code":"1", - "code":"56" + "code":"62" }, { - "desc":"This statement is used to create a DLI table and associate it with an existing RDS table. This function supports access to the MySQL and PostgreSQL clusters of RDS.Before", + "desc":"This statement is used to create a DLI table and associate it with an existing RDS table. This function supports access to the MySQL and PostgreSQL clusters of RDS.In Spa", "product_code":"dli", "title":"Creating a DLI Table and Associating It with RDS", "uri":"dli_08_0197.html", "doc_type":"sqlreference", - "p_code":"56", - "code":"57" + "p_code":"62", + "code":"63" }, { "desc":"This statement is used to insert data in a DLI table to the associated RDS table.Insert the SELECT query result into a table.INSERT INTO DLI_TABLE\n SELECT field1,field2.", @@ -518,8 +572,8 @@ "title":"Inserting Data to the RDS Table", "uri":"dli_08_0198.html", "doc_type":"sqlreference", - "p_code":"56", - "code":"58" + "p_code":"62", + "code":"64" }, { "desc":"This statement is used to query data in an RDS table.LIMIT is used to limit the query results. Only INT type is supported by the number parameter.The table to be queried ", @@ -527,8 +581,8 @@ "title":"Querying the RDS Table", "uri":"dli_08_0199.html", "doc_type":"sqlreference", - "p_code":"56", - "code":"59" + "p_code":"62", + "code":"65" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -537,16 +591,16 @@ "uri":"dli_08_0200.html", "doc_type":"sqlreference", "p_code":"1", - "code":"60" + "code":"66" }, { - "desc":"This statement is used to create a DLI table and associate it with an existing CSS table.Before creating a DLI table and associating it with CSS, you need to create a dat", + "desc":"This statement is used to create a DLI table and associate it with an existing CSS table.In Spark cross-source development scenarios, there is a risk of password leakage ", "product_code":"dli", "title":"Creating a DLI Table and Associating It with CSS", "uri":"dli_08_0201.html", "doc_type":"sqlreference", - "p_code":"60", - "code":"61" + "p_code":"66", + "code":"67" }, { "desc":"This statement is used to insert data in a DLI table to the associated CSS table.Insert the SELECT query result into a table.INSERT INTO DLI_TABLE\n SELECT field1,field2.", @@ -554,8 +608,8 @@ "title":"Inserting Data to the CSS Table", "uri":"dli_08_0202.html", "doc_type":"sqlreference", - "p_code":"60", - "code":"62" + "p_code":"66", + "code":"68" }, { "desc":"This statement is used to query data in a CSS table.LIMIT is used to limit the query results. Only INT type is supported by the number parameter.The table to be queried m", @@ -563,8 +617,8 @@ "title":"Querying the CSS Table", "uri":"dli_08_0203.html", "doc_type":"sqlreference", - "p_code":"60", - "code":"63" + "p_code":"66", + "code":"69" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -573,16 +627,16 @@ "uri":"dli_08_0225.html", "doc_type":"sqlreference", "p_code":"1", - "code":"64" + "code":"70" }, { - "desc":"This statement is used to create a DLI table and associate it with an existing DCS key.Before creating a DLI table and associating it with DCS, you need to create a datas", + "desc":"This statement is used to create a DLI table and associate it with an existing DCS key.In Spark cross-source development scenarios, there is a risk of password leakage if", "product_code":"dli", "title":"Creating a DLI Table and Associating It with DCS", "uri":"dli_08_0226.html", "doc_type":"sqlreference", - "p_code":"64", - "code":"65" + "p_code":"70", + "code":"71" }, { "desc":"This statement is used to insert data in a DLI table to the DCS key.Insert the SELECT query result into a table.INSERT INTO DLI_TABLE\n SELECT field1,field2...\n [FROM DL", @@ -590,8 +644,8 @@ "title":"Inserting Data to a DCS Table", "uri":"dli_08_0227.html", "doc_type":"sqlreference", - "p_code":"64", - "code":"66" + "p_code":"70", + "code":"72" }, { "desc":"This statement is used to query data in a DCS table.LIMIT is used to limit the query results. Only INT type is supported by the number parameter.Query data in the test_re", @@ -599,8 +653,8 @@ "title":"Querying the DCS Table", "uri":"dli_08_0228.html", "doc_type":"sqlreference", - "p_code":"64", - "code":"67" + "p_code":"70", + "code":"73" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -609,16 +663,16 @@ "uri":"dli_08_0229.html", "doc_type":"sqlreference", "p_code":"1", - "code":"68" + "code":"74" }, { - "desc":"This statement is used to create a DLI table and associate it with an existing DDS collection.Before creating a DLI table and associating it with DDS, you need to create ", + "desc":"This statement is used to create a DLI table and associate it with an existing DDS collection.In Spark cross-source development scenarios, there is a risk of password lea", "product_code":"dli", "title":"Creating a DLI Table and Associating It with DDS", "uri":"dli_08_0230.html", "doc_type":"sqlreference", - "p_code":"68", - "code":"69" + "p_code":"74", + "code":"75" }, { "desc":"This statement is used to insert data in a DLI table to the associated DDS table.Insert the SELECT query result into a table.INSERT INTO DLI_TABLE\n SELECT field1,field2.", @@ -626,8 +680,8 @@ "title":"Inserting Data to the DDS Table", "uri":"dli_08_0231.html", "doc_type":"sqlreference", - "p_code":"68", - "code":"70" + "p_code":"74", + "code":"76" }, { "desc":"This statement is used to query data in a DDS table.LIMIT is used to limit the query results. Only INT type is supported by the number parameter.If schema information is ", @@ -635,8 +689,44 @@ "title":"Querying the DDS Table", "uri":"dli_08_0232.html", "doc_type":"sqlreference", - "p_code":"68", - "code":"71" + "p_code":"74", + "code":"77" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Creating a Datasource Connection with an Oracle Table", + "uri":"dli_08_0460.html", + "doc_type":"sqlreference", + "p_code":"1", + "code":"78" + }, + { + "desc":"This statement is used to create a DLI table and associate it with an existing Oracle table.Before creating a DLI table and associating it with Oracle, you need to create", + "product_code":"dli", + "title":"Creating a DLI Table and Associating It with Oracle", + "uri":"dli_08_0461.html", + "doc_type":"sqlreference", + "p_code":"78", + "code":"79" + }, + { + "desc":"This statement is used to insert data into an associated Oracle table.Insert the SELECT query result into a table.INSERT INTO DLI_TABLE\n SELECT field1,field2...\n [FROM ", + "product_code":"dli", + "title":"Inserting Data to an Oracle Table", + "uri":"dli_08_0462.html", + "doc_type":"sqlreference", + "p_code":"78", + "code":"80" + }, + { + "desc":"This statement is used to query data in an Oracle table.LIMIT is used to limit the query results. Only INT type is supported by the number parameter.If schema information", + "product_code":"dli", + "title":"Querying an Oracle Table", + "uri":"dli_08_0463.html", + "doc_type":"sqlreference", + "p_code":"78", + "code":"81" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -645,7 +735,7 @@ "uri":"dli_08_0129.html", "doc_type":"sqlreference", "p_code":"1", - "code":"72" + "code":"82" }, { "desc":"This statement is used to create views.CREATE VIEW: creates views based on the given select statement. 
The result of the select statement will not be written into the dis", @@ -653,8 +743,8 @@ "title":"Creating a View", "uri":"dli_08_0130.html", "doc_type":"sqlreference", - "p_code":"72", - "code":"73" + "p_code":"82", + "code":"83" }, { "desc":"This statement is used to delete views.DROP: Deletes the metadata of a specified view. Although views and tables have many common points, the DROP TABLE statement cannot ", @@ -662,8 +752,8 @@ "title":"Deleting a View", "uri":"dli_08_0131.html", "doc_type":"sqlreference", - "p_code":"72", - "code":"74" + "p_code":"82", + "code":"84" }, { "desc":"This statement returns the logical plan and physical execution plan for the SQL statement.EXTENDED: After this keyword is specified, the logical and physical plans are ou", @@ -672,7 +762,7 @@ "uri":"dli_08_0138.html", "doc_type":"sqlreference", "p_code":"1", - "code":"75" + "code":"85" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -681,7 +771,7 @@ "uri":"dli_08_0139.html", "doc_type":"sqlreference", "p_code":"1", - "code":"76" + "code":"86" }, { "desc":"Table 1 describes the SQL statement permission matrix in DLI in terms of permissions on databases, tables, and roles.For privilege granting or revocation on databases and", @@ -689,8 +779,8 @@ "title":"Data Permissions List", "uri":"dli_08_0140.html", "doc_type":"sqlreference", - "p_code":"76", - "code":"77" + "p_code":"86", + "code":"87" }, { "desc":"This statement is used to create a role in the current database or a specified database.Only users with the CREATE_ROLE permission on the database can create roles. 
For e", @@ -698,8 +788,8 @@ "title":"Creating a Role", "uri":"dli_08_0141.html", "doc_type":"sqlreference", - "p_code":"76", - "code":"78" + "p_code":"86", + "code":"88" }, { "desc":"This statement is used to delete a role in the current database or a specified database.NoneThe role_name to be deleted must exist in the current database or the specifie", @@ -707,8 +797,8 @@ "title":"Deleting a Role", "uri":"dli_08_0148.html", "doc_type":"sqlreference", - "p_code":"76", - "code":"79" + "p_code":"86", + "code":"89" }, { "desc":"This statement is used to bind a user with a role.NoneThe role_name and username must exist. Otherwise, an error will be reported.", @@ -716,8 +806,8 @@ "title":"Binding a Role", "uri":"dli_08_0142.html", "doc_type":"sqlreference", - "p_code":"76", - "code":"80" + "p_code":"86", + "code":"90" }, { "desc":"This statement is used to unbind the user with the role.Nonerole_name and user_name must exist and user_name has been bound to role_name.To unbind the user_name1 from rol", @@ -725,8 +815,8 @@ "title":"Unbinding a Role", "uri":"dli_08_0147.html", "doc_type":"sqlreference", - "p_code":"76", - "code":"81" + "p_code":"86", + "code":"91" }, { "desc":"This statement is used to display all roles or roles bound to the user_name in the current database.ALL: Displays all roles.Keywords ALL and user_name cannot coexist.To d", @@ -734,8 +824,8 @@ "title":"Displaying a Role", "uri":"dli_08_0143.html", "doc_type":"sqlreference", - "p_code":"76", - "code":"82" + "p_code":"86", + "code":"92" }, { "desc":"This statement is used to grant permissions to a user or role.ROLE: The subsequent role_name must be a role.USER: The subsequent user_name must be a user.The privilege mu", @@ -743,8 +833,8 @@ "title":"Granting a Permission", "uri":"dli_08_0144.html", "doc_type":"sqlreference", - "p_code":"76", - "code":"83" + "p_code":"86", + "code":"93" }, { "desc":"This statement is used to revoke permissions granted to a user or role.ROLE: The subsequent role_name must 
be a role.USER: The subsequent user_name must be a user.The pri", @@ -752,17 +842,17 @@ "title":"Revoking a Permission", "uri":"dli_08_0146.html", "doc_type":"sqlreference", - "p_code":"76", - "code":"84" + "p_code":"86", + "code":"94" }, { - "desc":"This statement is used to show the permissions granted to a user or role in the resource.ROLE: The subsequent role_name must be a role.USER: The subsequent user_name must", + "desc":"This statement is used to show the permissions granted to a user on a resource.USER: The subsequent user_name must be a user.The resource can be a queue, database, table,", "product_code":"dli", - "title":"Displaying the Granted Permissions", + "title":"Showing Granted Permissions", "uri":"dli_08_0145.html", "doc_type":"sqlreference", - "p_code":"76", - "code":"85" + "p_code":"86", + "code":"95" }, { "desc":"This statement is used to display the binding relationship between roles and a user in the current database.NoneThe ROLE variable must exist.", @@ -770,8 +860,8 @@ "title":"Displaying the Binding Relationship Between All Roles and Users", "uri":"dli_08_0149.html", "doc_type":"sqlreference", - "p_code":"76", - "code":"86" + "p_code":"86", + "code":"96" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -780,7 +870,7 @@ "uri":"dli_08_0056.html", "doc_type":"sqlreference", "p_code":"1", - "code":"87" + "code":"97" }, { "desc":"Data type is a basic attribute of data. It is used to distinguish different types of data. 
Different data types occupy different storage space and support different opera", @@ -788,8 +878,8 @@ "title":"Overview", "uri":"dli_08_0057.html", "doc_type":"sqlreference", - "p_code":"87", - "code":"88" + "p_code":"97", + "code":"98" }, { "desc":"Table 1 lists the primitive data types supported by DLI.VARCHAR and CHAR data is stored in STRING type on DLI. Therefore, the string that exceeds the specified length wil", @@ -797,8 +887,8 @@ "title":"Primitive Data Types", "uri":"dli_08_0058.html", "doc_type":"sqlreference", - "p_code":"87", - "code":"89" + "p_code":"97", + "code":"99" }, { "desc":"Spark SQL supports complex data types, as shown in Table 1.When a table containing fields of the complex data type is created, the storage format of this table cannot be ", @@ -806,8 +896,8 @@ "title":"Complex Data Types", "uri":"dli_08_0059.html", "doc_type":"sqlreference", - "p_code":"87", - "code":"90" + "p_code":"97", + "code":"100" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -816,16 +906,16 @@ "uri":"dli_08_0282.html", "doc_type":"sqlreference", "p_code":"1", - "code":"91" + "code":"101" }, { - "desc":"DLI allows you to create and use user-defined functions (UDF) and user-defined table functions (UDTF) in Spark jobs.If a function with the same name exists in the databas", + "desc":"DLI allows you to create and use user-defined functions (UDF) and user-defined table functions (UDTF) in Spark jobs.OrIf a function with the same name exists in the datab", "product_code":"dli", "title":"Creating a Function", "uri":"dli_08_0283.html", "doc_type":"sqlreference", - "p_code":"91", - "code":"92" + "p_code":"101", + "code":"102" }, { "desc":"This statement is used to delete functions.TEMPORARY: Indicates whether the function to be deleted is a temporary function.IF EXISTS: Used when the function to be deleted", @@ -833,8 +923,8 @@ "title":"Deleting a Function", "uri":"dli_08_0284.html", "doc_type":"sqlreference", - "p_code":"91", - "code":"93" + "p_code":"101", + "code":"103" }, { "desc":"Displays information about a specified function.EXTENDED: displays extended usage information.The metadata (implementation class and usage) of an existing function is ret", @@ -842,8 +932,8 @@ "title":"Displaying Function Details", "uri":"dli_08_0281.html", "doc_type":"sqlreference", - "p_code":"91", - "code":"94" + "p_code":"101", + "code":"104" }, { "desc":"View all functions in the current project.In the preceding statement, regex is a regular expression. 
For details about its parameters, see Table 1.For details about other", @@ -851,8 +941,8 @@ "title":"Displaying All Functions", "uri":"dli_08_0285.html", "doc_type":"sqlreference", - "p_code":"91", - "code":"95" + "p_code":"101", + "code":"105" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -861,52 +951,1411 @@ "uri":"dli_08_0064.html", "doc_type":"sqlreference", "p_code":"1", - "code":"96" + "code":"106" }, { - "desc":"Table 1 lists the mathematical functions supported in DLI.", - "product_code":"dli", - "title":"Mathematical Functions", - "uri":"dli_08_0065.html", - "doc_type":"sqlreference", - "p_code":"96", - "code":"97" - }, - { - "desc":"Table 1 lists the date functions supported in DLI.", + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "product_code":"dli", "title":"Date Functions", + "uri":"dli_08_0471.html", + "doc_type":"sqlreference", + "p_code":"106", + "code":"107" + }, + { + "desc":"Table 1 lists the date functions supported by DLI.", + "product_code":"dli", + "title":"Overview", "uri":"dli_08_0066.html", "doc_type":"sqlreference", - "p_code":"96", - "code":"98" + "p_code":"107", + "code":"108" + }, + { + "desc":"This function is used to calculate the date after a date value is increased by a specified number of months. 
That is, it calculates the data that is num_months after star", + "product_code":"dli", + "title":"add_months", + "uri":"dli_spark_add_months.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"109" + }, + { + "desc":"This function is used to return the current date, in the yyyy-mm-dd format.Similar function: getdate. The getdate function is used to return the current system time, in t", + "product_code":"dli", + "title":"current_date", + "uri":"dli_spark_current_date.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"110" + }, + { + "desc":"This function is used to return the current timestamp.NoneThe return value is of the TIMESTAMP type.The value 1692002816300 is returned.", + "product_code":"dli", + "title":"current_timestamp", + "uri":"dli_spark_current_timestamp.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"111" + }, + { + "desc":"This function is used to calculate the number of days in which start_date is increased by days.To obtain the date with a specified change range based on the current date,", + "product_code":"dli", + "title":"date_add", + "uri":"dli_spark_date_add.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"112" + }, + { + "desc":"This function is used to change a date based on datepart and delta.To obtain the date with a specified change range based on the current date, use this function together ", + "product_code":"dli", + "title":"dateadd", + "uri":"dli_spark_dateadd.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"113" + }, + { + "desc":"This function is used to calculate the number of days in which start_date is subtracted by days.To obtain the date with a specified change range based on the current date", + "product_code":"dli", + "title":"date_sub", + "uri":"dli_spark_date_sub.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"114" + }, + { + "desc":"This function is used to convert a date into a string based on the format specified by format.The return 
value is of the STRING type.If the value of date is not of the DA", + "product_code":"dli", + "title":"date_format", + "uri":"dli_spark_date_format.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"115" + }, + { + "desc":"This function is used to calculate the difference between date1 and date2.Similar function: datediff1. The datediff1 function is used to calculate the difference between ", + "product_code":"dli", + "title":"datediff", + "uri":"dli_spark_datediff.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"116" + }, + { + "desc":"This function is used to calculate the difference between date1 and date2 and return the difference in a specified datepart.Similar function: datediff. The datediff funct", + "product_code":"dli", + "title":"datediff1", + "uri":"dli_spark_datediff1.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"117" + }, + { + "desc":"This function is used to calculate the value that meets the specified datepart in date.The return value is of the BIGINT type.If the value of date is not of the DATE or S", + "product_code":"dli", + "title":"datepart", + "uri":"dli_spark_datepart.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"118" + }, + { + "desc":"This function is used to calculate the date otained through the truncation of a specified date based on a specified datepart.It truncates the date before the specified da", + "product_code":"dli", + "title":"datetrunc", + "uri":"dli_spark_datetrunc.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"119" + }, + { + "desc":"This function is used to return the day of a specified date.The return value is of the INT type.If the value of date is not of the DATE or STRING type, the error message ", + "product_code":"dli", + "title":"day/dayofmonth", + "uri":"dli_spark_daydayofmonth.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"120" + }, + { + "desc":"This function is used to convert a timestamp represented by a numeric 
UNIX value to a date value.The return value is of the STRING type, in the yyyy-mm-dd hh:mi:ss format", + "product_code":"dli", + "title":"from_unixtime", + "uri":"dli_spark_from_unixtime.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"121" + }, + { + "desc":"This function is used to convert a UTC timestamp to a UNIX timestamp in a given time zone.The return value is of the TIMESTAMP type.If the value of timestamp is not of th", + "product_code":"dli", + "title":"from_utc_timestamp", + "uri":"dli_spark_from_utc_timestamp.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"122" + }, + { + "desc":"This function is used to return the current system time, in the yyyy-mm-dd hh:mi:ss format.Similar function: current_date. The current_date function is used to return the", + "product_code":"dli", + "title":"getdate", + "uri":"dli_spark_getdate.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"123" + }, + { + "desc":"This function is used to return the hour (from 0 to 23) of a specified time.The return value is of the INT type.If the value of date is not of the DATE or STRING type, th", + "product_code":"dli", + "title":"hour", + "uri":"dli_spark_hour.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"124" + }, + { + "desc":"This function is used to determine whether a date string can be converted into a date value based on a specified format.The return value is of the BOOLEAN type.If the val", + "product_code":"dli", + "title":"isdate", + "uri":"dli_spark_isdate.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"125" + }, + { + "desc":"This function is used to return the last day of the month a date belongs to.Similar function: lastday. 
The lastday function is used to return the last day of the month a ", + "product_code":"dli", + "title":"last_day", + "uri":"dli_spark_last_day.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"126" + }, + { + "desc":"This function is used to return the last day of the month a date belongs to. The hour, minute, and second part is 00:00:00.Similar function: last_day. The last_day functi", + "product_code":"dli", + "title":"lastday", + "uri":"dli_spark_lastday.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"127" + }, + { + "desc":"This function is used to return the minute (from 0 to 59) of a specified time.The return value is of the INT type.If the value of date is not of the DATE or STRING type, ", + "product_code":"dli", + "title":"minute", + "uri":"dli_spark_minute.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"128" + }, + { + "desc":"This function is used to return the month (from January to December) of a specified time.The return value is of the INT type.If the value of date is not of the DATE or ST", + "product_code":"dli", + "title":"month", + "uri":"dli_spark_month.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"129" + }, + { + "desc":"This function returns the month difference between date1 and date2.The return value is of the DOUBLE type.If the values of date1 and date2 are not of the DATE or STRING t", + "product_code":"dli", + "title":"months_between", + "uri":"dli_spark_months_between.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"130" + }, + { + "desc":"This function is used to return the date closest to day_of_week after start_date.The return value is of the DATE type, in the yyyy-mm-dd format.If the value of start_date", + "product_code":"dli", + "title":"next_day", + "uri":"dli_spark_next_day.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"131" + }, + { + "desc":"This function is used to return the quarter of a date. 
The value ranges from 1 to 4.The return value is of the INT type.If the value of date is not of the DATE or STRING ", + "product_code":"dli", + "title":"quarter", + "uri":"dli_spark_quarter.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"132" + }, + { + "desc":"This function is used to return the second (from 0 to 59) of a specified time.The return value is of the INT type.If the value of date is not of the DATE or STRING type, ", + "product_code":"dli", + "title":"second", + "uri":"dli_spark_second.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"133" + }, + { + "desc":"This function is used to convert a date into a string in a specified format.The return value is of the STRING type.If the value of date is not of the DATE or STRING type,", + "product_code":"dli", + "title":"to_char", + "uri":"dli_spark_to_char.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"134" + }, + { + "desc":"This function is used to return the year, month, and day in a time.Similar function: to_date1. The to_date1 function is used to convert a string in a specified format to ", + "product_code":"dli", + "title":"to_date", + "uri":"dli_spark_to_date.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"135" + }, + { + "desc":"This function is used to convert a string in a specified format to a date value.Similar function: to_date. 
The to_date function is used to return the year, month, and day", + "product_code":"dli", + "title":"to_date1", + "uri":"dli_spark_to_date1.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"136" + }, + { + "desc":"This function is used to convert a timestamp in a given time zone to a UTC timestamp.The return value is of the BIGINT type.If the value of timestamp is not of the DATE o", + "product_code":"dli", + "title":"to_utc_timestamp", + "uri":"dli_spark_to_utc_timestamp.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"137" + }, + { + "desc":"This function is used to reset a date to a specific format.Resetting means returning to default values, where the default values for year, month, and day are 01, and the ", + "product_code":"dli", + "title":"trunc", + "uri":"dli_spark_trunc.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"138" + }, + { + "desc":"This function is used to convert a date value to a numeric date value in UNIX format.The function returns the first ten digits of the timestamp in normal UNIX format.The ", + "product_code":"dli", + "title":"unix_timestamp", + "uri":"dli_spark_unix_timestamp.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"139" + }, + { + "desc":"This function is used to return the day of the current week.The return value is of the INT type.If Monday is used as the first day of a week, the value 0 is returned. 
For", + "product_code":"dli", + "title":"weekday", + "uri":"dli_spark_weekday.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"140" + }, + { + "desc":"This function is used to return the week number (from 0 to 53) of a specified date.The return value is of the INT type.If the value of date is not of the DATE or STRING t", + "product_code":"dli", + "title":"weekofyear", + "uri":"dli_spark_weekofyear.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"141" + }, + { + "desc":"This function is used to return the year of a specified date.The return value is of the INT type.If the value of date is not of the DATE or STRING type, the error message", + "product_code":"dli", + "title":"year", + "uri":"dli_spark_year.html", + "doc_type":"sqlreference", + "p_code":"107", + "code":"142" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"String Functions", + "uri":"dli_08_0472.html", + "doc_type":"sqlreference", + "p_code":"106", + "code":"143" }, { "desc":"Table 1 lists the string functions supported by DLI.", "product_code":"dli", - "title":"String Functions", + "title":"Overview", "uri":"dli_08_0067.html", "doc_type":"sqlreference", - "p_code":"96", - "code":"99" + "p_code":"143", + "code":"144" }, { - "desc":"An aggregate function performs a calculation operation on a set of input values and returns a value. 
For example, the COUNT function counts the number of rows retrieved b", + "desc":"This function is used to return the ASCII code of the first character in str.The return value is of the BIGINT type.If the value of str is not of the STRING, BIGINT, DOUB", + "product_code":"dli", + "title":"ascii", + "uri":"dli_spark_ascii.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"145" + }, + { + "desc":"This function is used to concatenate arrays or strings.If multiple arrays are used as the input, all elements in the arrays are connected to generate a new array.If multi", + "product_code":"dli", + "title":"concat", + "uri":"dli_spark_concat.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"146" + }, + { + "desc":"This function is used to return a string concatenated from multiple input strings that are separated by specified separators.orReturns the result of joining all the strin", + "product_code":"dli", + "title":"concat_ws", + "uri":"dli_spark_concat_ws.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"147" + }, + { + "desc":"This parameter is used to return the number of characters in str1 that appear in str2.The return value is of the BIGINT type.If the value of str1 or str2 is NULL, NULL is", + "product_code":"dli", + "title":"char_matchcount", + "uri":"dli_spark_char_matchcount.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"148" + }, + { + "desc":"This function is used to encode str in charset format.encode(string , string )The return value is of the BINARY type.If the value of str or charset is NULL,", + "product_code":"dli", + "title":"encode", + "uri":"dli_spark_encode.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"149" + }, + { + "desc":"This function is used to return the position (stating from 1) of str1 in str2 separated by commas (,).The return value is of the BIGINT type.If str1 cannot be matched in ", + "product_code":"dli", + "title":"find_in_set", + 
"uri":"dli_spark_find_in_set.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"150" + }, + { + "desc":"This function is used to parse the JSON object in a specified JSON path. The function will return NULL if the JSON object is invalid.The return value is of the STRING typ", + "product_code":"dli", + "title":"get_json_object", + "uri":"dli_spark_get_json_object.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"151" + }, + { + "desc":"This function is used to return the index of substr that appears earliest in str.It returns NULL if either of the arguments are NULL and returns 0 if substr does not exis", + "product_code":"dli", + "title":"instr", + "uri":"dli_spark_instr.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"152" + }, + { + "desc":"This function is used to return the position of substring str2 in string str1.Similar function: instr. The instr function is used to return the index of substr that appea", + "product_code":"dli", + "title":"instr1", + "uri":"dli_spark_instr1.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"153" + }, + { + "desc":"This function is used to convert the first letter of each word of a string to upper case and all other letters to lower case.The return value is of the STRING type. In th", + "product_code":"dli", + "title":"initcap", + "uri":"dli_spark_initcap.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"154" + }, + { + "desc":"This function is used to split str by split1, convert each group into a key-value pair by split2, and return the value corresponding to the key.The return value is of the", + "product_code":"dli", + "title":"keyvalue", + "uri":"dli_spark_keyvalue.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"155" + }, + { + "desc":"This function is used to return the length of a string.Similar function: lengthb. 
The lengthb function is used to return the length of string str in bytes and return a va", + "product_code":"dli", + "title":"length", + "uri":"dli_spark_length.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"156" + }, + { + "desc":"This function is used to return the length of a specified string in bytes.Similar function: length. The length function is used to return the length of a string and retur", + "product_code":"dli", + "title":"lengthb", + "uri":"dli_spark_lengthb.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"157" + }, + { + "desc":"This function is used to returns the Levenshtein distance between two strings, for example, levenshtein('kitten','sitting') = 3.Levenshtein distance is a type of edit dis", + "product_code":"dli", + "title":"levenshtein", + "uri":"dli_spark_levenshtein.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"158" + }, + { + "desc":"This function is used to return the position of substr in str. You can specify the starting position of your search using \"start_pos,\" which starts from 1.The return valu", + "product_code":"dli", + "title":"locate", + "uri":"dli_spark_locate.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"159" + }, + { + "desc":"This function is used to convert all characters of a string to the lower case.The return value is of the STRING type.If the value of the input parameter is not of the STR", + "product_code":"dli", + "title":"lower/lcase", + "uri":"dli_spark_lower_lcase.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"160" + }, + { + "desc":"This function is used to return a string of a specified length. 
If the length of the given string (str1) is shorter than the specified length (length), the given string i", + "product_code":"dli", + "title":"lpad", + "uri":"dli_spark_lpad.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"161" + }, + { + "desc":"This function is used to remove characters from the left of str.If trimChars is not specified, spaces are removed by default.If trimChars is specified, the function remov", + "product_code":"dli", + "title":"ltrim", + "uri":"dli_spark_ltrim.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"162" + }, + { + "desc":"This character is used to return the specified part of a given URL. Valid values of partToExtract include HOST, PATH, QUERY, REF, PROTOCOL, AUTHORITY, FILE, and USERINFO.", + "product_code":"dli", + "title":"parse_url", + "uri":"dli_spark_parse_url.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"163" + }, + { + "desc":"This function is used to print the input in a specific format.The return value is of the STRING type.The value is returned after the parameters that filled in Obj are spe", + "product_code":"dli", + "title":"printf", + "uri":"dli_spark_printf.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"164" + }, + { + "desc":"This function is used to return the number of substrings that match a specified pattern in the source, starting from the start_position position.The return value is of th", + "product_code":"dli", + "title":"regexp_count", + "uri":"dli_spark_regexp_count.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"165" + }, + { + "desc":"This function is used to match the string source based on the pattern grouping rule and return the string content that matches groupid.regexp_extract(string , str", + "product_code":"dli", + "title":"regexp_extract", + "uri":"dli_spark_regexp_extract.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"166" + }, + { + "desc":"This function is used to replace the part in a specified 
string that is the same as the string old with the string new and return the result.If the string has no same cha", + "product_code":"dli", + "title":"replace", + "uri":"dli_spark_replace.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"167" + }, + { + "desc":"This function has slight variations in its functionality depending on the version of Spark being used.For Spark 2.4.5 or earlier: Replaces the substring that matches patt", + "product_code":"dli", + "title":"regexp_replace", + "uri":"dli_spark_regexp_replace.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"168" + }, + { + "desc":"This function is used to replace the substring that matches pattern for the occurrence time in the source string with the specified string replace_string and return the r", + "product_code":"dli", + "title":"regexp_replace1", + "uri":"dli_spark_regexp_replace1.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"169" + }, + { + "desc":"This function is used to return the start or end position of the substring that matches a specified pattern for the occurrence time, starting from start_position in the s", + "product_code":"dli", + "title":"regexp_instr", + "uri":"dli_spark_regexp_instr.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"170" + }, + { + "desc":"This function is used to return the substring that matches a specified pattern for the occurrence time, starting from start_position in the string source.The return value", + "product_code":"dli", + "title":"regexp_substr", + "uri":"dli_spark_regexp_substr.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"171" + }, + { + "desc":"This function is used to return the string after str is repeated for n times.The return value is of the STRING type.If the value of str is not of the STRING, BIGINT, DOUB", + "product_code":"dli", + "title":"repeat", + "uri":"dli_spark_repeat.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"172" + }, + { + "desc":"This 
function is used to return a string in reverse order.The return value is of the STRING type.If the value of str is not of the STRING, BIGINT, DOUBLE, DECIMAL, or DAT", + "product_code":"dli", + "title":"reverse", + "uri":"dli_spark_reverse.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"173" + }, + { + "desc":"This function is used to right pad str1 with str2 to the specified length.The return value is of the STRING type.If the value of length is smaller than the number of digi", + "product_code":"dli", + "title":"rpad", + "uri":"dli_spark_rpad.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"174" + }, + { + "desc":"This function is used to remove characters from the right of str.If trimChars is not specified, spaces are removed by default.If trimChars is specified, the function remo", + "product_code":"dli", + "title":"rtrim", + "uri":"dli_spark_rtrim.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"175" + }, + { + "desc":"This function is used to return the soundex string from str, for example, soundex('Miller') = M460.The return value is of the STRING type.If the value of str is NULL, NUL", + "product_code":"dli", + "title":"soundex", + "uri":"dli_spark_soundex.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"176" + }, + { + "desc":"This function is used to return a specified number of spaces.The return value is of the STRING type.If the value of n is empty, an error is reported.If the value of n is ", + "product_code":"dli", + "title":"space", + "uri":"dli_spark_space.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"177" + }, + { + "desc":"This function is used to return the substring of str, starting from start_position and with a length of length.orThe return value is of the STRING type.If the value of st", + "product_code":"dli", + "title":"substr/substring", + "uri":"dli_spark_substr_substring.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"178" + }, + { + 
"desc":"This function is used to truncate the string before the count separator of str. If the value of count is positive, the string is truncated from the left. If the value of ", + "product_code":"dli", + "title":"substring_index", + "uri":"dli_spark_substring_index.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"179" + }, + { + "desc":"This function is used to split a specified string based on a specified separator and return a substring from the start to end position.The return value is of the STRING t", + "product_code":"dli", + "title":"split_part", + "uri":"dli_spark_split_part.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"180" + }, + { + "desc":"This function is used to translate the input string by replacing the characters or string specified by from with the characters or string specified by to.For example, it ", + "product_code":"dli", + "title":"translate", + "uri":"dli_spark_translate.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"181" + }, + { + "desc":"This function is used to remove characters from the left and right of str.If trimChars is not specified, spaces are removed by default.If trimChars is specified, the func", + "product_code":"dli", + "title":"trim", + "uri":"dli_spark_trim.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"182" + }, + { + "desc":"This function is used to convert all characters of a string to the upper case.orThe return value is of the STRING type.If the value of the input parameter is not of the S", + "product_code":"dli", + "title":"upper/ucase", + "uri":"dli_spark_upper_ucase.html", + "doc_type":"sqlreference", + "p_code":"143", + "code":"183" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Mathematical Functions", + "uri":"dli_08_0473.html", + "doc_type":"sqlreference", + "p_code":"106", + "code":"184" + }, + { + "desc":"Table 1 lists the mathematical functions supported by DLI.", + "product_code":"dli", + "title":"Overview", + "uri":"dli_08_0065.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"185" + }, + { + "desc":"This function is used to calculate the absolute value of an input parameter.The return value is of the DOUBLE or INT type.If the value of a is NULL, NULL is returned.The ", + "product_code":"dli", + "title":"abs", + "uri":"dli_spark_abs.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"186" + }, + { + "desc":"This function is used to return the arc cosine value of a given angle a.The return value is of the DOUBLE type. The value ranges from 0 to π.If the value of a is not with", + "product_code":"dli", + "title":"acos", + "uri":"dli_spark_aocs.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"187" + }, + { + "desc":"This function is used to return the arc sine value of a given angle a.The return value is of the DOUBLE type. The value ranges from -π/2 to π/2.If the value of a is not w", + "product_code":"dli", + "title":"asin", + "uri":"dli_spark_asin.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"188" + }, + { + "desc":"This function is used to return the arc tangent value of a given angle a.The return value is of the DOUBLE type. 
The value ranges from -π/2 to π/2.If the value of a is no", + "product_code":"dli", + "title":"atan", + "uri":"dli_spark_atan.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"189" + }, + { + "desc":"This function is used to return the binary format of a.The return value is of the STRING type.If the value of a is NULL, NULL is returned.The value 1 is returned.The valu", + "product_code":"dli", + "title":"bin", + "uri":"dli_spark_bin.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"190" + }, + { + "desc":"This function is used to return a value that is rounded off to d decimal places.The return value is of the DOUBLE type.If the value of a or d is NULL, NULL is returned.Th", + "product_code":"dli", + "title":"bround", + "uri":"dli_spark_bround.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"191" + }, + { + "desc":"This function is used to return the cube root of a.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The value 3 is returned.select cbrt(", + "product_code":"dli", + "title":"cbrt", + "uri":"dli_spark_cbrt.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"192" + }, + { + "desc":"This function is used to round up a to the nearest integer.The return value is of the DECIMAL type.If the value of a is NULL, NULL is returned.The value 2 is returned.The", + "product_code":"dli", + "title":"ceil", + "uri":"dli_spark_ceil.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"193" + }, + { + "desc":"This function is used to convert a number from from_base to to_base.The return value is of the STRING type.If the value of num, from_base, or to_base is NULL, NULL is ret", + "product_code":"dli", + "title":"conv", + "uri":"dli_spark_conv.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"194" + }, + { + "desc":"This function is used to calculate the cosine value of a, with input in radians.The return value is of the DOUBLE type.If the value of a is NULL, 
NULL is returned.The val", + "product_code":"dli", + "title":"cos", + "uri":"dli_spark_cos.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"195" + }, + { + "desc":"This function is used to calculate the cotangent value of a, with input in radians.The return value is of the DOUBLE or DECIMAL type.If the value of a is NULL, NULL is re", + "product_code":"dli", + "title":"cot1", + "uri":"dli_spark_cot1.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"196" + }, + { + "desc":"This function is used to calculate the angle corresponding to the returned radian.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The v", + "product_code":"dli", + "title":"degrees", + "uri":"dli_spark_degress.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"197" + }, + { + "desc":"This function is used to return the value of e.The return value is of the DOUBLE type.The value 2.718281828459045 is returned.select e();", + "product_code":"dli", + "title":"e", + "uri":"dli_spark_e.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"198" + }, + { + "desc":"This function is used to return the value of e raised to the power of a.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The value 7.389", + "product_code":"dli", + "title":"exp", + "uri":"dli_spark_exp.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"199" + }, + { + "desc":"This function is used to return the factorial of a.The return value is of the BIGINT type.If the value of a is 0, 1 is returned.If the value of a is NULL or outside the r", + "product_code":"dli", + "title":"factorial", + "uri":"dli_spark_factorial.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"200" + }, + { + "desc":"This function is used to round down a to the nearest integer.The return value is of the BIGINT type.If the value of a is NULL, NULL is returned.The value 1 is returned.Th", + "product_code":"dli", + 
"title":"floor", + "uri":"dli_spark_floor.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"201" + }, + { + "desc":"This function is used to return the greatest value in a list of values.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The value 4.0 is", + "product_code":"dli", + "title":"greatest", + "uri":"dli_spark_greatest.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"202" + }, + { + "desc":"This function is used to convert an integer or character into its hexadecimal representation.The return value is of the STRING type.If the value of a is 0, 0 is returned.", + "product_code":"dli", + "title":"hex", + "uri":"dli_spark_hex.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"203" + }, + { + "desc":"This function is used to return the smallest value in a list of values.The return value is of the DOUBLE type.If the value of v1 or v2 is of the STRING type, an error is ", + "product_code":"dli", + "title":"least", + "uri":"dli_spark_least.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"204" + }, + { + "desc":"This function is used to return the natural logarithm of a given value.The return value is of the DOUBLE type.If the value of a is negative or 0, NULL is returned.If the ", + "product_code":"dli", + "title":"ln", + "uri":"dli_spark_ln.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"205" + }, + { + "desc":"This function is used to return the natural logarithm of a given base and exponent.The return value is of the DOUBLE type.If the value of base or a is NULL, NULL is retur", + "product_code":"dli", + "title":"log", + "uri":"dli_spark_log.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"206" + }, + { + "desc":"This function is used to return the natural logarithm of a given value with a base of 10.The return value is of the DOUBLE type.If the value of a is negative, 0, or NULL,", + "product_code":"dli", + "title":"log10", + 
"uri":"dli_spark_log10.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"207" + }, + { + "desc":"This function is used to return the natural logarithm of a given value with a base of 2.The return value is of the DOUBLE type.If the value of a is negative, 0, or NULL, ", + "product_code":"dli", + "title":"log2", + "uri":"dli_spark_log2.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"208" + }, + { + "desc":"This function is used to calculate the median of input parameters.The return value is of the DOUBLE or DECIMAL type.If the column name does not exist, an error is reporte", + "product_code":"dli", + "title":"median", + "uri":"dli_spark_median.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"209" + }, + { + "desc":"This function is used to return the additive inverse of a.The return value is of the DECIMAL or INT type.If the value of a is NULL, NULL is returned.The value -1 is retur", + "product_code":"dli", + "title":"negative", + "uri":"dli_spark_negative.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"210" + }, + { + "desc":"This function is used to return the exact percentile, which is applicable to a small amount of data. It sorts a specified column in ascending order, and then obtains the ", + "product_code":"dli", + "title":"percentlie", + "uri":"dli_spark_percentlie.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"211" + }, + { + "desc":"This function is used to return the approximate percentile, which is applicable to a large amount of data. 
It sorts a specified column in ascending order, and then obtain", + "product_code":"dli", + "title":"percentlie_approx", + "uri":"dli_spark_percentlie_approx.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"212" + }, + { + "desc":"This function is used to return the value of π.The return value is of the DOUBLE type.The value 3.141592653589793 is returned.", + "product_code":"dli", + "title":"pi", + "uri":"dli_spark_pi.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"213" + }, + { + "desc":"This function is used to return the positive value of the remainder after division of x by y.pmod(INT a, INT b)The return value is of the DECIMAL or INT type.If the value", + "product_code":"dli", + "title":"pmod", + "uri":"dli_spark_pmod.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"214" + }, + { + "desc":"This function is used to return the value of a.The return value is of the DECIMAL, DOUBLE, or INT type.If the value of a is NULL, NULL is returned.The value 3 is returned", + "product_code":"dli", + "title":"positive", + "uri":"dli_spark_positive.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"215" + }, + { + "desc":"This function is used to calculate and return the pth power of a.The return value is of the DOUBLE type.If the value of a or p is NULL, NULL is returned.The value 16 retu", + "product_code":"dli", + "title":"pow", + "uri":"dli_spark_pow.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"216" + }, + { + "desc":"This function is used to return the radian corresponding to an angle.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The value 1.047197", + "product_code":"dli", + "title":"radians", + "uri":"dli_spark_radians.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"217" + }, + { + "desc":"This function is used to return an evenly distributed random number that is greater than or equal to 0 and less than 1.The return value is of the DOUBLE 
type.The value 0.", + "product_code":"dli", + "title":"rand", + "uri":"dli_spark_rand.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"218" + }, + { + "desc":"This function is used to calculate the rounded value of a up to d decimal places.The return value is of the DOUBLE type.If the value of d is negative, an error is reporte", + "product_code":"dli", + "title":"round", + "uri":"dli_spark_round.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"219" + }, + { + "desc":"This function is used to perform a signed bitwise left shift. It takes the binary number a and shifts it b positions to the left.shiftleft(BIGINT a, BIGINT b)The return v", + "product_code":"dli", + "title":"shiftleft", + "uri":"dli_spark_shiftleft.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"220" + }, + { + "desc":"This function is used to perform a signed bitwise right shift. It takes the binary number a and shifts it b positions to the right.The return value is of the INT type.If ", + "product_code":"dli", + "title":"shiftright", + "uri":"dli_spark_shiftright.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"221" + }, + { + "desc":"This function is used to perform an unsigned bitwise right shift. 
It takes the binary number a and shifts it b positions to the right.The return value is of the INT type.", + "product_code":"dli", + "title":"shiftrightunsigned", + "uri":"dli_spark_shiftrightunsigned.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"222" + }, + { + "desc":"This function is used to return the positive and negative signs corresponding to a.The return value is of the DOUBLE type.If the value of a is a positive number, 1 is ret", + "product_code":"dli", + "title":"sign", + "uri":"dli_spark_sign.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"223" + }, + { + "desc":"This function is used to return the sine value of a, with input in radians.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The value 1 ", + "product_code":"dli", + "title":"sin", + "uri":"dli_spark_sin.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"224" + }, + { + "desc":"This function is used to return the square root of a value.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The value 2.8284271247461903", + "product_code":"dli", + "title":"sqrt", + "uri":"dli_spark_sqrt.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"225" + }, + { + "desc":"This function is used to return the tangent value of a, with input in radians.The return value is of the DOUBLE type.If the value of a is NULL, NULL is returned.The value", + "product_code":"dli", + "title":"tan", + "uri":"dli_spark_tan.html", + "doc_type":"sqlreference", + "p_code":"184", + "code":"226" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "product_code":"dli", "title":"Aggregate Functions", - "uri":"dli_08_0068.html", + "uri":"dli_08_0474.html", "doc_type":"sqlreference", - "p_code":"96", - "code":"100" + "p_code":"106", + "code":"227" }, { - "desc":"A window function performs a calculation operation on a set of values related to the current value. A window function can be an aggregate function used in the GROUP BY cl", + "desc":"Table 1 lists the aggregate functions supported by DLI.", + "product_code":"dli", + "title":"Overview", + "uri":"dli_08_0068.html", + "doc_type":"sqlreference", + "p_code":"227", + "code":"228" + }, + { + "desc":"This function is used to return the average value.The return value is of the DOUBLE type.If the value of col is NULL, the column is not involved in calculation.Calculates", + "product_code":"dli", + "title":"avg", + "uri":"dli_spark_avg.html", + "doc_type":"sqlreference", + "p_code":"227", + "code":"229" + }, + { + "desc":"This function is used to return the correlation coefficient between two columns of numerical values.The return value is of the DOUBLE type.Calculates the correlation coef", + "product_code":"dli", + "title":"corr", + "uri":"dli_spark_corr.html", + "doc_type":"sqlreference", + "p_code":"227", + "code":"230" + }, + { + "desc":"This function is used to return the number of records.The return value is of the BIGINT type.If the value of colname is NULL, the row is not involved in calculation.Calcu", + "product_code":"dli", + "title":"count", + "uri":"dli_spark_count.html", + "doc_type":"sqlreference", + "p_code":"227", + "code":"231" + }, + { + "desc":"This function is used to return the covariance between two columns of numerical values.The return value is of the DOUBLE type.Calculates the covariance between the invent", + "product_code":"dli", + "title":"covar_pop", + "uri":"dli_spark_covar_pop.html", + 
"doc_type":"sqlreference", + "p_code":"227", + "code":"232" + }, + { + "desc":"This function is used to return the sample covariance between two columns of numerical values.The return value is of the DOUBLE type.Calculates the sample covariance betw", + "product_code":"dli", + "title":"covar_samp", + "uri":"dli_spark_covar_samp.html", + "doc_type":"sqlreference", + "p_code":"227", + "code":"233" + }, + { + "desc":"This function is used to return the maximum value.The return value is of the DOUBLE type.The return type is the same as the type of col. The return rules are as follows:I", + "product_code":"dli", + "title":"max", + "uri":"dli_spark_max.html", + "doc_type":"sqlreference", + "p_code":"227", + "code":"234" + }, + { + "desc":"This function is used to return the minimum value.The return value is of the DOUBLE type.The return type is the same as the type of col. The return rules are as follows:I", + "product_code":"dli", + "title":"min", + "uri":"dli_spark_min.html", + "doc_type":"sqlreference", + "p_code":"227", + "code":"235" + }, + { + "desc":"This function is used to return the numerical value at a certain percentage point within a range of values.The return value is of the DOUBLE type.The value should be betw", + "product_code":"dli", + "title":"percentile", + "uri":"dli_spark_percentile.html", + "doc_type":"sqlreference", + "p_code":"227", + "code":"236" + }, + { + "desc":"This function is used to approximate the pth percentile (including floating-point numbers) of a numeric column within a group.The return value is of the DOUBLE type.Calcu", + "product_code":"dli", + "title":"percentile_approx", + "uri":"dli_spark_percentile_approx.html", + "doc_type":"sqlreference", + "p_code":"227", + "code":"237" + }, + { + "desc":"This function is used to return the deviation of a specified column.The return value is of the DOUBLE type.Calculates the deviation of all offering inventories (items). 
A", + "product_code":"dli", + "title":"stddev_pop", + "uri":"dli_spark_stddev_pop.html", + "doc_type":"sqlreference", + "p_code":"227", + "code":"238" + }, + { + "desc":"This function is used to return the sample deviation of a specified column.The return value is of the DOUBLE type.Calculates the sample covariance between the inventory (", + "product_code":"dli", + "title":"stddev_samp", + "uri":"dli_spark_stddev_samp.html", + "doc_type":"sqlreference", + "p_code":"227", + "code":"239" + }, + { + "desc":"This function is used to calculate the total sum.The return value is of the DOUBLE type.If the value of col is NULL, the row is not involved in calculation.Calculates the", + "product_code":"dli", + "title":"sum", + "uri":"dli_spark_sum.html", + "doc_type":"sqlreference", + "p_code":"227", + "code":"240" + }, + { + "desc":"This function is used to return the variance of a column.The return value is of the DOUBLE type.Calculates the variance of all offering inventories (items). An example co", + "product_code":"dli", + "title":"variance/var_pop", + "uri":"dli_spark_variance_var_pop.html", + "doc_type":"sqlreference", + "p_code":"227", + "code":"241" + }, + { + "desc":"This function is used to return the sample variance of a specified column.The return value is of the DOUBLE type.Calculates the sample variance of all offering inventorie", + "product_code":"dli", + "title":"var_samp", + "uri":"dli_spark_war_samp.html", + "doc_type":"sqlreference", + "p_code":"227", + "code":"242" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "product_code":"dli", "title":"Window Functions", + "uri":"dli_08_0475.html", + "doc_type":"sqlreference", + "p_code":"106", + "code":"243" + }, + { + "desc":"Table 1 lists the window functions supported by DLI.", + "product_code":"dli", + "title":"Overview", "uri":"dli_08_0069.html", "doc_type":"sqlreference", - "p_code":"96", - "code":"101" + "p_code":"243", + "code":"244" + }, + { + "desc":"This function is used to return the cumulative distribution, which is equivalent to calculating the proportion of data in the partition that is greater than or equal to, ", + "product_code":"dli", + "title":"cume_dist", + "uri":"dli_spark_cume_dist.html", + "doc_type":"sqlreference", + "p_code":"243", + "code":"245" + }, + { + "desc":"This function is used to obtain the value of the first data record in the window corresponding to the current row.The restrictions on using window functions are as follow", + "product_code":"dli", + "title":"first_value", + "uri":"dli_spark_first_value.html", + "doc_type":"sqlreference", + "p_code":"243", + "code":"246" + }, + { + "desc":"This function is used to obtain the value of the last data record in the window corresponding to the current row.The restrictions on using window functions are as follows", + "product_code":"dli", + "title":"last_value", + "uri":"dli_spark_last_value.html", + "doc_type":"sqlreference", + "p_code":"243", + "code":"247" + }, + { + "desc":"This function is used to return the value of the nth row upwards within a specified window.The restrictions on using window functions are as follows:Window functions can ", + "product_code":"dli", + "title":"lag", + "uri":"dli_spark_lag.html", + "doc_type":"sqlreference", + "p_code":"243", + "code":"248" + }, + { + "desc":"This function is used to return the value of the nth row downwards within a specified window.The restrictions on 
using window functions are as follows:Window functions ca", + "product_code":"dli", + "title":"lead", + "uri":"dli_spark_lead.html", + "doc_type":"sqlreference", + "p_code":"243", + "code":"249" + }, + { + "desc":"This function is used to return the value of the column specified in the ORDER BY clause of a window, expressed as a decimal between 0 and 1. It is calculated as (the ran", + "product_code":"dli", + "title":"percent_rank", + "uri":"dli_spark_percent_rank.html", + "doc_type":"sqlreference", + "p_code":"243", + "code":"250" + }, + { + "desc":"This function is used to return the rank of a value in a set of values. When multiple values share the same rank, the next rank in the sequence is not consecutive.The res", + "product_code":"dli", + "title":"rank", + "uri":"dli_spark_rank.html", + "doc_type":"sqlreference", + "p_code":"243", + "code":"251" + }, + { + "desc":"This function is used to return the row number, starting from 1 and increasing incrementally.The restrictions on using window functions are as follows:Window functions ca", + "product_code":"dli", + "title":"row_number", + "uri":"dli_spark_row_number.html", + "doc_type":"sqlreference", + "p_code":"243", + "code":"252" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Other Functions", + "uri":"dli_08_0476.html", + "doc_type":"sqlreference", + "p_code":"106", + "code":"253" + }, + { + "desc":"The following table lists the functions provided by DLI, such as decode1, javahash, and max_pt.", + "product_code":"dli", + "title":"Overview", + "uri":"dli_08_0469.html", + "doc_type":"sqlreference", + "p_code":"253", + "code":"254" + }, + { + "desc":"This function is used to implement if-then-else branch selection.result and default are return values. 
These values can be of any data type.If they match, the value of re", + "product_code":"dli", + "title":"decode1", + "uri":"dli_spark_decode1.html", + "doc_type":"sqlreference", + "p_code":"253", + "code":"255" + }, + { + "desc":"This function is used to return the hash value of a.The return value is of the STRING type.The hash value is returned. If the value of a is null, an error is reported.The", + "product_code":"dli", + "title":"javahash", + "uri":"dli_spark_javahash.html", + "doc_type":"sqlreference", + "p_code":"253", + "code":"256" + }, + { + "desc":"This function is used to return the name of the largest level-1 partition that contains data in a partitioned table and read the data of this partition.The return value i", + "product_code":"dli", + "title":"max_pt", + "uri":"dli_spark_max_pt.html", + "doc_type":"sqlreference", + "p_code":"253", + "code":"257" + }, + { + "desc":"This function is used to sort input variables in ascending order and return the value at the position specified by nth.The return value is of the DOUBLE or DECIMAL type.V", + "product_code":"dli", + "title":"ordinal", + "uri":"dli_spark_ordinal.html", + "doc_type":"sqlreference", + "p_code":"253", + "code":"258" + }, + { + "desc":"This function is used to convert an array split by a fixed separator in a column into multiple rows.All columns used as keys must be placed before the columns to be trans", + "product_code":"dli", + "title":"trans_array", + "uri":"dli_spark_trans_array.html", + "doc_type":"sqlreference", + "p_code":"253", + "code":"259" + }, + { + "desc":"This function is used to truncate the number value to a specified decimal place.The return value is of the DOUBLE or DECIMAL type.The return rules are as follows:If the n", + "product_code":"dli", + "title":"trunc_numeric", + "uri":"dli_spark_trunc_numeric.html", + "doc_type":"sqlreference", + "p_code":"253", + "code":"260" + }, + { + "desc":"This function is used to convert a string from the application/x-www-form-urlencoded 
MIME format to regular characters.The return value is of the STRING type.UTF-8-encode", + "product_code":"dli", + "title":"url_decode", + "uri":"dli_spark_url_decode.html", + "doc_type":"sqlreference", + "p_code":"253", + "code":"261" + }, + { + "desc":"This function is used to encode a string in the application/x-www-form-urlencoded MIME format.url_encode(string [, string ])The return value is of the ST", + "product_code":"dli", + "title":"url_encode", + "uri":"dli_spark_url_encode.html", + "doc_type":"sqlreference", + "p_code":"253", + "code":"262" }, { "desc":"This statement is a basic query statement and is used to return the query results.The table to be queried must exist. Otherwise, an error is reported.To filter the record", @@ -915,7 +2364,7 @@ "uri":"dli_08_0150.html", "doc_type":"sqlreference", "p_code":"1", - "code":"102" + "code":"263" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -924,7 +2373,7 @@ "uri":"dli_08_0151.html", "doc_type":"sqlreference", "p_code":"1", - "code":"103" + "code":"264" }, { "desc":"This statement is used to filter the query results using the WHERE clause.All is used to return repeated rows. By default, all repeated rows are returned. It is followed ", @@ -932,8 +2381,8 @@ "title":"WHERE Filtering Clause", "uri":"dli_08_0152.html", "doc_type":"sqlreference", - "p_code":"103", - "code":"104" + "p_code":"264", + "code":"265" }, { "desc":"This statement is used to filter the query results using the HAVING clause.All is used to return repeated rows. By default, all repeated rows are returned. 
It is followed", @@ -941,8 +2390,8 @@ "title":"HAVING Filtering Clause", "uri":"dli_08_0153.html", "doc_type":"sqlreference", - "p_code":"103", - "code":"105" + "p_code":"264", + "code":"266" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -951,7 +2400,7 @@ "uri":"dli_08_0154.html", "doc_type":"sqlreference", "p_code":"1", - "code":"106" + "code":"267" }, { "desc":"This statement is used to order the result set of a query by the specified field.ASC/DESC: ASC sorts from the lowest value to the highest value. DESC sorts from the highe", @@ -959,8 +2408,8 @@ "title":"ORDER BY", "uri":"dli_08_0155.html", "doc_type":"sqlreference", - "p_code":"106", - "code":"107" + "p_code":"267", + "code":"268" }, { "desc":"This statement is used to achieve the partial sorting of tables according to fields.ASC/DESC: ASC sorts from the lowest value to the highest value. DESC sorts from the hi", @@ -968,8 +2417,8 @@ "title":"SORT BY", "uri":"dli_08_0156.html", "doc_type":"sqlreference", - "p_code":"106", - "code":"108" + "p_code":"267", + "code":"269" }, { "desc":"This statement is used to bucket a table and sort the table within buckets.CLUSTER BY: Buckets are created based on specified fields. Single fields and multiple fields ar", @@ -977,8 +2426,8 @@ "title":"CLUSTER BY", "uri":"dli_08_0157.html", "doc_type":"sqlreference", - "p_code":"106", - "code":"109" + "p_code":"267", + "code":"270" }, { "desc":"This statement is used to bucket a table according to the field.DISTRIBUTE BY: Buckets are created based on specified fields. 
A single field or multiple fields are suppor", @@ -986,8 +2435,8 @@ "title":"DISTRIBUTE BY", "uri":"dli_08_0158.html", "doc_type":"sqlreference", - "p_code":"106", - "code":"110" + "p_code":"267", + "code":"271" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -996,7 +2445,7 @@ "uri":"dli_08_0159.html", "doc_type":"sqlreference", "p_code":"1", - "code":"111" + "code":"272" }, { "desc":"This statement is used to group a table based on columns.Column-based GROUP BY can be categorized into single-column GROUP BY and multi-column GROUP BY.Single-column GROU", @@ -1004,8 +2453,8 @@ "title":"Column-Based GROUP BY", "uri":"dli_08_0160.html", "doc_type":"sqlreference", - "p_code":"111", - "code":"112" + "p_code":"272", + "code":"273" }, { "desc":"This statement is used to group a table according to expressions.The groupby_expression can contain a single field or multiple fields, and also can call aggregate functio", @@ -1013,8 +2462,8 @@ "title":"Expression-Based GROUP BY", "uri":"dli_08_0161.html", "doc_type":"sqlreference", - "p_code":"111", - "code":"113" + "p_code":"272", + "code":"274" }, { "desc":"This statement filters a table after grouping it using the HAVING clause.The groupby_expression can contain a single field or multiple fields, and can also call aggregate", @@ -1022,8 +2471,8 @@ "title":"GROUP BY Using HAVING", "uri":"dli_08_0162.html", "doc_type":"sqlreference", - "p_code":"111", - "code":"114" + "p_code":"272", + "code":"275" }, { "desc":"This statement is used to generate the aggregate row, super-aggregate row, and the total row. 
The statement can achieve multi-layer statistics from right to left and disp", @@ -1031,8 +2480,8 @@ "title":"ROLLUP", "uri":"dli_08_0163.html", "doc_type":"sqlreference", - "p_code":"111", - "code":"115" + "p_code":"272", + "code":"276" }, { "desc":"This statement is used to generate the cross-table row and achieve the cross-statistics of the GROUP BY field.GROUPING SETS is the expansion of GROUP BY. For example:SELE", @@ -1040,8 +2489,8 @@ "title":"GROUPING SETS", "uri":"dli_08_0164.html", "doc_type":"sqlreference", - "p_code":"111", - "code":"116" + "p_code":"272", + "code":"277" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1050,7 +2499,7 @@ "uri":"dli_08_0165.html", "doc_type":"sqlreference", "p_code":"1", - "code":"117" + "code":"278" }, { "desc":"This statement is used to join and return the rows that meet the JOIN conditions from two tables as the result set.JOIN/INNER JOIN: Only the records that meet the JOIN co", @@ -1058,8 +2507,8 @@ "title":"INNER JOIN", "uri":"dli_08_0166.html", "doc_type":"sqlreference", - "p_code":"117", - "code":"118" + "p_code":"278", + "code":"279" }, { "desc":"Join the left table with the right table and return all joined records of the left table. If no joined record is found, NULL will be returned.LEFT OUTER JOIN: Returns all", @@ -1067,8 +2516,8 @@ "title":"LEFT OUTER JOIN", "uri":"dli_08_0167.html", "doc_type":"sqlreference", - "p_code":"117", - "code":"119" + "p_code":"278", + "code":"280" }, { "desc":"Match the right table with the left table and return all matched records of the right table. 
If no matched record is found, NULL will be returned.RIGHT OUTER JOIN: Return", @@ -1076,8 +2525,8 @@ "title":"RIGHT OUTER JOIN", "uri":"dli_08_0168.html", "doc_type":"sqlreference", - "p_code":"117", - "code":"120" + "p_code":"278", + "code":"281" }, { "desc":"Join all records from the right table and the left table and return all joined records. If no joined record is found, NULL will be returned.FULL OUTER JOIN: Matches all r", @@ -1085,8 +2534,8 @@ "title":"FULL OUTER JOIN", "uri":"dli_08_0169.html", "doc_type":"sqlreference", - "p_code":"117", - "code":"121" + "p_code":"278", + "code":"282" }, { "desc":"This statement has the same function as INNER JOIN, that is, the result set that meet the WHERE condition is returned. However, IMPLICIT JOIN does not use the condition s", @@ -1094,8 +2543,8 @@ "title":"IMPLICIT JOIN", "uri":"dli_08_0170.html", "doc_type":"sqlreference", - "p_code":"117", - "code":"122" + "p_code":"278", + "code":"283" }, { "desc":"Cartesian JOIN joins each record of table A with all records in table B. For example, if there are m records in table A and n records in table B, m x n records will be ge", @@ -1103,8 +2552,8 @@ "title":"Cartesian JOIN", "uri":"dli_08_0171.html", "doc_type":"sqlreference", - "p_code":"117", - "code":"123" + "p_code":"278", + "code":"284" }, { "desc":"This statement is used to query the records that meet the JOIN condition from the left table.LEFT SEMI JOIN: Indicates to only return the records from the left table. 
LEF", @@ -1112,8 +2561,8 @@ "title":"LEFT SEMI JOIN", "uri":"dli_08_0172.html", "doc_type":"sqlreference", - "p_code":"117", - "code":"124" + "p_code":"278", + "code":"285" }, { "desc":"This statement is used to join multiple tables using unequal values and return the result set that meet the condition.The non_equi_join_condition is similar to join_condi", @@ -1121,8 +2570,8 @@ "title":"NON-EQUIJOIN", "uri":"dli_08_0173.html", "doc_type":"sqlreference", - "p_code":"117", - "code":"125" + "p_code":"278", + "code":"286" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1131,7 +2580,7 @@ "uri":"dli_08_0174.html", "doc_type":"sqlreference", "p_code":"1", - "code":"126" + "code":"287" }, { "desc":"Subqueries are nested in the WHERE clause, and the subquery result is used as the filtering condition.All is used to return repeated rows. By default, all repeated rows a", @@ -1139,8 +2588,8 @@ "title":"Subquery Nested by WHERE", "uri":"dli_08_0175.html", "doc_type":"sqlreference", - "p_code":"126", - "code":"127" + "p_code":"287", + "code":"288" }, { "desc":"This statement is used to nest subquery by FROM and use the subquery results as the data source of the external SELECT statement.All is used to return repeated rows. By d", @@ -1148,8 +2597,8 @@ "title":"Subquery Nested by FROM", "uri":"dli_08_0176.html", "doc_type":"sqlreference", - "p_code":"126", - "code":"128" + "p_code":"287", + "code":"289" }, { "desc":"This statement is used to embed a subquery in the HAVING clause. The subquery result is used as a part of the HAVING clause.All is used to return repeated rows. 
By defaul", @@ -1157,8 +2606,8 @@ "title":"Subquery Nested by HAVING", "uri":"dli_08_0177.html", "doc_type":"sqlreference", - "p_code":"126", - "code":"129" + "p_code":"287", + "code":"290" }, { "desc":"This statement is used to nest queries in the subquery.All is used to return repeated rows. By default, all repeated rows are returned. It is followed by asterisks (*) on", @@ -1166,8 +2615,8 @@ "title":"Multi-Layer Nested Subquery", "uri":"dli_08_0178.html", "doc_type":"sqlreference", - "p_code":"126", - "code":"130" + "p_code":"287", + "code":"291" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1176,7 +2625,7 @@ "uri":"dli_08_0179.html", "doc_type":"sqlreference", "p_code":"1", - "code":"131" + "code":"292" }, { "desc":"This statement is used to specify an alias for a table or the subquery result.table_reference: Can be a table, view, or subquery.As: Is used to connect to table_reference", @@ -1184,8 +2633,8 @@ "title":"AS for Table", "uri":"dli_08_0180.html", "doc_type":"sqlreference", - "p_code":"131", - "code":"132" + "p_code":"292", + "code":"293" }, { "desc":"This statement is used to specify an alias for a column.alias: gives an alias for the attr_expr field.AS: Whether to add AS does not affect the result.The to-be-queried t", @@ -1193,8 +2642,8 @@ "title":"AS for Column", "uri":"dli_08_0181.html", "doc_type":"sqlreference", - "p_code":"131", - "code":"133" + "p_code":"292", + "code":"294" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1203,7 +2652,7 @@ "uri":"dli_08_0182.html", "doc_type":"sqlreference", "p_code":"1", - "code":"134" + "code":"295" }, { "desc":"This statement is used to return the union set of multiple query results.UNION: The set operation is used to join the head and tail of a table based on certain conditions", @@ -1211,8 +2660,8 @@ "title":"UNION", "uri":"dli_08_0183.html", "doc_type":"sqlreference", - "p_code":"134", - "code":"135" + "p_code":"295", + "code":"296" }, { "desc":"This statement is used to return the intersection set of multiple query results.INTERSECT returns the intersection of multiple query results. The number of columns return", @@ -1220,8 +2669,8 @@ "title":"INTERSECT", "uri":"dli_08_0184.html", "doc_type":"sqlreference", - "p_code":"134", - "code":"136" + "p_code":"295", + "code":"297" }, { "desc":"This statement is used to return the difference set of two query results.EXCEPT minus the sets. A EXCEPT B indicates to remove the records that exist in both A and B from", @@ -1229,8 +2678,8 @@ "title":"EXCEPT", "uri":"dli_08_0185.html", "doc_type":"sqlreference", - "p_code":"134", - "code":"137" + "p_code":"295", + "code":"298" }, { "desc":"This statement is used to define the common table expression (CTE) using WITH...AS to simplify the query and make the result easier to read and maintain.cte_name: Name of", @@ -1239,7 +2688,7 @@ "uri":"dli_08_0186.html", "doc_type":"sqlreference", "p_code":"1", - "code":"138" + "code":"299" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1248,7 +2697,7 @@ "uri":"dli_08_0187.html", "doc_type":"sqlreference", "p_code":"1", - "code":"139" + "code":"300" }, { "desc":"This statement is used to display result_expression according to the joined results of input_expression and when_expression.CASE: Subquery is supported in basic CASE stat", @@ -1256,8 +2705,8 @@ "title":"Basic CASE Statement", "uri":"dli_08_0188.html", "doc_type":"sqlreference", - "p_code":"139", - "code":"140" + "p_code":"300", + "code":"301" }, { "desc":"This statement is used to obtain the value of boolean_expression for each WHEN statement in a specified order. Then return the first result_expression with the value TRUE", @@ -1265,8 +2714,8 @@ "title":"CASE Query Statement", "uri":"dli_08_0189.html", "doc_type":"sqlreference", - "p_code":"139", - "code":"141" + "p_code":"300", + "code":"302" }, { "desc":"This statement is used together with the window function. The OVER statement is used to group data and sort the data within the group. The window function is used to gene", @@ -1275,16 +2724,16 @@ "uri":"dli_08_0190.html", "doc_type":"sqlreference", "p_code":"1", - "code":"142" + "code":"303" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", "product_code":"dli", - "title":"Flink Opensource SQL 1.12 Syntax Reference", + "title":"Flink OpenSource SQL 1.12 Syntax Reference", "uri":"dli_08_0370.html", "doc_type":"sqlreference", "p_code":"", - "code":"143" + "code":"304" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1292,8 +2741,8 @@ "title":"Constraints and Definitions", "uri":"dli_08_0371.html", "doc_type":"sqlreference", - "p_code":"143", - "code":"144" + "p_code":"304", + "code":"305" }, { "desc":"STRING, BOOLEAN, BYTES, DECIMAL, TINYINT, SMALLINT, INTEGER, BIGINT, FLOAT, DOUBLE, DATE, TIME, TIMESTAMP, TIMESTAMP WITH LOCAL TIME ZONE, INTERVAL, ARRAY, MULTISET, MAP,", @@ -1301,8 +2750,8 @@ "title":"Supported Data Types", "uri":"dli_08_0372.html", "doc_type":"sqlreference", - "p_code":"144", - "code":"145" + "p_code":"305", + "code":"306" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1310,8 +2759,8 @@ "title":"Syntax", "uri":"dli_08_0373.html", "doc_type":"sqlreference", - "p_code":"144", - "code":"146" + "p_code":"305", + "code":"307" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1319,8 +2768,8 @@ "title":"Data Definition Language (DDL)", "uri":"dli_08_0374.html", "doc_type":"sqlreference", - "p_code":"146", - "code":"147" + "p_code":"307", + "code":"308" }, { "desc":"Create a table with a specified name.COMPUTED COLUMNA computed column is a virtual column generated using column_name AS computed_column_expression. 
A computed column eva", @@ -1328,8 +2777,8 @@ "title":"CREATE TABLE", "uri":"dli_08_0375.html", "doc_type":"sqlreference", - "p_code":"147", - "code":"148" + "p_code":"308", + "code":"309" }, { "desc":"Create a view with multiple layers nested in it to simplify the development process.IF NOT EXISTSIf the view already exists, nothing happens.Create a view named viewName.", @@ -1337,8 +2786,8 @@ "title":"CREATE VIEW", "uri":"dli_08_0376.html", "doc_type":"sqlreference", - "p_code":"147", - "code":"149" + "p_code":"308", + "code":"310" }, { "desc":"Create a user-defined function.For details about how to create a user-defined function, see User-Defined Functions (UDFs).IF NOT EXISTSIf the function already exists, not", @@ -1346,17 +2795,17 @@ "title":"CREATE FUNCTION", "uri":"dli_08_0377.html", "doc_type":"sqlreference", - "p_code":"147", - "code":"150" + "p_code":"308", + "code":"311" }, { - "desc":"SyntaxPrecautionsFlink SQL uses a lexical policy for identifier (table, attribute, function names) similar to Java:The case of identifiers is preserved whether or not the", + "desc":"SyntaxPrecautionsFlink SQL uses a lexical policy for identifier (table, attribute, function names) similar to Java:The case of identifiers is preserved whether they are q", "product_code":"dli", "title":"Data Manipulation Language (DML)", "uri":"dli_08_0378.html", "doc_type":"sqlreference", - "p_code":"146", - "code":"151" + "p_code":"307", + "code":"312" }, { "desc":"This section describes the Flink open source SQL 1.12 syntax supported by DLI. For details about the parameters and examples, see the syntax description.", @@ -1364,8 +2813,8 @@ "title":"Overview", "uri":"dli_08_0379.html", "doc_type":"sqlreference", - "p_code":"143", - "code":"152" + "p_code":"304", + "code":"313" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1373,8 +2822,8 @@ "title":"DDL Syntax", "uri":"dli_08_0380.html", "doc_type":"sqlreference", - "p_code":"143", - "code":"153" + "p_code":"304", + "code":"314" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1382,8 +2831,8 @@ "title":"Creating Source Tables", "uri":"dli_08_0381.html", "doc_type":"sqlreference", - "p_code":"153", - "code":"154" + "p_code":"314", + "code":"315" }, { "desc":"DataGen is used to generate random data for debugging and testing.NoneWhen you create a DataGen table, the table field type cannot be Array, Map, or Row. You can use COMP", @@ -1391,8 +2840,8 @@ "title":"DataGen Source Table", "uri":"dli_08_0382.html", "doc_type":"sqlreference", - "p_code":"154", - "code":"155" + "p_code":"315", + "code":"316" }, { "desc":"DLI reads data of Flink jobs from GaussDB(DWS). GaussDB(DWS) database kernel is compliant with PostgreSQL. The PostgreSQL database can store data of more complex types an", @@ -1400,8 +2849,8 @@ "title":"GaussDB(DWS) Source Table", "uri":"dli_08_0383.html", "doc_type":"sqlreference", - "p_code":"154", - "code":"156" + "p_code":"315", + "code":"317" }, { "desc":"Create a source stream to obtain data from HBase as input for jobs. 
HBase is a column-oriented distributed cloud storage system that features enhanced reliability, excell", @@ -1409,8 +2858,8 @@ "title":"HBase Source Table", "uri":"dli_08_0384.html", "doc_type":"sqlreference", - "p_code":"154", - "code":"157" + "p_code":"315", + "code":"318" }, { "desc":"The JDBC connector is a Flink's built-in connector to read data from a database.An enhanced datasource connection with the instances has been established, so that you can", @@ -1418,8 +2867,8 @@ "title":"JDBC Source Table", "uri":"dli_08_0385.html", "doc_type":"sqlreference", - "p_code":"154", - "code":"158" + "p_code":"315", + "code":"319" }, { "desc":"Create a source stream to obtain data from Kafka as input data for jobs.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscripti", @@ -1427,8 +2876,8 @@ "title":"Kafka Source Table", "uri":"dli_08_0386.html", "doc_type":"sqlreference", - "p_code":"154", - "code":"159" + "p_code":"315", + "code":"320" }, { "desc":"The MySQL CDC source table, that is, the MySQL streaming source table, reads all historical data in the database first and then smoothly switches data read to the Binlog ", @@ -1436,8 +2885,8 @@ "title":"MySQL CDC Source Table", "uri":"dli_08_0387.html", "doc_type":"sqlreference", - "p_code":"154", - "code":"160" + "p_code":"315", + "code":"321" }, { "desc":"The Postgres CDC source table, that is, Postgres streaming source table, is used to read the full snapshot data and changed data of the PostgreSQL database in sequence. 
T", @@ -1445,8 +2894,8 @@ "title":"Postgres CDC Source Table", "uri":"dli_08_0388.html", "doc_type":"sqlreference", - "p_code":"154", - "code":"161" + "p_code":"315", + "code":"322" }, { "desc":"Create a source stream to obtain data from Redis as input for jobs.An enhanced datasource connection has been created for DLI to connect to the Redis database, so that yo", @@ -1454,8 +2903,8 @@ "title":"Redis Source Table", "uri":"dli_08_0389.html", "doc_type":"sqlreference", - "p_code":"154", - "code":"162" + "p_code":"315", + "code":"323" }, { "desc":"Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscription system. It delivers high throughput and built-in partitions and provi", @@ -1463,8 +2912,8 @@ "title":"Upsert Kafka Source Table", "uri":"dli_08_0390.html", "doc_type":"sqlreference", - "p_code":"154", - "code":"163" + "p_code":"315", + "code":"324" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1472,8 +2921,8 @@ "title":"Creating Result Tables", "uri":"dli_08_0391.html", "doc_type":"sqlreference", - "p_code":"153", - "code":"164" + "p_code":"314", + "code":"325" }, { "desc":"The BlackHole connector allows for swallowing all input records. It is designed for high-performance testing and UDF output. It is not a substantive sink. The BlackHole r", @@ -1481,8 +2930,8 @@ "title":"BlackHole Result Table", "uri":"dli_08_0392.html", "doc_type":"sqlreference", - "p_code":"164", - "code":"165" + "p_code":"325", + "code":"326" }, { "desc":"DLI can output Flink job data to the ClickHouse database. ClickHouse is a column-based database oriented to online analysis and processing. 
It supports SQL query and prov", @@ -1490,8 +2939,8 @@ "title":"ClickHouse Result Table", "uri":"dli_08_0393.html", "doc_type":"sqlreference", - "p_code":"164", - "code":"166" + "p_code":"325", + "code":"327" }, { "desc":"DLI outputs the Flink job output data to GaussDB(DWS). GaussDB(DWS) database kernel is compliant with PostgreSQL. The PostgreSQL database can store data of more complex t", @@ -1499,8 +2948,8 @@ "title":"GaussDB(DWS) Result Table", "uri":"dli_08_0394.html", "doc_type":"sqlreference", - "p_code":"164", - "code":"167" + "p_code":"325", + "code":"328" }, { "desc":"DLI outputs Flink job output data to Elasticsearch of Cloud Search Service (CSS). Elasticsearch is a popular enterprise-class Lucene-powered search server and provides th", @@ -1508,8 +2957,8 @@ "title":"Elasticsearch Result Table", "uri":"dli_08_0395.html", "doc_type":"sqlreference", - "p_code":"164", - "code":"168" + "p_code":"325", + "code":"329" }, { "desc":"DLI outputs the job data to HBase. HBase is a column-oriented distributed cloud storage system that features enhanced reliability, excellent performance, and elastic scal", @@ -1517,8 +2966,8 @@ "title":"HBase Result Table", "uri":"dli_08_0396.html", "doc_type":"sqlreference", - "p_code":"164", - "code":"169" + "p_code":"325", + "code":"330" }, { "desc":"DLI outputs the Flink job output data to RDS through the JDBC result table.An enhanced datasource connection with the instances has been established, so that you can conf", @@ -1526,8 +2975,8 @@ "title":"JDBC Result Table", "uri":"dli_08_0397.html", "doc_type":"sqlreference", - "p_code":"164", - "code":"170" + "p_code":"325", + "code":"331" }, { "desc":"DLI outputs the Flink job output data to Kafka through the Kafka result table.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subs", @@ -1535,8 +2984,8 @@ "title":"Kafka Result Table", "uri":"dli_08_0398.html", "doc_type":"sqlreference", - "p_code":"164", - "code":"171" + "p_code":"325", + 
"code":"332" }, { "desc":"The Print connector is used to print output data to the error file or TaskManager file, making it easier for you to view the result in code debugging.NoneThe Print result", @@ -1544,8 +2993,8 @@ "title":"Print Result Table", "uri":"dli_08_0399.html", "doc_type":"sqlreference", - "p_code":"164", - "code":"172" + "p_code":"325", + "code":"333" }, { "desc":"DLI outputs the Flink job output data to Redis. Redis is a key-value storage system that supports multiple types of data structures. It can be used in scenarios such as c", @@ -1553,8 +3002,8 @@ "title":"Redis Result Table", "uri":"dli_08_0400.html", "doc_type":"sqlreference", - "p_code":"164", - "code":"173" + "p_code":"325", + "code":"334" }, { "desc":"Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscription system. It delivers high throughput and built-in partitions and provi", @@ -1562,8 +3011,17 @@ "title":"Upsert Kafka Result Table", "uri":"dli_08_0401.html", "doc_type":"sqlreference", - "p_code":"164", - "code":"174" + "p_code":"325", + "code":"335" + }, + { + "desc":"The FileSystem result (sink) table is used to export data to the HDFS or OBS file system. It is applicable to scenarios such as data dumping, big data analysis, data back", + "product_code":"dli", + "title":"FileSystem Result Table", + "uri":"dli_08_0439.html", + "doc_type":"sqlreference", + "p_code":"325", + "code":"336" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1571,8 +3029,8 @@ "title":"Creating Dimension Tables", "uri":"dli_08_0402.html", "doc_type":"sqlreference", - "p_code":"153", - "code":"175" + "p_code":"314", + "code":"337" }, { "desc":"Create a GaussDB(DWS) table to connect to source streams for wide table generation.Ensure that you have created a GaussDB(DWS) cluster using your account.A DWS database t", @@ -1580,8 +3038,8 @@ "title":"GaussDB(DWS) Dimension Table", "uri":"dli_08_0403.html", "doc_type":"sqlreference", - "p_code":"175", - "code":"176" + "p_code":"337", + "code":"338" }, { "desc":"Create a Hbase dimension table to connect to the source streams for wide table generation.An enhanced datasource connection has been created for DLI to connect to HBase, ", @@ -1589,8 +3047,8 @@ "title":"HBase Dimension Table", "uri":"dli_08_0404.html", "doc_type":"sqlreference", - "p_code":"175", - "code":"177" + "p_code":"337", + "code":"339" }, { "desc":"Create a JDBC dimension table to connect to the source stream.You have created a JDBC instance for your account.When you create a Flink OpenSource SQL job, set Flink Vers", @@ -1598,8 +3056,8 @@ "title":"JDBC Dimension Table", "uri":"dli_08_0405.html", "doc_type":"sqlreference", - "p_code":"175", - "code":"178" + "p_code":"337", + "code":"340" }, { "desc":"Create a Redis table to connect to source streams for wide table generation.An enhanced datasource connection with Redis has been established, so that you can configure s", @@ -1607,8 +3065,8 @@ "title":"Redis Dimension Table", "uri":"dli_08_0406.html", "doc_type":"sqlreference", - "p_code":"175", - "code":"179" + "p_code":"337", + "code":"341" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1616,8 +3074,8 @@ "title":"Format", "uri":"dli_08_0407.html", "doc_type":"sqlreference", - "p_code":"153", - "code":"180" + "p_code":"314", + "code":"342" }, { "desc":"Apache Avro is supported for you to read and write Avro data based on an Avro schema with Flink. The Avro schema is derived from the table schema.KafkaUpsert KafkaCurrent", @@ -1625,8 +3083,8 @@ "title":"Avro", "uri":"dli_08_0408.html", "doc_type":"sqlreference", - "p_code":"180", - "code":"181" + "p_code":"342", + "code":"343" }, { "desc":"Canal is a Changelog Data Capture (CDC) tool that can stream changes in real-time from MySQL into other systems. Canal provides a unified format schema for changelog and ", @@ -1634,8 +3092,8 @@ "title":"Canal", "uri":"dli_08_0409.html", "doc_type":"sqlreference", - "p_code":"180", - "code":"182" + "p_code":"342", + "code":"344" }, { "desc":"The Avro Schema Registry (avro-confluent) format allows you to read records that were serialized by the io.confluent.kafka.serializers.KafkaAvroSerializer and to write re", @@ -1643,8 +3101,8 @@ "title":"Confluent Avro", "uri":"dli_08_0410.html", "doc_type":"sqlreference", - "p_code":"180", - "code":"183" + "p_code":"342", + "code":"345" }, { "desc":"The CSV format allows you to read and write CSV data based on a CSV schema. Currently, the CSV schema is derived from table schema.KafkaUpsert KafkaUse Kafka to send data", @@ -1652,8 +3110,8 @@ "title":"CSV", "uri":"dli_08_0411.html", "doc_type":"sqlreference", - "p_code":"180", - "code":"184" + "p_code":"342", + "code":"346" }, { "desc":"Debezium is a Changelog Data Capture (CDC) tool that can stream changes in real-time from other databases into Kafka. 
Debezium provides a unified format schema for change", @@ -1661,8 +3119,8 @@ "title":"Debezium", "uri":"dli_08_0412.html", "doc_type":"sqlreference", - "p_code":"180", - "code":"185" + "p_code":"342", + "code":"347" }, { "desc":"The JSON format allows you to read and write JSON data based on a JSON schema. Currently, the JSON schema is derived from table schema.KafkaUpsert KafkaElasticsearchIn th", @@ -1670,8 +3128,8 @@ "title":"JSON", "uri":"dli_08_0413.html", "doc_type":"sqlreference", - "p_code":"180", - "code":"186" + "p_code":"342", + "code":"348" }, { "desc":"Flink supports to interpret Maxwell JSON messages as INSERT/UPDATE/DELETE messages into Flink SQL system. This is useful in many cases to leverage this feature,such as:Sy", @@ -1679,8 +3137,8 @@ "title":"Maxwell", "uri":"dli_08_0414.html", "doc_type":"sqlreference", - "p_code":"180", - "code":"187" + "p_code":"342", + "code":"349" }, { "desc":"The raw format allows you to read and write raw (byte based) values as a single column.Note: This format encodes null values as null of the byte[] type. This may have lim", @@ -1688,8 +3146,8 @@ "title":"Raw", "uri":"dli_08_0415.html", "doc_type":"sqlreference", - "p_code":"180", - "code":"188" + "p_code":"342", + "code":"350" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1697,8 +3155,8 @@ "title":"DML Snytax", "uri":"dli_08_0416.html", "doc_type":"sqlreference", - "p_code":"143", - "code":"189" + "p_code":"304", + "code":"351" }, { "desc":"SyntaxDescriptionSELECT is used to select data from a table.ALL indicates that all results are returned.DISTINCT indicates that the duplicated results are removed.Precaut", @@ -1706,8 +3164,8 @@ "title":"SELECT", "uri":"dli_08_0417.html", "doc_type":"sqlreference", - "p_code":"189", - "code":"190" + "p_code":"351", + "code":"352" }, { "desc":"SyntaxDescriptionUNION is used to return the union set of multiple query results.INTERSECT is used to return the intersection of multiple query results.EXCEPT is used to ", @@ -1715,8 +3173,8 @@ "title":"Set Operations", "uri":"dli_08_0418.html", "doc_type":"sqlreference", - "p_code":"189", - "code":"191" + "p_code":"351", + "code":"353" }, { "desc":"DescriptionGroup Window is defined in GROUP BY. One record is generated from each group. Group Window involves the following functions:Array functionsArray functionsGroup", @@ -1724,8 +3182,8 @@ "title":"Window", "uri":"dli_08_0419.html", "doc_type":"sqlreference", - "p_code":"189", - "code":"192" + "p_code":"351", + "code":"354" }, { "desc":"SyntaxPrecautionsCurrently, only equi-joins are supported, for example, joins that have at least one conjunctive condition with an equality predicate. 
Arbitrary cross or ", @@ -1733,8 +3191,8 @@ "title":"JOIN", "uri":"dli_08_0420.html", "doc_type":"sqlreference", - "p_code":"189", - "code":"193" + "p_code":"351", + "code":"355" }, { "desc":"FunctionThis clause is used to sort data in ascending order on a time attribute.PrecautionsCurrently, only sorting by time attribute is supported.ExampleSort data in asce", @@ -1742,8 +3200,8 @@ "title":"OrderBy & Limit", "uri":"dli_08_0421.html", "doc_type":"sqlreference", - "p_code":"189", - "code":"194" + "p_code":"351", + "code":"356" }, { "desc":"Top-N queries ask for the N smallest or largest values ordered by columns. Both smallest and largest values sets are considered Top-N queries. Top-N queries are useful in", @@ -1751,8 +3209,8 @@ "title":"Top-N", "uri":"dli_08_0422.html", "doc_type":"sqlreference", - "p_code":"189", - "code":"195" + "p_code":"351", + "code":"357" }, { "desc":"Deduplication removes rows that duplicate over a set of columns, keeping only the first one or the last one.ROW_NUMBER(): Assigns a unique, sequential number to each row,", @@ -1760,8 +3218,8 @@ "title":"Deduplication", "uri":"dli_08_0423.html", "doc_type":"sqlreference", - "p_code":"189", - "code":"196" + "p_code":"351", + "code":"358" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1769,8 +3227,8 @@ "title":"Functions", "uri":"dli_08_0424.html", "doc_type":"sqlreference", - "p_code":"143", - "code":"197" + "p_code":"304", + "code":"359" }, { "desc":"DLI supports the following three types of user-defined functions (UDFs):Regular UDF: takes in one or more input parameters and returns a single result.User-defined table-", @@ -1778,8 +3236,8 @@ "title":"User-Defined Functions (UDFs)", "uri":"dli_08_0425.html", "doc_type":"sqlreference", - "p_code":"197", - "code":"198" + "p_code":"359", + "code":"360" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1787,8 +3245,8 @@ "title":"Built-In Functions", "uri":"dli_08_0426.html", "doc_type":"sqlreference", - "p_code":"197", - "code":"199" + "p_code":"359", + "code":"361" }, { "desc":"All data types can be compared by using relational operators and the result is returned as a BOOLEAN value.Relationship operators are binary operators. Two compared data ", @@ -1796,8 +3254,8 @@ "title":"Mathematical Operation Functions", "uri":"dli_08_0427.html", "doc_type":"sqlreference", - "p_code":"199", - "code":"200" + "p_code":"361", + "code":"362" }, { "desc":"SyntaxExampleTest input data.Test the data source kafka. 
The message content is as follows:{name:James,age:24,sex:male,grade:{math:95,science:[80,85],english:100}}\n{name:", @@ -1805,8 +3263,8 @@ "title":"String Functions", "uri":"dli_08_0428.html", "doc_type":"sqlreference", - "p_code":"199", - "code":"201" + "p_code":"361", + "code":"363" }, { "desc":"Table 1 lists the time functions supported by Flink OpenSource SQL.FunctionReturns a SQL date parsed from string in form of yyyy-MM-dd.Returns a SQL date parsed from stri", @@ -1814,8 +3272,8 @@ "title":"Temporal Functions", "uri":"dli_08_0429.html", "doc_type":"sqlreference", - "p_code":"199", - "code":"202" + "p_code":"361", + "code":"364" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1823,17 +3281,17 @@ "title":"Conditional Functions", "uri":"dli_08_0430.html", "doc_type":"sqlreference", - "p_code":"199", - "code":"203" + "p_code":"361", + "code":"365" }, { - "desc":"This function is used to forcibly convert types.If the input is NULL, NULL is returned.The following example converts the amount value to an integer.Flink jobs do not sup", + "desc":"This function is used to forcibly convert types.If the input is NULL, NULL is returned.The cast function does not support converting a string to the JSON format.The follo", "product_code":"dli", "title":"Type Conversion Functions", "uri":"dli_08_0431.html", "doc_type":"sqlreference", - "p_code":"199", - "code":"204" + "p_code":"361", + "code":"366" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1841,8 +3299,8 @@ "title":"Collection Functions", "uri":"dli_08_0432.html", "doc_type":"sqlreference", - "p_code":"199", - "code":"205" + "p_code":"361", + "code":"367" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1850,8 +3308,8 @@ "title":"Value Construction Functions", "uri":"dli_08_0433.html", "doc_type":"sqlreference", - "p_code":"199", - "code":"206" + "p_code":"361", + "code":"368" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1859,8 +3317,8 @@ "title":"Value Access Functions", "uri":"dli_08_0434.html", "doc_type":"sqlreference", - "p_code":"199", - "code":"207" + "p_code":"361", + "code":"369" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1868,8 +3326,8 @@ "title":"Hash Functions", "uri":"dli_08_0435.html", "doc_type":"sqlreference", - "p_code":"199", - "code":"208" + "p_code":"361", + "code":"370" }, { "desc":"An aggregate function performs a calculation operation on a set of input values and returns a value. 
For example, the COUNT function counts the number of rows retrieved b", @@ -1877,8 +3335,8 @@ "title":"Aggregate Functions", "uri":"dli_08_0436.html", "doc_type":"sqlreference", - "p_code":"199", - "code":"209" + "p_code":"361", + "code":"371" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1886,8 +3344,8 @@ "title":"Table-Valued Functions", "uri":"dli_08_0437.html", "doc_type":"sqlreference", - "p_code":"199", - "code":"210" + "p_code":"361", + "code":"372" }, { "desc":"The string_split function splits a target string into substrings based on the specified separator and returns a substring list.Create a Flink OpenSource SQL job by referr", @@ -1895,8 +3353,8 @@ "title":"string_split", "uri":"dli_08_0438.html", "doc_type":"sqlreference", - "p_code":"210", - "code":"211" + "p_code":"372", + "code":"373" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1905,7 +3363,7 @@ "uri":"dli_08_0289.html", "doc_type":"sqlreference", "p_code":"", - "code":"212" + "code":"374" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1913,8 +3371,8 @@ "title":"Constraints and Definitions", "uri":"dli_08_0290.html", "doc_type":"sqlreference", - "p_code":"212", - "code":"213" + "p_code":"374", + "code":"375" }, { "desc":"STRING, BOOLEAN, BYTES, DECIMAL, TINYINT, SMALLINT, INTEGER, BIGINT, FLOAT, DOUBLE, DATE, TIME, TIMESTAMP, TIMESTAMP WITH LOCAL TIME ZONE, INTERVAL, ARRAY, MULTISET, MAP,", @@ -1922,8 +3380,8 @@ "title":"Supported Data Types", "uri":"dli_08_0291.html", "doc_type":"sqlreference", - "p_code":"213", - "code":"214" + "p_code":"375", + "code":"376" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1931,8 +3389,8 @@ "title":"Syntax Definition", "uri":"dli_08_0292.html", "doc_type":"sqlreference", - "p_code":"213", - "code":"215" + "p_code":"375", + "code":"377" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1940,8 +3398,8 @@ "title":"Data Definition Language (DDL)", "uri":"dli_08_0293.html", "doc_type":"sqlreference", - "p_code":"215", - "code":"216" + "p_code":"377", + "code":"378" }, { "desc":"This clause is used to create a table with a specified name.COMPUTED COLUMNA computed column is a virtual column generated using column_name AS computed_column_expression", @@ -1949,8 +3407,8 @@ "title":"CREATE TABLE", "uri":"dli_08_0294.html", "doc_type":"sqlreference", - "p_code":"216", - "code":"217" + "p_code":"378", + "code":"379" }, { "desc":"Create a view with multiple layers nested in it to simplify the development process.IF NOT EXISTSIf the view already exists, nothing happens.Create a view named viewName.", @@ -1958,8 +3416,8 @@ "title":"CREATE VIEW", "uri":"dli_08_0295.html", "doc_type":"sqlreference", - "p_code":"216", - "code":"218" + "p_code":"378", + "code":"380" }, { "desc":"Create a user-defined function.IF NOT EXISTSIf the function already exists, nothing happens.LANGUAGE JAVA|SCALALanguage tag is used to instruct Flink runtime how to execu", @@ -1967,8 +3425,8 @@ "title":"CREATE FUNCTION", "uri":"dli_08_0296.html", "doc_type":"sqlreference", - "p_code":"216", - "code":"219" + "p_code":"378", + "code":"381" }, { "desc":"SyntaxPrecautionsFlink SQL uses a lexical policy for identifier (table, attribute, function names) similar to Java:The case of identifiers is preserved whether they are q", @@ -1976,8 +3434,8 @@ "title":"Data Manipulation Language (DML)", "uri":"dli_08_0297.html", "doc_type":"sqlreference", - "p_code":"215", - "code":"220" + "p_code":"377", + "code":"382" }, { "desc":"This section describes the Flink OpenSource SQL syntax supported by DLI. 
For details about the parameters and examples, see the syntax description.", @@ -1985,8 +3443,8 @@ "title":"Flink OpenSource SQL 1.10 Syntax", "uri":"dli_08_0298.html", "doc_type":"sqlreference", - "p_code":"212", - "code":"221" + "p_code":"374", + "code":"383" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -1994,8 +3452,8 @@ "title":"Data Definition Language (DDL)", "uri":"dli_08_0299.html", "doc_type":"sqlreference", - "p_code":"212", - "code":"222" + "p_code":"374", + "code":"384" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2003,8 +3461,8 @@ "title":"Creating a Source Table", "uri":"dli_08_0300.html", "doc_type":"sqlreference", - "p_code":"222", - "code":"223" + "p_code":"384", + "code":"385" }, { "desc":"Create a source stream to obtain data from Kafka as input data for jobs.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscripti", @@ -2012,8 +3470,8 @@ "title":"Kafka Source Table", "uri":"dli_08_0301.html", "doc_type":"sqlreference", - "p_code":"223", - "code":"224" + "p_code":"385", + "code":"386" }, { "desc":"Create a source stream to read data from DIS. DIS accesses user data and Flink job reads data from the DIS stream as input data for jobs. 
Flink jobs can quickly remove da", @@ -2021,8 +3479,8 @@ "title":"DIS Source Table", "uri":"dli_08_0302.html", "doc_type":"sqlreference", - "p_code":"223", - "code":"225" + "p_code":"385", + "code":"387" }, { "desc":"The JDBC connector is a Flink's built-in connector to read data from a database.An enhanced datasource connection with the database has been established, so that you can ", @@ -2030,8 +3488,8 @@ "title":"JDBC Source Table", "uri":"dli_08_0303.html", "doc_type":"sqlreference", - "p_code":"223", - "code":"226" + "p_code":"385", + "code":"388" }, { "desc":"DLI reads data of Flink jobs from GaussDB(DWS). GaussDB(DWS) database kernel is compliant with PostgreSQL. The PostgreSQL database can store data of more complex types an", @@ -2039,8 +3497,8 @@ "title":"GaussDB(DWS) Source Table", "uri":"dli_08_0304.html", "doc_type":"sqlreference", - "p_code":"223", - "code":"227" + "p_code":"385", + "code":"389" }, { "desc":"Create a source stream to obtain data from Redis as input for jobs.An enhanced datasource connection with Redis has been established, so that you can configure security g", @@ -2048,8 +3506,8 @@ "title":"Redis Source Table", "uri":"dli_08_0305.html", "doc_type":"sqlreference", - "p_code":"223", - "code":"228" + "p_code":"385", + "code":"390" }, { "desc":"Create a source stream to obtain data from HBase as input for jobs. 
HBase is a column-oriented distributed cloud storage system that features enhanced reliability, excell", @@ -2057,8 +3515,8 @@ "title":"HBase Source Table", "uri":"dli_08_0306.html", "doc_type":"sqlreference", - "p_code":"223", - "code":"229" + "p_code":"385", + "code":"391" }, { "desc":"You can call APIs to obtain data from the cloud ecosystem or an open source ecosystem and use the obtained data as input of Flink jobs.The customized source class needs t", @@ -2066,8 +3524,8 @@ "title":"userDefined Source Table", "uri":"dli_08_0358.html", "doc_type":"sqlreference", - "p_code":"223", - "code":"230" + "p_code":"385", + "code":"392" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2075,8 +3533,8 @@ "title":"Creating a Result Table", "uri":"dli_08_0307.html", "doc_type":"sqlreference", - "p_code":"222", - "code":"231" + "p_code":"384", + "code":"393" }, { "desc":"DLI exports Flink job data to ClickHouse result tables.ClickHouse is a column-based database oriented to online analysis and processing. It supports SQL query and provide", @@ -2084,8 +3542,8 @@ "title":"ClickHouse Result Table", "uri":"dli_08_0344.html", "doc_type":"sqlreference", - "p_code":"231", - "code":"232" + "p_code":"393", + "code":"394" }, { "desc":"DLI exports the output data of the Flink job to Kafka.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscription system. 
It deli", @@ -2093,8 +3551,8 @@ "title":"Kafka Result Table", "uri":"dli_08_0308.html", "doc_type":"sqlreference", - "p_code":"231", - "code":"233" + "p_code":"393", + "code":"395" }, { "desc":"DLI exports the output data of the Flink job to Kafka in upsert mode.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscription ", @@ -2102,8 +3560,8 @@ "title":"Upsert Kafka Result Table", "uri":"dli_08_0309.html", "doc_type":"sqlreference", - "p_code":"231", - "code":"234" + "p_code":"393", + "code":"396" }, { "desc":"DLI writes the Flink job output data into DIS. The data is filtered and imported to the DIS stream for future processing.DIS addresses the challenge of transmitting data ", @@ -2111,8 +3569,8 @@ "title":"DIS Result Table", "uri":"dli_08_0310.html", "doc_type":"sqlreference", - "p_code":"231", - "code":"235" + "p_code":"393", + "code":"397" }, { "desc":"DLI exports the output data of the Flink job to RDS.An enhanced datasource connection with the database has been established, so that you can configure security group rul", @@ -2120,8 +3578,8 @@ "title":"JDBC Result Table", "uri":"dli_08_0311.html", "doc_type":"sqlreference", - "p_code":"231", - "code":"236" + "p_code":"393", + "code":"398" }, { "desc":"DLI outputs the Flink job output data to GaussDB(DWS). GaussDB(DWS) database kernel is compliant with PostgreSQL. The PostgreSQL database can store data of more complex t", @@ -2129,8 +3587,8 @@ "title":"GaussDB(DWS) Result Table", "uri":"dli_08_0312.html", "doc_type":"sqlreference", - "p_code":"231", - "code":"237" + "p_code":"393", + "code":"399" }, { "desc":"DLI exports the output data of the Flink job to Redis. Redis is a storage system that supports multiple types of data structures such as key-value. 
It can be used in scen", @@ -2138,8 +3596,8 @@ "title":"Redis Result Table", "uri":"dli_08_0313.html", "doc_type":"sqlreference", - "p_code":"231", - "code":"238" + "p_code":"393", + "code":"400" }, { "desc":"DLI exports Flink job output data to SMN.SMN provides reliable and flexible large-scale message notification services to DLI. It significantly simplifies system coupling ", @@ -2147,8 +3605,8 @@ "title":"SMN Result Table", "uri":"dli_08_0314.html", "doc_type":"sqlreference", - "p_code":"231", - "code":"239" + "p_code":"393", + "code":"401" }, { "desc":"DLI outputs the job data to HBase. HBase is a column-oriented distributed cloud storage system that features enhanced reliability, excellent performance, and elastic scal", @@ -2156,8 +3614,8 @@ "title":"HBase Result Table", "uri":"dli_08_0315.html", "doc_type":"sqlreference", - "p_code":"231", - "code":"240" + "p_code":"393", + "code":"402" }, { "desc":"DLI exports Flink job output data to Elasticsearch of Cloud Search Service (CSS). Elasticsearch is a popular enterprise-class Lucene-powered search server and provides th", @@ -2165,8 +3623,8 @@ "title":"Elasticsearch Result Table", "uri":"dli_08_0316.html", "doc_type":"sqlreference", - "p_code":"231", - "code":"241" + "p_code":"393", + "code":"403" }, { "desc":"OpenTSDB is a distributed, scalable time series database based on HBase. 
OpenTSDB is designed to collect monitoring information of a large-scale cluster and query data in", @@ -2174,8 +3632,8 @@ "title":"OpenTSDB Result Table", "uri":"dli_08_0348.html", "doc_type":"sqlreference", - "p_code":"231", - "code":"242" + "p_code":"393", + "code":"404" }, { "desc":"Write your Java code to insert the processed data into a specified database supported by your cloud service.Implement the custom sink class :The custom sink class is inhe", @@ -2183,8 +3641,8 @@ "title":"User-defined Result Table", "uri":"dli_08_0347.html", "doc_type":"sqlreference", - "p_code":"231", - "code":"243" + "p_code":"393", + "code":"405" }, { "desc":"The print connector exports your data output to the error file or the out file of TaskManager. It is mainly used for code debugging and output viewing.Read data from Kafk", @@ -2192,8 +3650,8 @@ "title":"Print Result Table", "uri":"dli_08_0345.html", "doc_type":"sqlreference", - "p_code":"231", - "code":"244" + "p_code":"393", + "code":"406" }, { "desc":"You can create a file system result table to export data to a file system such as HDFS or OBS. After the data is generated, a non-DLI table can be created directly accord", @@ -2201,8 +3659,8 @@ "title":"File System Result Table", "uri":"dli_08_0346.html", "doc_type":"sqlreference", - "p_code":"231", - "code":"245" + "p_code":"393", + "code":"407" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2210,8 +3668,8 @@ "title":"Creating a Dimension Table", "uri":"dli_08_0317.html", "doc_type":"sqlreference", - "p_code":"222", - "code":"246" + "p_code":"384", + "code":"408" }, { "desc":"Create a JDBC dimension table to connect to the source stream.You have created a JDBC instance for your account.The RDS table is used to connect to the source stream.CREA", @@ -2219,8 +3677,8 @@ "title":"JDBC Dimension Table", "uri":"dli_08_0318.html", "doc_type":"sqlreference", - "p_code":"246", - "code":"247" + "p_code":"408", + "code":"409" }, { "desc":"Create a GaussDB(DWS) dimension table to connect to the input stream.You have created a GaussDB(DWS) instance for your account.Use an RDS table to connect to the source s", @@ -2228,8 +3686,8 @@ "title":"GaussDB(DWS) Dimension Table", "uri":"dli_08_0319.html", "doc_type":"sqlreference", - "p_code":"246", - "code":"248" + "p_code":"408", + "code":"410" }, { "desc":"Create a Hbase dimension table to connect to the source stream.An enhanced datasource connection has been created for DLI to connect to HBase, so that jobs can run on the", @@ -2237,8 +3695,8 @@ "title":"HBase Dimension Table", "uri":"dli_08_0320.html", "doc_type":"sqlreference", - "p_code":"246", - "code":"249" + "p_code":"408", + "code":"411" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2246,8 +3704,8 @@ "title":"Data Manipulation Language (DML)", "uri":"dli_08_0321.html", "doc_type":"sqlreference", - "p_code":"212", - "code":"250" + "p_code":"374", + "code":"412" }, { "desc":"SyntaxDescriptionThis clause is used to select data from a table.ALL indicates that all results are returned.DISTINCT indicates that the duplicated results are removed.Pr", @@ -2255,8 +3713,8 @@ "title":"SELECT", "uri":"dli_08_0322.html", "doc_type":"sqlreference", - "p_code":"250", - "code":"251" + "p_code":"412", + "code":"413" }, { "desc":"SyntaxDescriptionUNION is used to return the union set of multiple query results.INTERSECT is used to return the intersection of multiple query results.EXCEPT is used to ", @@ -2264,8 +3722,8 @@ "title":"Set Operations", "uri":"dli_08_0323.html", "doc_type":"sqlreference", - "p_code":"250", - "code":"252" + "p_code":"412", + "code":"414" }, { "desc":"DescriptionGroup Window is defined in GROUP BY. One record is generated from each group. Group Window involves the following functions:Array functionsArray functionsGroup", @@ -2273,8 +3731,8 @@ "title":"Window", "uri":"dli_08_0324.html", "doc_type":"sqlreference", - "p_code":"250", - "code":"253" + "p_code":"412", + "code":"415" }, { "desc":"SyntaxPrecautionsCurrently, only equi-joins are supported, for example, joins that have at least one conjunctive condition with an equality predicate. 
Arbitrary cross or ", @@ -2282,8 +3740,8 @@ "title":"JOIN", "uri":"dli_08_0325.html", "doc_type":"sqlreference", - "p_code":"250", - "code":"254" + "p_code":"412", + "code":"416" }, { "desc":"FunctionThis clause is used to sort data in ascending order on a time attribute.PrecautionsCurrently, only sorting by time attribute is supported.ExampleSort data in asce", @@ -2291,8 +3749,8 @@ "title":"OrderBy & Limit", "uri":"dli_08_0326.html", "doc_type":"sqlreference", - "p_code":"250", - "code":"255" + "p_code":"412", + "code":"417" }, { "desc":"Top-N queries ask for the N smallest or largest values ordered by columns. Both smallest and largest values sets are considered Top-N queries. Top-N queries are useful in", @@ -2300,8 +3758,8 @@ "title":"Top-N", "uri":"dli_08_0327.html", "doc_type":"sqlreference", - "p_code":"250", - "code":"256" + "p_code":"412", + "code":"418" }, { "desc":"Deduplication removes rows that duplicate over a set of columns, keeping only the first one or the last one.ROW_NUMBER(): Assigns a unique, sequential number to each row,", @@ -2309,8 +3767,8 @@ "title":"Deduplication", "uri":"dli_08_0328.html", "doc_type":"sqlreference", - "p_code":"250", - "code":"257" + "p_code":"412", + "code":"419" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2318,8 +3776,8 @@ "title":"Functions", "uri":"dli_08_0329.html", "doc_type":"sqlreference", - "p_code":"212", - "code":"258" + "p_code":"374", + "code":"420" }, { "desc":"DLI supports the following three types of user-defined functions (UDFs):Regular UDF: takes in one or more input parameters and returns a single result.User-defined table-", @@ -2327,8 +3785,8 @@ "title":"User-Defined Functions", "uri":"dli_08_0330.html", "doc_type":"sqlreference", - "p_code":"258", - "code":"259" + "p_code":"420", + "code":"421" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2336,8 +3794,8 @@ "title":"Built-In Functions", "uri":"dli_08_0331.html", "doc_type":"sqlreference", - "p_code":"258", - "code":"260" + "p_code":"420", + "code":"422" }, { "desc":"All data types can be compared by using relational operators and the result is returned as a BOOLEAN value.Relationship operators are binary operators. Two compared data ", @@ -2345,8 +3803,8 @@ "title":"Mathematical Operation Functions", "uri":"dli_08_0332.html", "doc_type":"sqlreference", - "p_code":"260", - "code":"261" + "p_code":"422", + "code":"423" }, { "desc":"SyntaxExampleTest input data.Test the data source kafka. 
The message content is as follows:\"{name:James,age:24,sex:male,grade:{math:95,science:[80,85],english:100}}\"\n\"{na", @@ -2354,8 +3812,8 @@ "title":"String Functions", "uri":"dli_08_0333.html", "doc_type":"sqlreference", - "p_code":"260", - "code":"262" + "p_code":"422", + "code":"424" }, { "desc":"Table 1 lists the temporal functions supported by Flink OpenSource SQL.FunctionReturns a date parsed from string in form of yyyy-MM-dd.Returns a date parsed from string i", @@ -2363,8 +3821,8 @@ "title":"Temporal Functions", "uri":"dli_08_0334.html", "doc_type":"sqlreference", - "p_code":"260", - "code":"263" + "p_code":"422", + "code":"425" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2372,8 +3830,8 @@ "title":"Conditional Functions", "uri":"dli_08_0335.html", "doc_type":"sqlreference", - "p_code":"260", - "code":"264" + "p_code":"422", + "code":"426" }, { "desc":"This function is used to forcibly convert types.If the input is NULL, NULL is returned.The following example converts the amount value to an integer.Flink jobs do not sup", @@ -2381,8 +3839,8 @@ "title":"Type Conversion Function", "uri":"dli_08_0336.html", "doc_type":"sqlreference", - "p_code":"260", - "code":"265" + "p_code":"422", + "code":"427" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2390,8 +3848,8 @@ "title":"Collection Functions", "uri":"dli_08_0337.html", "doc_type":"sqlreference", - "p_code":"260", - "code":"266" + "p_code":"422", + "code":"428" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2399,8 +3857,8 @@ "title":"Value Construction Functions", "uri":"dli_08_0338.html", "doc_type":"sqlreference", - "p_code":"260", - "code":"267" + "p_code":"422", + "code":"429" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2408,8 +3866,8 @@ "title":"Value Access Functions", "uri":"dli_08_0339.html", "doc_type":"sqlreference", - "p_code":"260", - "code":"268" + "p_code":"422", + "code":"430" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2417,8 +3875,8 @@ "title":"Hash Functions", "uri":"dli_08_0340.html", "doc_type":"sqlreference", - "p_code":"260", - "code":"269" + "p_code":"422", + "code":"431" }, { "desc":"An aggregate function performs a calculation operation on a set of input values and returns a value. 
For example, the COUNT function counts the number of rows retrieved b", @@ -2426,8 +3884,8 @@ "title":"Aggregate Function", "uri":"dli_08_0341.html", "doc_type":"sqlreference", - "p_code":"260", - "code":"270" + "p_code":"422", + "code":"432" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2435,8 +3893,8 @@ "title":"Table-Valued Functions", "uri":"dli_08_0342.html", "doc_type":"sqlreference", - "p_code":"260", - "code":"271" + "p_code":"422", + "code":"433" }, { "desc":"The split_cursor function can convert one row of records into multiple rows or convert one column of records into multiple columns. Table-valued functions can only be use", @@ -2444,8 +3902,8 @@ "title":"split_cursor", "uri":"dli_08_0357.html", "doc_type":"sqlreference", - "p_code":"271", - "code":"272" + "p_code":"433", + "code":"434" }, { "desc":"The string_split function splits a target string into substrings based on the specified separator and returns a substring list.Prepare test input data.Source table disSou", @@ -2453,8 +3911,8 @@ "title":"string_split", "uri":"dli_08_0356.html", "doc_type":"sqlreference", - "p_code":"271", - "code":"273" + "p_code":"433", + "code":"435" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2463,7 +3921,7 @@ "uri":"dli_08_0450.html", "doc_type":"sqlreference", "p_code":"", - "code":"274" + "code":"436" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2471,8 +3929,8 @@ "title":"Flink SQL Syntax", "uri":"dli_08_0233.html", "doc_type":"sqlreference", - "p_code":"274", - "code":"275" + "p_code":"436", + "code":"437" }, { "desc":"Currently, Flink SQL only supports the following operations: SELECT, FROM, WHERE, UNION, aggregation, window, JOIN between stream and table data, and JOIN between streams", @@ -2480,8 +3938,8 @@ "title":"SQL Syntax Constraints and Definitions", "uri":"dli_08_0075.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"276" + "p_code":"437", + "code":"438" }, { "desc":"This section describes the Flink SQL syntax list provided by DLI. For details about the parameters and examples, see the syntax description.", @@ -2489,8 +3947,8 @@ "title":"SQL Syntax Overview of Stream Jobs", "uri":"dli_08_0275.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"277" + "p_code":"437", + "code":"439" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2498,8 +3956,8 @@ "title":"Creating a Source Stream", "uri":"dli_08_0234.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"278" + "p_code":"437", + "code":"440" }, { "desc":"Create a source stream to obtain data from HBase of CloudTable as input data of the job. HBase is a column-oriented distributed cloud storage system that features enhance", @@ -2507,8 +3965,8 @@ "title":"CloudTable HBase Source Stream", "uri":"dli_08_0237.html", "doc_type":"sqlreference", - "p_code":"278", - "code":"279" + "p_code":"440", + "code":"441" }, { "desc":"Create a source stream to read data from DIS. 
DIS accesses user data and Flink job reads data from the DIS stream as input data for jobs. Flink jobs can quickly remove da", @@ -2516,8 +3974,8 @@ "title":"DIS Source Stream", "uri":"dli_08_0235.html", "doc_type":"sqlreference", - "p_code":"278", - "code":"280" + "p_code":"440", + "code":"442" }, { "desc":"DMS (Distributed Message Service) is a message middleware service based on distributed, high-availability clustering technology. It provides reliable, scalable, fully man", @@ -2525,8 +3983,8 @@ "title":"DMS Source Stream", "uri":"dli_08_0270.html", "doc_type":"sqlreference", - "p_code":"278", - "code":"281" + "p_code":"440", + "code":"443" }, { "desc":"Create a source stream to obtain data from Kafka as input data for jobs.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscripti", @@ -2534,8 +3992,8 @@ "title":"MRS Kafka Source Stream", "uri":"dli_08_0238.html", "doc_type":"sqlreference", - "p_code":"278", - "code":"282" + "p_code":"440", + "code":"444" }, { "desc":"Create a source stream to obtain data from Kafka as input data for jobs.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscripti", @@ -2543,8 +4001,8 @@ "title":"Open-Source Kafka Source Stream", "uri":"dli_08_0239.html", "doc_type":"sqlreference", - "p_code":"278", - "code":"283" + "p_code":"440", + "code":"445" }, { "desc":"Create a source stream to obtain data from OBS. DLI reads data stored by users in OBS as input data for jobs. OBS applies to various scenarios, such as big data analysis,", @@ -2552,8 +4010,8 @@ "title":"OBS Source Stream", "uri":"dli_08_0236.html", "doc_type":"sqlreference", - "p_code":"278", - "code":"284" + "p_code":"440", + "code":"446" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2561,8 +4019,8 @@ "title":"Creating a Sink Stream", "uri":"dli_08_0240.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"285" + "p_code":"437", + "code":"447" }, { "desc":"DLI exports the job output data to HBase of CloudTable. HBase is a column-oriented distributed cloud storage system that features enhanced reliability, excellent performa", @@ -2570,8 +4028,8 @@ "title":"CloudTable HBase Sink Stream", "uri":"dli_08_0243.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"286" + "p_code":"447", + "code":"448" }, { "desc":"DLI exports the job output data to OpenTSDB of CloudTable. OpenTSDB is a distributed, scalable time series database based on HBase. It stores time series data. Time serie", @@ -2579,8 +4037,8 @@ "title":"CloudTable OpenTSDB Sink Stream", "uri":"dli_08_0244.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"287" + "p_code":"447", + "code":"449" }, { "desc":"DLI exports the output data of the Flink job to OpenTSDB of MRS.OpenTSDB has been installed in the MRS cluster.In this scenario, jobs must run on the dedicated queue of D", @@ -2588,8 +4046,8 @@ "title":"MRS OpenTSDB Sink Stream", "uri":"dli_08_0286.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"288" + "p_code":"447", + "code":"450" }, { "desc":"DLI exports Flink job output data to Elasticsearch of Cloud Search Service (CSS). Elasticsearch is a popular enterprise-class Lucene-powered search server and provides th", @@ -2597,8 +4055,8 @@ "title":"CSS Elasticsearch Sink Stream", "uri":"dli_08_0252.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"289" + "p_code":"447", + "code":"451" }, { "desc":"DLI exports the Flink job output data to Redis of DCS. Redis is a storage system that supports multiple types of data structures such as key-value. 
It can be used in scen", @@ -2606,8 +4064,8 @@ "title":"DCS Sink Stream", "uri":"dli_08_0253.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"290" + "p_code":"447", + "code":"452" }, { "desc":"DLI outputs the job output data to Document Database Service (DDS).DDS is compatible with the MongoDB protocol and is secure, highly available, reliable, scalable, and ea", @@ -2615,8 +4073,8 @@ "title":"DDS Sink Stream", "uri":"dli_08_0249.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"291" + "p_code":"447", + "code":"453" }, { "desc":"DLI writes the Flink job output data into DIS. This cloud ecosystem is applicable to scenarios where data is filtered and imported to the DIS stream for future processing", @@ -2624,8 +4082,8 @@ "title":"DIS Sink Stream", "uri":"dli_08_0241.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"292" + "p_code":"447", + "code":"454" }, { "desc":"DMS (Distributed Message Service) is a message middleware service based on distributed, high-availability clustering technology. It provides reliable, scalable, fully man", @@ -2633,8 +4091,8 @@ "title":"DMS Sink Stream", "uri":"dli_08_0271.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"293" + "p_code":"447", + "code":"455" }, { "desc":"DLI outputs the Flink job output data to Data Warehouse Service (DWS). DWS database kernel is compliant with PostgreSQL. The PostgreSQL database can store data of more co", @@ -2642,8 +4100,8 @@ "title":"DWS Sink Stream (JDBC Mode)", "uri":"dli_08_0247.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"294" + "p_code":"447", + "code":"456" }, { "desc":"Create a sink stream to export Flink job data to DWS through OBS-based dumping, specifically, output Flink job data to OBS and then import data from OBS to DWS. 
For detai", @@ -2651,8 +4109,8 @@ "title":"DWS Sink Stream (OBS-based Dumping)", "uri":"dli_08_0248.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"295" + "p_code":"447", + "code":"457" }, { "desc":"DLI exports the output data of the Flink job to HBase of MRS.An MRS cluster has been created by using your account. DLI can interconnect with HBase clusters with Kerberos", @@ -2660,8 +4118,8 @@ "title":"MRS HBase Sink Stream", "uri":"dli_08_0255.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"296" + "p_code":"447", + "code":"458" }, { "desc":"DLI exports the output data of the Flink job to Kafka.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscription system. It deli", @@ -2669,8 +4127,8 @@ "title":"MRS Kafka Sink Stream", "uri":"dli_08_0254.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"297" + "p_code":"447", + "code":"459" }, { "desc":"DLI exports the output data of the Flink job to Kafka.Apache Kafka is a fast, scalable, and fault-tolerant distributed message publishing and subscription system. It deli", @@ -2678,8 +4136,8 @@ "title":"Open-Source Kafka Sink Stream", "uri":"dli_08_0257.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"298" + "p_code":"447", + "code":"460" }, { "desc":"You can create a sink stream to export data to a file system such as HDFS or OBS. After the data is generated, a non-DLI table can be created directly according to the ge", @@ -2687,8 +4145,8 @@ "title":"File System Sink Stream (Recommended)", "uri":"dli_08_0267.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"299" + "p_code":"447", + "code":"461" }, { "desc":"Create a sink stream to export DLI data to OBS. DLI can export the job analysis results to OBS. 
OBS applies to various scenarios, such as big data analysis, cloud-native ", @@ -2696,8 +4154,8 @@ "title":"OBS Sink Stream", "uri":"dli_08_0242.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"300" + "p_code":"447", + "code":"462" }, { "desc":"DLI outputs the Flink job output data to RDS. Currently, PostgreSQL and MySQL databases are supported. The PostgreSQL database can store data of more complex types and de", @@ -2705,8 +4163,8 @@ "title":"RDS Sink Stream", "uri":"dli_08_0245.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"301" + "p_code":"447", + "code":"463" }, { "desc":"DLI exports Flink job output data to SMN.SMN provides reliable and flexible large-scale message notification services to DLI. It significantly simplifies system coupling ", @@ -2714,8 +4172,8 @@ "title":"SMN Sink Stream", "uri":"dli_08_0251.html", "doc_type":"sqlreference", - "p_code":"285", - "code":"302" + "p_code":"447", + "code":"464" }, { "desc":"The temporary stream is used to simplify SQL logic. If complex SQL logic is followed, write SQL statements concatenated with temporary streams. The temporary stream is ju", @@ -2723,8 +4181,8 @@ "title":"Creating a Temporary Stream", "uri":"dli_08_0258.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"303" + "p_code":"437", + "code":"465" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2732,8 +4190,8 @@ "title":"Creating a Dimension Table", "uri":"dli_08_0259.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"304" + "p_code":"437", + "code":"466" }, { "desc":"Create a Redis table to connect to the source stream.For details about the JOIN syntax, see JOIN Between Stream Data and Table Data.Redis clusters are not supported.Ensur", @@ -2741,8 +4199,8 @@ "title":"Creating a Redis Table", "uri":"dli_08_0260.html", "doc_type":"sqlreference", - "p_code":"304", - "code":"305" + "p_code":"466", + "code":"467" }, { "desc":"Create an RDS/DWS table to connect to the source stream.For details about the JOIN syntax, see JOIN.Ensure that you have created a PostgreSQL or MySQL RDS instance in RDS", @@ -2750,8 +4208,8 @@ "title":"Creating an RDS Table", "uri":"dli_08_0261.html", "doc_type":"sqlreference", - "p_code":"304", - "code":"306" + "p_code":"466", + "code":"468" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2759,8 +4217,8 @@ "title":"Custom Stream Ecosystem", "uri":"dli_08_0272.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"307" + "p_code":"437", + "code":"469" }, { "desc":"Compile code to obtain data from the desired cloud ecosystem or open-source ecosystem as the input data of Flink jobs.The user-defined source class needs to inherit the R", @@ -2768,8 +4226,8 @@ "title":"Custom Source Stream", "uri":"dli_08_0273.html", "doc_type":"sqlreference", - "p_code":"307", - "code":"308" + "p_code":"469", + "code":"470" }, { "desc":"Compile code to write the data processed by DLI to a specified cloud ecosystem or open-source ecosystem.The user-defined sink class needs to inherit the RichSinkFunction ", @@ -2777,8 +4235,8 @@ "title":"Custom Sink Stream", "uri":"dli_08_0274.html", "doc_type":"sqlreference", - "p_code":"307", - "code":"309" + "p_code":"469", + "code":"471" }, { "desc":"Data type is a basic attribute of data and used to distinguish different types of data. Different data types occupy different storage space and support different operatio", @@ -2786,8 +4244,8 @@ "title":"Data Type", "uri":"dli_08_0207.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"310" + "p_code":"437", + "code":"472" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2795,8 +4253,8 @@ "title":"Built-In Functions", "uri":"dli_08_0086.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"311" + "p_code":"437", + "code":"473" }, { "desc":"All data types can be compared by using relational operators and the result is returned as a BOOLEAN value.Relationship operators are binary operators. 
Two compared data ", @@ -2804,17 +4262,17 @@ "title":"Mathematical Operation Functions", "uri":"dli_08_0191.html", "doc_type":"sqlreference", - "p_code":"311", - "code":"312" + "p_code":"473", + "code":"474" }, { - "desc":"The common character string functions of DLI are as follows:FunctionConcatenates two character strings.Concatenates two character strings.SyntaxVARCHAR VARCHAR a || VARCH", + "desc":"The common string functions of DLI are as follows:FunctionConcatenates two strings.Concatenates two strings.SyntaxVARCHAR VARCHAR a || VARCHAR bParametersa: string.b: str", "product_code":"dli", "title":"String Functions", "uri":"dli_08_0096.html", "doc_type":"sqlreference", - "p_code":"311", - "code":"313" + "p_code":"473", + "code":"475" }, { "desc":"Table 1 lists the time functions supported by Flink SQL.None", @@ -2822,8 +4280,8 @@ "title":"Temporal Functions", "uri":"dli_08_0097.html", "doc_type":"sqlreference", - "p_code":"311", - "code":"314" + "p_code":"473", + "code":"476" }, { "desc":"This function is used to forcibly convert types.If the input is NULL, NULL is returned.Flink jobs do not support the conversion of bigint to timestamp using CAST. You can", @@ -2831,8 +4289,8 @@ "title":"Type Conversion Functions", "uri":"dli_08_0112.html", "doc_type":"sqlreference", - "p_code":"311", - "code":"315" + "p_code":"473", + "code":"477" }, { "desc":"An aggregate function performs a calculation operation on a set of input values and returns a value. For example, the COUNT function counts the number of rows retrieved b", @@ -2840,8 +4298,8 @@ "title":"Aggregate Functions", "uri":"dli_08_0104.html", "doc_type":"sqlreference", - "p_code":"311", - "code":"316" + "p_code":"473", + "code":"478" }, { "desc":"Table-valued functions can convert one row of records into multiple rows or convert one column of records into multiple columns. 
Table-valued functions can only be used i", @@ -2849,8 +4307,8 @@ "title":"Table-Valued Functions", "uri":"dli_08_0206.html", "doc_type":"sqlreference", - "p_code":"311", - "code":"317" + "p_code":"473", + "code":"479" }, { "desc":"Example:The returned number of elements in the array is 3.HELLO WORLD is returned.", @@ -2858,8 +4316,8 @@ "title":"Other Functions", "uri":"dli_08_0101.html", "doc_type":"sqlreference", - "p_code":"311", - "code":"318" + "p_code":"473", + "code":"480" }, { "desc":"DLI supports the following three types of user-defined functions (UDFs):Regular UDF: takes in one or more input parameters and returns a single result.User-defined table-", @@ -2867,8 +4325,8 @@ "title":"User-Defined Functions", "uri":"dli_08_0099.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"319" + "p_code":"437", + "code":"481" }, { "desc":"Table 1 describes the basic geospatial geometric elements.You can build complex geospatial geometries based on basic geospatial geometric elements. Table 2 describes the ", @@ -2876,8 +4334,8 @@ "title":"Geographical Functions", "uri":"dli_08_0209.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"320" + "p_code":"437", + "code":"482" }, { "desc":"SyntaxDescriptionThe SELECT statement is used to select data from a table or insert constant data into a table.PrecautionsThe table to be queried must exist. Otherwise, a", @@ -2885,8 +4343,8 @@ "title":"SELECT", "uri":"dli_08_0102.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"321" + "p_code":"437", + "code":"483" }, { "desc":"SyntaxorDescriptionIf the value of value is value1, result1 is returned. If the value is not any of the values listed in the clause, resultZ is returned. If no else state", @@ -2894,8 +4352,8 @@ "title":"Condition Expression", "uri":"dli_08_0103.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"322" + "p_code":"437", + "code":"484" }, { "desc":"DescriptionGroup Window is defined in GROUP BY. 
One record is generated from each group. Group Window involves the following functions:time_attr can be processing-time or", @@ -2903,8 +4361,8 @@ "title":"Window", "uri":"dli_08_0218.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"323" + "p_code":"437", + "code":"485" }, { "desc":"The JOIN operation allows you to query data from a table and write the query result to the sink stream. Currently, only RDSs and DCS Redis tables are supported. The ON ke", @@ -2912,8 +4370,8 @@ "title":"JOIN Between Stream Data and Table Data", "uri":"dli_08_0106.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"324" + "p_code":"437", + "code":"486" }, { "desc":"Flink provides two time models: processing time and event time.DLI allows you to specify the time model during creation of the source stream and temporary stream.Processi", @@ -2921,8 +4379,8 @@ "title":"Configuring Time Models", "uri":"dli_08_0107.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"325" + "p_code":"437", + "code":"487" }, { "desc":"Complex event processing (CEP) is used to detect complex patterns in endless data streams so as to identify and search patterns in various data rows. Pattern matching is ", @@ -2930,8 +4388,8 @@ "title":"Pattern Matching", "uri":"dli_08_0108.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"326" + "p_code":"437", + "code":"488" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2939,8 +4397,8 @@ "title":"StreamingML", "uri":"dli_08_0109.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"327" + "p_code":"437", + "code":"489" }, { "desc":"Anomaly detection applies to various scenarios, including intrusion detection, financial fraud detection, sensor data monitoring, medical diagnosis, natural data detectio", @@ -2948,8 +4406,8 @@ "title":"Anomaly Detection", "uri":"dli_08_0110.html", "doc_type":"sqlreference", - "p_code":"327", - "code":"328" + "p_code":"489", + "code":"490" }, { "desc":"Modeling and forecasting time series is a common task in many business verticals. Modeling is used to extract meaningful statistics and other characteristics of the data.", @@ -2957,8 +4415,8 @@ "title":"Time Series Forecasting", "uri":"dli_08_0111.html", "doc_type":"sqlreference", - "p_code":"327", - "code":"329" + "p_code":"489", + "code":"491" }, { "desc":"Clustering algorithms belong to unsupervised algorithms. K-Means, a clustering algorithm, partitions data points into related clusters by calculating the distance between", @@ -2966,8 +4424,8 @@ "title":"Real-Time Clustering", "uri":"dli_08_0216.html", "doc_type":"sqlreference", - "p_code":"327", - "code":"330" + "p_code":"489", + "code":"492" }, { "desc":"Deep learning has a wide range of applications in many industries, such as image classification, image recognition, and speech recognition. DLI provides several functions", @@ -2975,17 +4433,17 @@ "title":"Deep Learning Model Prediction", "uri":"dli_08_0088.html", "doc_type":"sqlreference", - "p_code":"327", - "code":"331" + "p_code":"489", + "code":"493" }, { - "desc":"Flink SQL reserves some strings as keywords. If you want to use the following character strings as field names, ensure that they are enclosed by back quotes, for example,", + "desc":"Flink SQL reserves some strings as keywords. 
If you want to use the following strings as field names, ensure that they are enclosed by back quotes, for example, `value` a", "product_code":"dli", "title":"Reserved Keywords", "uri":"dli_08_0125.html", "doc_type":"sqlreference", - "p_code":"275", - "code":"332" + "p_code":"437", + "code":"494" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -2994,7 +4452,7 @@ "uri":"dli_08_0001.html", "doc_type":"sqlreference", "p_code":"", - "code":"333" + "code":"495" }, { "desc":"None.Aggregate function.", @@ -3002,8 +4460,8 @@ "title":"aggregate_func", "uri":"dli_08_0002.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"334" + "p_code":"495", + "code":"496" }, { "desc":"None.Alias, which must be STRING type. It can be assigned to a field, table, view, or subquery.", @@ -3011,8 +4469,8 @@ "title":"alias", "uri":"dli_08_0003.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"335" + "p_code":"495", + "code":"497" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -3020,8 +4478,8 @@ "title":"attr_expr", "uri":"dli_08_0004.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"336" + "p_code":"495", + "code":"498" }, { "desc":"None.List of attr_expr, which is separated by commas (,).", @@ -3029,8 +4487,8 @@ "title":"attr_expr_list", "uri":"dli_08_0005.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"337" + "p_code":"495", + "code":"499" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -3038,8 +4496,8 @@ "title":"attrs_value_set_expr", "uri":"dli_08_0006.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"338" + "p_code":"495", + "code":"500" }, { "desc":"None.Return a boolean expression.", @@ -3047,8 +4505,8 @@ "title":"boolean_expression", "uri":"dli_08_0007.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"339" + "p_code":"495", + "code":"501" }, { "desc":"None.Formal parameter for function call. It is usually a field name, which is the same as col_name.", @@ -3056,8 +4514,8 @@ "title":"col", "uri":"dli_08_0009.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"340" + "p_code":"495", + "code":"502" }, { "desc":"None.Column (field) description, which must be STRING type and cannot exceed 256 bytes.", @@ -3065,8 +4523,8 @@ "title":"col_comment", "uri":"dli_08_0010.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"341" + "p_code":"495", + "code":"503" }, { "desc":"None.Column name, which must be STRING type and cannot exceed 128 bytes.", @@ -3074,8 +4532,8 @@ "title":"col_name", "uri":"dli_08_0011.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"342" + "p_code":"495", + "code":"504" }, { "desc":"None.Field list, which consists of one col_name or more. If there is more than one col_name, separate them by using a comma (,).", @@ -3083,8 +4541,8 @@ "title":"col_name_list", "uri":"dli_08_0012.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"343" + "p_code":"495", + "code":"505" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -3092,8 +4550,8 @@ "title":"condition", "uri":"dli_08_0013.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"344" + "p_code":"495", + "code":"506" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -3101,8 +4559,8 @@ "title":"condition_list", "uri":"dli_08_0014.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"345" + "p_code":"495", + "code":"507" }, { "desc":"None.Common expression name.", @@ -3110,8 +4568,8 @@ "title":"cte_name", "uri":"dli_08_0015.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"346" + "p_code":"495", + "code":"508" }, { "desc":"None.Data type. Currently, only the primitive data types are supported.", @@ -3119,8 +4577,8 @@ "title":"data_type", "uri":"dli_08_0016.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"347" + "p_code":"495", + "code":"509" }, { "desc":"None.Database description, which must be STRING type and cannot exceed 256 characters.", @@ -3128,8 +4586,8 @@ "title":"db_comment", "uri":"dli_08_0017.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"348" + "p_code":"495", + "code":"510" }, { "desc":"None.Database name, which must be STRING type and cannot exceed 128 bytes.", @@ -3137,8 +4595,8 @@ "title":"db_name", "uri":"dli_08_0018.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"349" + "p_code":"495", + "code":"511" }, { "desc":"None.Returned result for the ELSE clause of the CASE WHEN statement.", @@ -3146,8 +4604,8 @@ "title":"else_result_expression", "uri":"dli_08_0019.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"350" + "p_code":"495", + "code":"512" }, { "desc":"| AVRO| CSV| 
JSON| ORC| PARQUETCurrently, the preceding formats are supported.Both USING and STORED AS can be used for specifying the data format. You can specify the pre", @@ -3155,8 +4613,8 @@ "title":"file_format", "uri":"dli_08_0020.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"351" + "p_code":"495", + "code":"513" }, { "desc":"None.File path, which is the OBS path", @@ -3164,8 +4622,8 @@ "title":"file_path", "uri":"dli_08_0021.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"352" + "p_code":"495", + "code":"514" }, { "desc":"None.Function name, which must be STRING type.", @@ -3173,8 +4631,8 @@ "title":"function_name", "uri":"dli_08_0022.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"353" + "p_code":"495", + "code":"515" }, { "desc":"None.Expression that includes GROUP BY.", @@ -3182,8 +4640,8 @@ "title":"groupby_expression", "uri":"dli_08_0023.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"354" + "p_code":"495", + "code":"516" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -3191,8 +4649,8 @@ "title":"having_condition", "uri":"dli_08_0024.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"355" + "p_code":"495", + "code":"517" }, { "desc":"None.Input expression of the CASE WHEN statement.", @@ -3200,8 +4658,8 @@ "title":"input_expression", "uri":"dli_08_0026.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"356" + "p_code":"495", + "code":"518" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -3209,8 +4667,8 @@ "title":"join_condition", "uri":"dli_08_0029.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"357" + "p_code":"495", + "code":"519" }, { "desc":"None.The condition of an inequality join.", @@ -3218,8 +4676,8 @@ "title":"non_equi_join_condition", "uri":"dli_08_0030.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"358" + "p_code":"495", + "code":"520" }, { "desc":"None.Maximum number of output lines specified by LIMIT. Which must be INT type.", @@ -3227,8 +4685,8 @@ "title":"number", "uri":"dli_08_0031.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"359" + "p_code":"495", + "code":"521" }, { "desc":"None.Partition column name, that is, partition field name, which must be STRING type.", @@ -3236,8 +4694,8 @@ "title":"partition_col_name", "uri":"dli_08_0034.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"360" + "p_code":"495", + "code":"522" }, { "desc":"None.Partition column value, that is, partition field value.", @@ -3245,8 +4703,8 @@ "title":"partition_col_value", "uri":"dli_08_0035.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"361" + "p_code":"495", + "code":"523" }, { "desc":"partition_specs : (partition_col_name = partition_col_value, partition_col_name = partition_col_value, ...);Table partition list, which is expressed by using key=value pa", @@ -3254,8 +4712,8 @@ "title":"partition_specs", "uri":"dli_08_0036.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"362" + "p_code":"495", + "code":"524" }, { "desc":"None.Property name, which must be STRING type.", @@ -3263,8 +4721,8 @@ "title":"property_name", "uri":"dli_08_0037.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"363" + "p_code":"495", + "code":"525" }, { "desc":"None.Property value, which must be STRING type.", @@ -3272,8 +4730,8 @@ 
"title":"property_value", "uri":"dli_08_0038.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"364" + "p_code":"495", + "code":"526" }, { "desc":"None.Pattern matching string, which supports wildcard matching.", @@ -3281,8 +4739,8 @@ "title":"regex_expression", "uri":"dli_08_0039.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"365" + "p_code":"495", + "code":"527" }, { "desc":"None.Returned result for the THEN clause of the CASE WHEN statement.", @@ -3290,8 +4748,8 @@ "title":"result_expression", "uri":"dli_08_0040.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"366" + "p_code":"495", + "code":"528" }, { "desc":"None.Query clause for the basic SELECT statement.", @@ -3299,8 +4757,8 @@ "title":"select_statement", "uri":"dli_08_0042.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"367" + "p_code":"495", + "code":"529" }, { "desc":"None.Separator, which can be customized by users, for example, comma (,), semicolon (;), and colon (:). Which must be CHAR type.", @@ -3308,8 +4766,8 @@ "title":"separator", "uri":"dli_08_0043.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"368" + "p_code":"495", + "code":"530" }, { "desc":"None.SQL statement containing the common expression defined by cte_name.", @@ -3317,8 +4775,8 @@ "title":"sql_containing_cte_name", "uri":"dli_08_0045.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"369" + "p_code":"495", + "code":"531" }, { "desc":"None.Subquery.", @@ -3326,8 +4784,8 @@ "title":"sub_query", "uri":"dli_08_0046.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"370" + "p_code":"495", + "code":"532" }, { "desc":"None.Table description, which must be STRING type and cannot exceed 256 bytes.", @@ -3335,8 +4793,8 @@ "title":"table_comment", "uri":"dli_08_0047.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"371" + "p_code":"495", + "code":"533" }, { "desc":"NoneTable name, which cannot exceed 128 bytes. 
The string type and \"$\" symbol are supported.", @@ -3344,8 +4802,8 @@ "title":"table_name", "uri":"dli_08_0048.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"372" + "p_code":"495", + "code":"534" }, { "desc":"None.Table property list, which is expressed by using key=value pairs. key represents property_name, and value represents property_value. If there is more than one key=va", @@ -3353,8 +4811,8 @@ "title":"table_properties", "uri":"dli_08_0049.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"373" + "p_code":"495", + "code":"535" }, { "desc":"None.Table or view name, which must be STRING type. It can also be a subquery. If it is subquery, an alias must also be provided.", @@ -3362,8 +4820,8 @@ "title":"table_reference", "uri":"dli_08_0050.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"374" + "p_code":"495", + "code":"536" }, { "desc":"None.When expression of the CASE WHEN statement. It is used for matching with the input expression.", @@ -3371,8 +4829,8 @@ "title":"when_expression", "uri":"dli_08_0053.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"375" + "p_code":"495", + "code":"537" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -3380,17 +4838,17 @@ "title":"where_condition", "uri":"dli_08_0054.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"376" + "p_code":"495", + "code":"538" }, { - "desc":"None.Analysis window function. 
For details, see Window Functions.", + "desc":"NoneAnalysis window function.", "product_code":"dli", "title":"window_function", "uri":"dli_08_0055.html", "doc_type":"sqlreference", - "p_code":"333", - "code":"377" + "p_code":"495", + "code":"539" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -3399,7 +4857,7 @@ "uri":"dli_08_0060.html", "doc_type":"sqlreference", "p_code":"", - "code":"378" + "code":"540" }, { "desc":"All data types can be compared by using relational operators and the result is returned as a BOOLEAN value.Relationship operators are binary operators. Two compared data ", @@ -3407,8 +4865,8 @@ "title":"Relational Operators", "uri":"dli_08_0061.html", "doc_type":"sqlreference", - "p_code":"378", - "code":"379" + "p_code":"540", + "code":"541" }, { "desc":"Arithmetic operators include binary operators and unary operators. For both types of operators, the returned results are numbers. Table 1 lists the arithmetic operators s", @@ -3416,8 +4874,8 @@ "title":"Arithmetic Operators", "uri":"dli_08_0062.html", "doc_type":"sqlreference", - "p_code":"378", - "code":"380" + "p_code":"540", + "code":"542" }, { "desc":"Common logical operators include AND, OR, and NOT. The operation result can be TRUE, FALSE, or NULL (which means unknown). The priorities of the operators are as follows:", @@ -3425,8 +4883,8 @@ "title":"Logical Operators", "uri":"dli_08_0063.html", "doc_type":"sqlreference", - "p_code":"378", - "code":"381" + "p_code":"540", + "code":"543" }, { "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", @@ -3435,6 +4893,6 @@ "uri":"dli_08_00005.html", "doc_type":"sqlreference", "p_code":"", - "code":"382" + "code":"544" } ] \ No newline at end of file diff --git a/docs/dli/sqlreference/dli_08_00005.html b/docs/dli/sqlreference/dli_08_00005.html index 7d779899..016ac09d 100644 --- a/docs/dli/sqlreference/dli_08_00005.html +++ b/docs/dli/sqlreference/dli_08_00005.html @@ -8,9 +8,15 @@ -

2023-05-11

+

2024-02-27

-

This is the first official release.

+

Modified the following section:

+

Added the description that DDS is fully compatible with the MongoDB protocol to Creating a DLI Table and Associating It with DDS.

+ + +

2023-05-11

+ +

This issue is the first official release.

diff --git a/docs/dli/sqlreference/dli_08_0055.html b/docs/dli/sqlreference/dli_08_0055.html index e5e8d7d6..dc002022 100644 --- a/docs/dli/sqlreference/dli_08_0055.html +++ b/docs/dli/sqlreference/dli_08_0055.html @@ -1,9 +1,9 @@

window_function

-

Syntax

None.

+

Syntax

None

-

Description

Analysis window function. For details, see Window Functions.

+

Description

Analysis window function.

diff --git a/docs/dli/sqlreference/dli_08_0058.html b/docs/dli/sqlreference/dli_08_0058.html index 276beb5b..5d41f854 100644 --- a/docs/dli/sqlreference/dli_08_0058.html +++ b/docs/dli/sqlreference/dli_08_0058.html @@ -32,7 +32,7 @@

STRING

-

Character string

+

String

-

@@ -152,7 +152,7 @@

CHAR

-

Fixed-length character string

+

Fixed-length string

-

@@ -165,7 +165,7 @@

VARCHAR

-

Variable-length character string

+

Variable-length string

-

@@ -196,7 +196,7 @@

INT

Signed integer with a storage space of 4 bytes. Its value ranges from –2147483648 to 2147483647. If this field is NULL, value 0 is used by default.

-

STRING

Character string.

+

STRING

String.

FLOAT

Single-precision floating point with a storage space of 4 bytes. If this field is NULL, value 0 is used by default.

Due to the limitation of storage methods of floating point data, do not use the formula a==b to check whether two floating point values are the same. You are advised to use the formula: absolute value of (a-b) <= EPSILON. EPSILON indicates the allowed error range which is usually 1.19209290E-07F. If the formula is satisfied, the compared two floating point values are considered the same.

@@ -214,9 +214,9 @@

BIGINT/LONG

Signed integer with a storage space of 8 bytes. Its value ranges from –9223372036854775808 to 9223372036854775807. It does not support scientific notation. If this field is NULL, value 0 is used by default.

-

TIMESTAMP

Legacy UNIX TIMESTAMP is supported, providing the precision up to the microsecond level. TIMESTAMP is defined by the difference between the specified time and UNIX epoch (UNIX epoch time: 1970-01-01 00:00:00) in seconds. Data of the STRING type supports implicit conversion to TIMESTAMP. (The STRING must in the yyyy-MM-dd HH:MM:SS[.ffffff] format. The precision after the decimal point is optional.)

+

TIMESTAMP

Legacy UNIX TIMESTAMP is supported, providing the precision up to the microsecond level. TIMESTAMP is defined by the difference between the specified time and UNIX epoch (UNIX epoch time: 1970-01-01 00:00:00) in seconds. The data type STRING can be implicitly converted to TIMESTAMP, but it must be in the yyyy-MM-dd HH:mm:SS[.ffffff] format. The precision after the decimal point is optional.

-

CHAR

Character string with a fixed length. In DLI, the STRING type is used.

+

CHAR

String with a fixed length. In DLI, the STRING type is used.

VARCHAR

VARCHAR is declared with a length that indicates the maximum number of characters in a string. During conversion from STRING to VARCHAR, if the number of characters in STRING exceeds the specified length, the excess characters of STRING are automatically trimmed. Similar to STRING, the spaces at the end of VARCHAR are meaningful and affect the comparison result. In DLI, the STRING type is used.

diff --git a/docs/dli/sqlreference/dli_08_0064.html b/docs/dli/sqlreference/dli_08_0064.html index c1426d18..c2ee41b8 100644 --- a/docs/dli/sqlreference/dli_08_0064.html +++ b/docs/dli/sqlreference/dli_08_0064.html @@ -4,15 +4,17 @@
diff --git a/docs/dli/sqlreference/dli_08_0065.html b/docs/dli/sqlreference/dli_08_0065.html index 66f7001f..ca0c5dc8 100644 --- a/docs/dli/sqlreference/dli_08_0065.html +++ b/docs/dli/sqlreference/dli_08_0065.html @@ -1,300 +1,315 @@ -

Mathematical Functions

-

Table 1 lists the mathematical functions supported in DLI.

+

Overview

+

Table 1 lists the mathematical functions supported by DLI.

-
Table 1 Mathematical functions

Function

+
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + @@ -303,7 +318,7 @@
diff --git a/docs/dli/sqlreference/dli_08_0066.html b/docs/dli/sqlreference/dli_08_0066.html index 9dccc808..e13fe7f3 100644 --- a/docs/dli/sqlreference/dli_08_0066.html +++ b/docs/dli/sqlreference/dli_08_0066.html @@ -1,198 +1,261 @@ -

Date Functions

-

Table 1 lists the date functions supported in DLI.

+

Overview

+

Table 1 lists the date functions supported by DLI.

-
Table 1 Mathematical functions

Syntax

Return Type

+

Value Type

Description

+

Description

round(DOUBLE a)

+

abs(DOUBLE a)

DOUBLE

+

DOUBLE or INT

Round a.

+

Returns the absolute value.

round(DOUBLE a, INT d)

+

acos(DOUBLE a)

DOUBLE

+

DOUBLE

Round a to d decimal places. Example: round(21.263,2) = 21.26.

+

Returns the arc cosine value of a.

bround(DOUBLE a)

+

asin(DOUBLE a)

DOUBLE

+

DOUBLE

Round off a figure using the HALF_EVEN rounding mode.

-

If the figure to be rounded off ends in 5, the HALF_EVEN rounding mode is as follows:

-
  • Round up if the digit in the place preceding 5 is odd.
  • Round down if the digit in the place preceding 5 is even.
-

Example: bround(7.5) = 8.0, bround(6.5) = 6.0.

+

Returns the arc sine value of a.

bround(DOUBLE a, INT d)

+

atan(DOUBLE a)

DOUBLE

+

DOUBLE

Retain d decimal places and round the d+1 decimal place using the HALF_EVEN rounding mode.

-

If the figure to be rounded off ends in 5, it will be rounded off as follows:

-
  • Round up if the d decimal digit is odd.
  • Round down if the d decimal digit is even.
-

Example: bround(8.25, 1) = 8.2, bround(8.35, 1) = 8.4.

+

Returns the arc tangent value of a.

floor(DOUBLE a)

+

bin(BIGINT a)

BIGINT

+

STRING

Return the largest integer that is less than or equal to a. Example: floor(21.2) = 21.

+

Returns a number in binary format.

ceil(DOUBLE a), ceiling(DOUBLE a)

+

bround(DOUBLE a)

BIGINT

+

DOUBLE

Return the smallest integer that is greater than or equal to a. Example: ceil(21.2) = 22.

+

In HALF_EVEN rounding, the digit 5 is rounded up if the digit before 5 is an odd number and rounded down if the digit before 5 is an even number. For example, bround(7.5) = 8.0, bround(6.5) = 6.0.

rand(), rand(INT seed)

+

bround(DOUBLE a, INT d)

DOUBLE

+

DOUBLE

Return a random number that is distributed uniformly from 0 through 1 (1 is exclusive). If the seed is specified, a stable random number sequence is displayed.

+

The value is rounded off to d decimal places in HALF_EVEN mode. The digit 5 is rounded up if the digit before 5 is an odd number and rounded down if the digit before 5 is an even number. For example, bround(8.25, 1) = 8.2, bround(8.35, 1) = 8.4.

exp(DOUBLE a), exp(DECIMAL a)

+

cbrt(DOUBLE a)

DOUBLE

+

DOUBLE

Return the value of e raised to the power of a.

+

Returns the cube root of a.

ln(DOUBLE a), ln(DECIMAL a)

+

ceil(DOUBLE a)

DOUBLE

+

DECIMAL

Return the natural logarithm of the argument a.

+

Returns the smallest integer that is greater than or equal to a. For example, ceil(21.2) = 22.

log10(DOUBLE a), log10(DECIMAL a)

+

conv(BIGINT num, INT from_base, INT to_base), conv(STRING num, INT from_base, INT to_base)

DOUBLE

+

STRING

Return the base 10 logarithm of the argument a.

+

Converts a number from from_base to to_base. For example, convert 5 from decimal to quaternary using conv(5,10,4) = 11.

log2(DOUBLE a), log2(DECIMAL a)

+

cos(DOUBLE a)

DOUBLE

+

DOUBLE

Return the base 2 logarithm of the argument a.

+

Returns the cosine value of a.

log(DOUBLE base, DOUBLE a)

-

log(DECIMAL base, DECIMAL a)

+

cot1(DOUBLE a)

DOUBLE

+

DOUBLE or DECIMAL

Return the base base logarithm of the argument a.

+

Returns the cotangent of a specified radian value.

pow(DOUBLE a, DOUBLE p), power(DOUBLE a, DOUBLE p)

+

degrees(DOUBLE a)

DOUBLE

+

DOUBLE

Return the value of a raised to the power of p.

+

Returns the angle corresponding to the radian.

sqrt(DOUBLE a), sqrt(DECIMAL a)

+

e()

DOUBLE

+

DOUBLE

Return the square root of a.

+

Returns the value of e.

bin(BIGINT a)

+

exp(DOUBLE a)

STRING

+

DOUBLE

Return a number in binary format.

+

Returns the value of e raised to the power of a.

hex(BIGINT a) hex(STRING a)

+

factorial(INT a)

STRING

+

BIGINT

Convert an integer or character to its hexadecimal representation.

+

Returns the factorial of a.

conv(BIGINT num, INT from_base, INT to_base), conv(STRING num, INT from_base, INT to_base)

+

floor(DOUBLE a)

STRING

+

BIGINT

Convert a number from from_base to to_base. Example: Convert 5 from decimal to quaternary using conv(5,10,4) = 11.

+

Returns the largest integer that is less than or equal to a. For example, floor(21.2) = 21.

abs(DOUBLE a)

+

greatest(T v1, T v2, ...)

DOUBLE

+

DOUBLE

Return the absolute value.

+

Returns the greatest value of a list of values.

pmod(INT a, INT b), pmod(DOUBLE a, DOUBLE b)

+

hex(BIGINT a) hex(STRING a)

INT or DOUBLE

+

STRING

Return the positive value of the remainder after division of a by b.

+

Converts an integer or character into its hexadecimal representation.

sin(DOUBLE a), sin(DECIMAL a)

+

least(T v1, T v2, ...)

DOUBLE

+

DOUBLE

Return the sine value of a.

+

Returns the least value of a list of values.

asin(DOUBLE a), asin(DECIMAL a)

+

ln(DOUBLE a)

DOUBLE

+

DOUBLE

Return the arc sine value of a.

+

Returns the natural logarithm of a given value.

cos(DOUBLE a), cos(DECIMAL a)

+

log(DOUBLE base, DOUBLE a)

DOUBLE

+

DOUBLE

Return the cosine value of a.

+

Returns the natural logarithm of a given base and exponent.

acos(DOUBLE a), acos(DECIMAL a)

+

log10(DOUBLE a)

DOUBLE

+

DOUBLE

Return the arc cosine value of a.

+

Returns the base-10 logarithm of a given value.

tan(DOUBLE a), tan(DECIMAL a)

+

log2(DOUBLE a)

DOUBLE

+

DOUBLE

Return the tangent value of a.

+

Returns the base-2 logarithm of a given value.

atan(DOUBLE a), atan(DECIMAL a)

+

median(colname)

DOUBLE

+

DOUBLE or DECIMAL

Return the arc tangent value of a.

+

Returns the median.

degrees(DOUBLE a), degrees(DECIMAL a)

+

negative(INT a)

DOUBLE

+

DECIMAL or INT

Convert the value of a from radians to degrees.

+

Returns the opposite number of a. For example, if negative(2) is given, –2 is returned.

radians(DOUBLE a), radians(DECIMAL a)

+

percentile(colname,DOUBLE p)

DOUBLE

+

DOUBLE or ARRAY

Convert the value of a from degrees to radians.

+

Returns the exact percentile, which is applicable to a small amount of data. Sorts a specified column in ascending order, and then obtains the exact pth percentage. The value of p must be between 0 and 1.

positive(INT a), positive(DOUBLE a)

+

percentile_approx (colname,DOUBLE p)

INT or DOUBLE

+

DOUBLE or ARRAY

Return a. Example: positive(2) = 2.

+

Returns the approximate percentile, which is applicable to a large amount of data. Sorts a specified column in ascending order, and then obtains the value corresponding to the pth percentile.

negative(INT a), negative(DOUBLE a)

+

pi()

INT or DOUBLE

+

DOUBLE

Return –a. Example: negative(2) = –2.

+

Returns the value of pi.

sign(DOUBLE a), sign(DECIMAL a)

+

pmod(INT a, INT b)

DOUBLE or INT

+

DECIMAL or INT

Return the sign of a. 1.0 is returned if a is positive. –1.0 is returned if a is negative. Otherwise, 0.0 is returned.

+

Returns the positive value of the remainder after division of x by y.

e()

+

positive(INT a)

DOUBLE

+

DECIMAL, DOUBLE, or INT

Return the value of e.

+

Returns the value of a, for example, positive(2) = 2.

pi()

+

pow(DOUBLE a, DOUBLE p), power(DOUBLE a, DOUBLE p)

DOUBLE

+

DOUBLE

Return the value of pi.

+

Returns the value of a raised to the power of p.

factorial(INT a)

+

radians(DOUBLE a)

BIGINT

+

DOUBLE

Return the factorial of a.

+

Returns the radian corresponding to the angle.

cbrt(DOUBLE a)

+

rand(INT seed)

DOUBLE

+

DOUBLE

Return the cube root of a.

+

Returns an evenly distributed random number that is greater than or equal to 0 and less than 1. If the seed is specified, a stable random number sequence is displayed.

shiftleft(TINYINT|SMALLINT|INT a, INT b)

-

shiftleft(BIGINT a, INT b)

+

round(DOUBLE a)

INT

-

BIGINT

+

DOUBLE

Bitwise signed left shift. Interpret a as a binary number and shift the binary number b positions to the left.

+

Round off

shiftright(TINYINT|SMALLINT|INT a, INT b)

-

shiftright(BIGINT a, INT b)

+

round(DOUBLE a, INT d)

INT

-

BIGINT

+

DOUBLE

Bitwise signed right shift. Interpret a as a binary number and shift the binary number b positions to the right.

+

Rounds a to d decimal places, for example, round(21.263,2) = 21.26.

shiftrightunsigned(TINYINT|SMALLINT|INT a, INT b),

-

shiftrightunsigned(BIGINT a, INT b)

+

shiftleft(BIGINT a, INT b)

INT

-

BIGINT

+

INT

Bitwise unsigned right shift. Interpret a as a binary number and shift the binary number b positions to the right.

+

Bitwise signed left shift. Interprets a as a binary number and shifts the binary number b positions to the left.

greatest(T v1, T v2, ...)

+

shiftright(BIGINT a, INT b)

T

+

INT

Return the maximum value of a list of values.

+

Bitwise signed right shift. Interprets a as a binary number and shifts the binary number b positions to the right.

least(T v1, T v2, ...)

+

shiftrightunsigned(BIGINT a, INT b)

T

+

INT

Return the minimum value of a list of values.

+

Bitwise unsigned right shift. Interprets a as a binary number and shifts the binary number b positions to the right.

+

sign(DOUBLE a)

+

DOUBLE

+

Returns the sign of a. 1.0 is returned if a is positive. –1.0 is returned if a is negative. Otherwise, 0.0 is returned.

+

sin(DOUBLE a)

+

DOUBLE

+

Returns the sine value of the given angle a.

+

sqrt(DOUBLE a)

+

DOUBLE

+

Returns the square root of a.

+

tan(DOUBLE a)

+

DOUBLE

+

Returns the tangent value of the given angle a.

Table 1 Date/time functions

Function

+
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -201,7 +264,7 @@
diff --git a/docs/dli/sqlreference/dli_08_0067.html b/docs/dli/sqlreference/dli_08_0067.html index 8199d1bb..d78376c1 100644 --- a/docs/dli/sqlreference/dli_08_0067.html +++ b/docs/dli/sqlreference/dli_08_0067.html @@ -1,226 +1,291 @@ -

String Functions

-

Table 1 lists the string functions supported by DLI.

+

Overview

+

Table 1 lists the string functions supported by DLI.

-
Table 1 Date/time functions

Syntax

Return Type

+

Value Type

Description

+

Description

from_unixtime(bigint unixtime[, string format])

+

add_months(string start_date, int num_months)

STRING

+

STRING

Convert a timestamp to the time format "yyyy-MM-dd HH:mm:ss" or "yyyyMMddHHmmss.uuuuuu".

-

For example, select FROM_UNIXTIME(1608135036,'yyyy-MM-dd HH:mm:ss').

+

Returns the date that is num_months after start_date.

unix_timestamp()

+

current_date()

BIGINT

+

DATE

Return a Unix timestamp (the number of seconds that have elapsed since 1970-01-01 00:00:00) represented by an unsigned integer when the function is called without arguments.

+

Returns the current date, for example, 2016-07-04.

unix_timestamp(string date)

+

current_timestamp()

BIGINT

+

TIMESTAMP

Return the number of seconds between a specified date and 1970-01-01 00:00:00.

+

Returns the current time, for example, 2016-07-04 11:18:11.685.

unix_timestamp(string date, string pattern)

+

date_add(string startdate, int days)

BIGINT

+

STRING or DATE

Convert a time string with a given pattern to a Unix timestamp. Example: unix_timestamp("2009-03-20", "yyyy-MM-dd") = 1237532400.

+

Adds a number of days to a date.

to_date(string timestamp)

+

dateadd(string date, bigint delta, string datepart)

STRING

+

STRING or DATE

Return the date part of a time string. Example: to_date("1970-01-01 00:00:00") = "1970-01-01".

+

Changes a date based on datepart and delta.

+

date: This parameter is mandatory. Date value, which is of the STRING type.

+

The time format is yyyy-mm-dd hh:mi:ss, for example, 2021-08-28 00:00:00.

+

delta: This parameter is mandatory. Adjustment amplitude, which is of the BIGINT type.

+

datepart: Adjustment unit, which is a constant of the STRING type. This parameter is mandatory.

year(string date)

+

date_sub(string startdate, int days)

INT

+

STRING

Return the year part of a date.

+

Subtracts a number of days from a date.

quarter(string date/timestamp/string)

+

date_format(string date, string format)

INT

+

STRING

Return the quarter of the year for a date, timestamp, or string. Example: quarter('2015-04-01')=2.

+

Converts a date into a string based on the format specified by format.

month(string date)

+

datediff(string date1, string date2)

INT

+

BIGINT

Return the month (from 1 to 12) part of a date.

+

Calculates the difference between date1 and date2.

day(string date) dayofmonth(string date)

+

datediff1(string date1, string date2, string datepart)

INT

+

BIGINT

Return the day part of a date.

+

Calculates the difference between date1 and date2 and returns the difference in a specified datepart.

hour(string date)

+

datepart (string date, string datepart)

INT

+

BIGINT

Return the hour (from 0 to 23) part of a date.

+

Returns the value of the field that meets a specified time unit in the date.

minute(string date)

+

datetrunc (string date, string datepart)

INT

+

STRING

Return the minute (from 0 to 59) part of a date.

+

Calculates the date obtained through the truncation of a specified date based on a specified datepart.

+

date: The value is in the yyyy-mm-dd or yyyy-mm-dd hh:mi:ss format. This parameter is mandatory.

+

datepart: Supported date format, which is a STRING constant. This parameter is mandatory.

second(string date)

+

day(string date), dayofmonth(string date)

INT

+

INT

Return the second (from 0 to 59) part of a date.

+

Returns the date of a specified time.

weekofyear(string date)

+

from_unixtime(bigint unixtime)

INT

+

STRING

Return the week number (from 0 to 53) of a date.

+

Converts a timestamp to a time, in yyyy-MM-dd HH:mm:ss or yyyyMMddHHmmss.uuuuuu format.

+

For example, select FROM_UNIXTIME(1608135036,'yyyy-MM-dd HH:mm:ss').

datediff(string enddate, string startdate)

+

from_utc_timestamp(string timestamp, string timezone)

INT

+

TIMESTAMP

Return the number of days from startdate to enddate.

+

Converts a UTC timestamp to the corresponding timestamp in a specific time zone.

date_add(string startdate, int days)

+

getdate()

STRING

+

STRING

Add a number of days to a date.

+

Obtains the current system time.

date_sub(string startdate, int days)

+

hour(string date)

STRING

+

INT

Subtract a number of days from a date.

+

Returns the hour (from 0 to 23) of a specified time.

from_utc_timestamp(string timestamp, string timezone)

+

isdate(string date , string format)

TIMESTAMP

+

BOOLEAN

Convert a UTC timestamp to a timestamp in a given time zone. For example, from_utc_timestamp('1970-01-01 08:00:00','PST') returns 1970-01-01 00:00:00.

+

date: Date, which is implicitly converted to the STRING type and then used for calculation. This parameter is mandatory.

+

format: Unsupported date extension format, which is a STRING constant. This parameter is mandatory. If there are redundant format strings in format, only the date value corresponding to the first format string is used. Other format strings are used as separators. For example, isdate("1234-yyyy", "yyyy-yyyy") returns True.

to_utc_timestamp(string timestamp, string timezone)

+

last_day(string date)

TIMESTAMP

+

DATE

Convert a timestamp in a given time zone to a UTC timestamp. For example, to_utc_timestamp('1970-01-01 00:00:00','PST') returns 1970-01-01 08:00:00.

+

Returns the last day of the month a date belongs to, in the format of yyyy-MM-dd, for example, 2015-08-31.

current_date()

+

lastday(string date)

DATE

+

STRING

Return the current date, for example, 2016-07-04.

+

Returns the last day of the month a date belongs to. The value is of the STRING type and is in the yyyy-mm-dd hh:mi:ss format.

current_timestamp()

+

minute(string date)

TIMESTAMP

+

INT

Return the current time, for example, 2016-07-04 11:18:11.685.

+

Returns the minute (from 0 to 59) of a specified time.

add_months(string start_date, int num_months)

+

month(string date)

STRING

+

INT

Return the date that is num_months after start_date.

+

Returns the month (from January to December) of a specified time.

last_day(string date)

+

months_between(string date1, string date2)

STRING

+

DOUBLE

Return the last day of the month to which a date belongs. The returned date is in the format of yyyy-MM-dd, for example, 2015-08-31.

+

Returns the month difference between date1 and date2.

next_day(string start_date, string day_of_week)

+

next_day(string start_date, string day_of_week)

STRING

+

DATE

Return the first date that is later than start_date and nearest to day_of_week. The returned date in the format of yyyy-MM-dd. day_of_week specifies a day of a week. For example, the value of day_of_week can be Monday or FRIDAY.

+

Returns the date closest to day_of_week after start_date, in the yyyy-MM-dd format. day_of_week indicates a day in a week, for example, Monday or Friday.

trunc(string date, string format)

+

quarter(string date)

STRING

+

INT

Reset the date in a specified format. Supported formats are MONTH/MON/MM and YEAR/YYYY/YY. Example: trunc('2015-03-17', 'MM') = 2015-03-01.

+

Returns the quarter of the date, timestamp, or string, for example, quarter('2015-04-01')=2.

months_between(string date1, string date2)

+

second(string date)

DOUBLE

+

INT

Return number of months between dates date1 and date2.

+

Returns the second (from 0 to 59) of a specified time.

date_format(date/timestamp/string ts, string fmt)

+

to_char(string date, string format)

STRING

+

STRING

Return the formatted value of date/timestamp/string. The Java SimpleDateFormat format is supported. Example: date_format('2015-04-08', 'y') = '2015'.

-

In the format, y indicates the year. Y indicates the year when the current week is located. A week starts from Sunday and ends on Saturday. If a week crosses years, this week is counted as the next year.

+

Converts a date into a string in a specified format.

+

to_date(string timestamp)

+

DATE

+

Returns the date part of a time string, for example, to_date("1970-01-01 00:00:00") = "1970-01-01".

+

to_date1(string date, string format)

+

STRING

+

The value is of the STRING type, in the yyyy-mm-dd hh:mi:ss format. If the value of date or format is NULL, NULL is returned.

+

Converts a string in a specified format to a date value.

+

to_utc_timestamp(string timestamp, string timezone)

+

TIMESTAMP

+

Converts a timestamp in a given time zone to a UTC timestamp.

+

trunc(string date, string format)

+

DATE

+

Resets the date in a specified format. Supported formats are MONTH/MON/MM and YEAR/YYYY/YY, for example, trunc('2015-03-17', 'MM') = 2015-03-01.

+

unix_timestamp(string timestamp, string pattern)

+

BIGINT

+

Returns a Unix timestamp (the number of seconds since 1970-01-01 00:00:00) as an unsigned integer if the function is called without parameters.

+

weekday(string date)

+

INT

+

Returns the day of the week of a specified date.

+

weekofyear(string date)

+

INT

+

Returns the week number (from 0 to 53) of a specified date.

+

year(string date)

+

INT

+

Returns the year of a specified date.

Table 1 String functions

Function

+
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -229,7 +294,7 @@
diff --git a/docs/dli/sqlreference/dli_08_0068.html b/docs/dli/sqlreference/dli_08_0068.html index 55c28034..a784f770 100644 --- a/docs/dli/sqlreference/dli_08_0068.html +++ b/docs/dli/sqlreference/dli_08_0068.html @@ -1,123 +1,121 @@ -

Aggregate Functions

-

An aggregate function performs a calculation operation on a set of input values and returns a value. For example, the COUNT function counts the number of rows retrieved by an SQL statement. Table 1 lists aggregate functions.

+

Overview

+

Table 1 lists the aggregate functions supported by DLI.

-
Table 1 String functions

Syntax

Return Type

+

Value Type

Description

+

Description

ascii(string str)

+

ascii(string <str>)

INT

+

BIGINT

Returns the numeric value of the first character in a string.

+

Returns the numeric value of the first character in a string.

concat(string A, string B...)

+

concat(array<T> <a>, array<T> <b>[,...]), concat(string <str1>, string <str2>[,...])

STRING

+

ARRAY or STRING

Return a string resulting from concatenating the input strings. This function can take any number of input strings.

+

Returns a string concatenated from multiple input strings. This function can take any number of input strings.

concat_ws(string SEP, string A, string B...)

+

concat_ws(string <separator>, string <str1>, string <str2>[,...]), concat_ws(string <separator>, array<string> <a>)

STRING

+

ARRAY or STRUCT

Return a string resulting from concatenating the input strings, which are separated by specified separators.

+

Returns a string concatenated from multiple input strings that are separated by specified separators.

encode(string src, string charset)

+

char_matchcount(string <str1>, string <str2>)

BINARY

+

BIGINT

Encode src in the encoding mode specified by charset.

+

Returns the number of characters in str1 that appear in str2.

find_in_set(string str, string strList)

+

encode(string <str>, string <charset>)

INT

+

BINARY

Return the position of the first occurrence of str in strList. If the value of any parameter is NULL, NULL is returned. If the first parameter contains a comma (,), 0 is returned.

+

Returns strs encoded in charset format.

get_json_object(string json_string, string path)

+

find_in_set(string <str1>, string <str2>)

STRING

+

BIGINT

Parse the JSON object in a specified JSON path. The function will return NULL if the JSON object is invalid.

+

Returns the position (starting from 1) of str1 in str2 separated by commas (,).

instr(string str, string substr)

+

get_json_object(string <json>, string <path>)

INT

+

STRING

Return the position of the first occurrence of substr in str. Return NULL if NULL is contained in the parameters and return 0 if substr does not exist in str. Note that the subscripts start from 1.

+

Parses the JSON object in a specified JSON path. The function will return NULL if the JSON object is invalid.

length(string A)

+

instr(string <str>, string <substr>)

INT

+

INT

Return the length of a string.

+

Returns the index of substr that appears earliest in str. Returns NULL if either of the arguments are NULL and returns 0 if substr does not exist in str. Note that the first character in str has index 1.

locate(string substr, string str[, int pos])

+

instr1(string <str1>, string <str2>[, bigint <start_position>[, bigint <nth_appearance>]])

INT

+

BIGINT

Return the position of the first occurrence of substr in str after position pos (starting from 1).

+

Returns the position of str2 in str1.

lower(string A) lcase(string A)

+

initcap(string A)

STRING

+

STRING

Convert all characters of a string to lower case.

+

Converts the first letter of each word of a string to upper case and all other letters to lower case.

lpad(string str, int len, string pad)

+

keyvalue(string <str>,[string <split1>,string <split2>,] string <key>)

STRING

+

STRING

Return a string of a specified length. If the length of the given string (str) is shorter than the specified length (len), the given string is left-padded with pad to the specified length.

+

Splits str by split1, converts each group into a key-value pair by split2, and returns the value corresponding to the key.

ltrim(string A)

+

length(string <str>)

STRING

+

BIGINT

Trim spaces from the left hand side of a string.

+

Returns the length of a string.

parse_url(string urlString, string partToExtract [, string keyToExtract])

+

lengthb(string <str>)

STRING

+

STRING

Return the specified part of the specified URL. Valid values of partToExtract include HOST, PATH, QUERY, REF, PROTOCOL, AUTHORITY, FILE, and USERINFO.

-

For example, parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'HOST') returns 'facebook.com'.

-

When the second parameter is QUERY, the third parameter can be used to extract the value of a specific parameter. For example, parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k1') returns 'v1'.

+

Returns the length of a specified string in bytes.

printf(String format, Obj... args)

+

levenshtein(string A, string B)

STRING

+

INT

Print the input according to a specified format.

+

Returns the Levenshtein distance between two strings, for example, levenshtein('kitten','sitting') = 3.

regexp_extract(string subject, string pattern, int index)

+

locate(string <substr>, string <str>[, bigint <start_pos>])

STRING

+

BIGINT

Extract the string specified by the regular expression. For example, regexp_extract('foothebar', 'foo(.*?)(bar)', 2) returns 'bar'.

+

Returns the position of substr in str.

regexp_replace(string A, string B, string C)

+

lower(string A) , lcase(string A)

STRING

+

STRING

Replace character B in string A with character C.

+

Converts all characters of a string to the lower case.

repeat(string str, int n)

+

lpad(string <str1>, int <length>, string <str2>)

STRING

+

STRING

Repeat a string N times.

+

Returns a string of a specified length. If the length of the given string (str1) is shorter than the specified length (length), the given string is left-padded with str2 to the specified length.

reverse(string A)

+

ltrim([<trimChars>,] string <str>)

STRING

+

STRING

Return the reversed string.

+

Trims spaces from the left hand side of a string.

rpad(string str, int len, string pad)

+

parse_url(string urlString, string partToExtract [, string keyToExtract])

STRING

+

STRING

Return a string of a specified length. If the length of the given string (str) is shorter than the specified length (len), the given string is right-padded with pad to the specified length.

+

Returns the specified part of a given URL. Valid values of partToExtract include HOST, PATH, QUERY, REF, PROTOCOL, AUTHORITY, FILE, and USERINFO.

+

For example, parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'HOST') returns 'facebook.com'.

+

When the second parameter is set to QUERY, the third parameter can be used to extract the value of a specific parameter. For example, parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k1') returns 'v1'.

rtrim(string A)

+

printf(String format, Obj... args)

STRING

+

STRING

Trim spaces from the right hand side of a string.

+

Prints the input in a specific format.

space(int n)

+

regexp_count(string <source>, string <pattern>[, bigint <start_position>])

STRING

+

BIGINT

Returns a specified number of spaces.

+

Returns the number of substrings that match a specified pattern in the source, starting from the start_position position.

substr(string A, int start) substring(string A, int start)

+

regexp_extract(string <source>, string <pattern>[, bigint <groupid>])

STRING

+

STRING

Return the substring starting from the specified start position in string A till the end of the string.

+

Matches the string source based on the pattern grouping rule and returns the string content that matches groupid.

substr(string A, int start, int len) substring(string A, int start, int len)

+

replace(string <str>, string <old>, string <new>)

STRING

+

STRING

Return the substring of a specified length starting from the specified start position in A string.

+

Replaces the substring that matches a specified string in a string with another string.

substring_index(string A, string delim, int count)

+
  • For Spark 2.4.5: regexp_replace(string <source>, string <pattern>, string <replace_string>)
  • For Spark 3.3.1: regexp_replace(string <source>, string <pattern>, string <replace_string>[, bigint <occurrence>])

STRING

+

STRING

Return the substring from string A before count occurrences of the delimiter delim.

+
  • For Spark 2.4.5: Replaces the substring that matches the pattern for the occurrence time in the source string and the substring that matches the pattern later with the specified string replace_string and returns the result string.
  • For Spark 3.3.1: Replaces the substring that matches the pattern for the occurrence time in the source string and the substring that matches the pattern later with the specified string replace_string and returns the result string.

translate(string|char|varchar input, string|char|varchar from, string|char|varchar to)

+

regexp_replace1(string <source>, string <pattern>, string <replace_string>[, bigint <occurrence>])

STRING

+

STRING

Translate the input string by replacing the characters or string specified by from with the characters or string specified by to. For example, replace bcd in abcde with BCD using translate("abcde", "bcd", "BCD").

+

Replaces the substring that matches pattern for the occurrence time in the source string with the specified string replace_string and returns the result string.

trim(string A)

+

regexp_instr(string <source>, string <pattern>[,bigint <start_position>[, bigint <occurrence>[, bigint <return_option>]]])

STRING

+

BIGINT

Trim spaces from both ends of a string.

+

Returns the start or end position of the substring that matches a specified pattern for the occurrence time, starting from start_position in the source string.

upper(string A) ucase(string A)

+

regexp_substr(string <source>, string <pattern>[, bigint <start_position>[, bigint <occurrence>]])

STRING

+

STRING

Convert all characters of a string to upper case.

+

Returns the substring that matches a specified pattern for the occurrence time, starting from start_position in the source string.

initcap(string A)

+

repeat(string <str>, bigint <n>)

STRING

+

STRING

Convert the first letter of each word of a string to upper case and all other letters to lower case.

+

Repeats a string for N times.

levenshtein(string A, string B)

+

reverse(string <str>)

INT

+

STRING

Return the Levenshtein distance between two strings. Example: levenshtein('kitten', 'sitting') = 3.

+

Returns a string in reverse order.

soundex(string A)

+

rpad(string <str1>, int <length>, string <str2>)

STRING

+

STRING

Return the soundex string from str. Example: soundex('Miller') = M460.

+

Right-pads str1 with str2 to the specified length.

+

rtrim([<trimChars>, ]string <str>),

+

rtrim(trailing [<trimChars>] from <str>)

+

STRING

+

Trims spaces from the right hand side of a string.

+

soundex(string <str>)

+

STRING

+

Returns the soundex string from str, for example, soundex('Miller') = M460.

+

space(bigint <n>)

+

STRING

+

Returns a specified number of spaces.

+

substr(string <str>, bigint <start_position>[, bigint <length>]), substring(string <str>, bigint <start_position>[, bigint <length>])

+

STRING

+

Returns the substring of str, starting from start_position and with a length of length.

+

substring_index(string <str>, string <separator>, int <count>)

+

STRING

+

Truncates the string before the count separator of str. If the value of count is positive, the string is truncated from the left. If the value of count is negative, the string is truncated from the right.

+

split_part(string <str>, string <separator>, bigint <start>[, bigint <end>])

+

STRING

+

Splits a specified string based on a specified separator and returns a substring from the start to end position.

+

translate(string|char|varchar input, string|char|varchar from, string|char|varchar to)

+

STRING

+

Translates the input string by replacing the characters or string specified by from with the characters or string specified by to. For example, replaces bcd in abcde with BCD using translate("abcde", "bcd", "BCD").

+

trim([<trimChars>,]string <str>),

+

trim([BOTH] [<trimChars>] from <str>)

+

STRING

+

Trims spaces from both ends of a string.

+

upper(string A), ucase(string A)

+

STRING

+

Converts all characters of a string to the upper case.

Table 1 Aggregate functions

Function

+
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Table 1 Aggregate functions

Syntax

Return Type

+

Value Type

Description

+

Description

count(*), count(expr), count(DISTINCT expr[, expr...])

+

avg(col), avg(DISTINCT col)

BIGINT

+

DOUBLE

Return the total number of retrieved records.

+

Returns the average value.

sum(col), sum(DISTINCT col)

+

corr(col1, col2)

DOUBLE

+

DOUBLE

Return the sum of the values in a column.

+

Returns the coefficient of correlation of a pair of numeric columns.

avg(col), avg(DISTINCT col)

+

count([distinct|all] <colname>)

DOUBLE

+

BIGINT

Return the average of the values in a column.

+

Returns the number of records.

min(col)

+

covar_pop(col1, col2)

DOUBLE

+

DOUBLE

Return the minimum value of a column.

+

Returns the covariance of a pair of numeric columns.

max(col)

+

covar_samp(col1, col2)

DOUBLE

+

DOUBLE

Return the maximum value of a column.

+

Returns the sample covariance of a pair of numeric columns.

variance(col), var_pop(col)

+

max(col)

DOUBLE

+

DOUBLE

Return the variance of a numeric column.

+

Returns the maximum value.

var_samp(col)

+

min(col)

DOUBLE

+

DOUBLE

Return the sample variance of a numeric column.

+

Returns the minimum value.

stddev_pop(col)

+

percentile(BIGINT col, p)

DOUBLE

+

DOUBLE

Return the deviation of a numeric column.

+

Returns the percentage value point of the value area. The value of p must be between 0 and 1. Otherwise, NULL is returned. The value cannot be a float.

stddev_samp(col)

+

percentile_approx(DOUBLE col, p [, B])

DOUBLE

+

DOUBLE

Return the sample deviation of a numeric column.

+

Returns the approximate pth percentile of a numerical column within the group, including floating-point numbers. The value of p should be between 0 and 1. The parameter B controls the accuracy of the approximation, with a higher value of B resulting in a higher level of approximation. The default value is 10000. If the number of non-repeating values in the column is less than B, an exact percentile is returned.

covar_pop(col1, col2)

+

stddev_pop(col)

DOUBLE

+

DOUBLE

Return the covariance of a pair of numeric columns.

+

Returns the deviation of a specified column.

covar_samp(col1, col2)

+

stddev_samp(col)

DOUBLE

+

DOUBLE

Return the sample covariance of a pair of numeric columns.

+

Returns the sample deviation of a specified column.

corr(col1, col2)

+

sum(col), sum(DISTINCT col)

DOUBLE

+

DOUBLE

Return the coefficient of correlation of a pair of numeric columns.

+

Returns the sum of the values in a column.

percentile(BIGINT col, p)

+

variance(col), var_pop(col)

DOUBLE

+

DOUBLE

Return the exact pth percentile of a column. p must be between 0 and 1. Otherwise, this function returns null. This function does not work with floating point types.

+

Returns the variance of a column.

percentile_approx(DOUBLE col, p [, B])

+

var_samp(col)

DOUBLE

+

DOUBLE

Return an approximate pth percentile of a numeric column (including floating point types) in a group. p must be between 0 and 1. B controls approximation accuracy. Higher values of B mean better approximations, and the default value is 10,000. When the number of distinct values in the numeric column is smaller than B, an exact percentile value is returned.

+

Returns the sample variance of a specified column.

-

Functions such as var_pop, stddev_pop, var_samp, stddev_samp, covar_pop, covar_samp, corr, and percentile_approx, do not support non-numeral data types, such as TimeStamp.

-
diff --git a/docs/dli/sqlreference/dli_08_0069.html b/docs/dli/sqlreference/dli_08_0069.html index f90a555e..3071badf 100644 --- a/docs/dli/sqlreference/dli_08_0069.html +++ b/docs/dli/sqlreference/dli_08_0069.html @@ -1,70 +1,70 @@ -

Window Functions

-

A window function performs a calculation operation on a set of values related to the current value. A window function can be an aggregate function used in the GROUP BY clause, such as sum, max, min, count, and avg functions. The window functions also include the functions listed in Table 1. A window contains multiple rows defined by an OVER clause. A window function works on one window.

+

Overview

+

Table 1 lists the window functions supported by DLI.

-
Table 1 Functions

Function

+
- - - - - - - - - - - - - - - - - - - - - - - - - - @@ -73,7 +73,7 @@
diff --git a/docs/dli/sqlreference/dli_08_0071.html b/docs/dli/sqlreference/dli_08_0071.html index 827c5786..a3e6156e 100644 --- a/docs/dli/sqlreference/dli_08_0071.html +++ b/docs/dli/sqlreference/dli_08_0071.html @@ -12,11 +12,11 @@ -

Keyword

  • IF NOT EXISTS: Prevents system errors if the database to be created exists.
  • COMMENT: Describes a database.
+

Keywords

  • IF NOT EXISTS: Prevents system errors if the database to be created exists.
  • COMMENT: Describes a database.
  • DBPROPERTIES: Specifies database attributes. The attribute name and attribute value appear in pairs.

Parameters

-
Table 1 Window functions

Syntax

Return Type

+

Value Type

Description

+

Description

first_value(col)

+

cume_dist()

Data type of the argument.

+

DOUBLE

Return the value of the first data record from a column.

+

Returns the cumulative distribution, which is equivalent to calculating the proportion of data in the partition that is greater than or equal to, or less than or equal to, the current row.

last_value(col)

+

first_value(col)

Data type of the argument.

+

Data type of the argument

Return the value of the last data record from a column.

+

Returns the value of the first data record in a column in a result set.

lag (col,n,DEFAULT)

+

last_value(col)

Data type of the argument.

+

Data type of the argument

Return the value from the nth row preceding the current row. The first argument specifies the column name. The second argument specifies the nth row preceding the current row. The configuration of the second argument is optional, and the default argument value is 1 if the argument is not specified. The third argument is set to a default value. If the nth row preceding the current row is null, the default value is used. The default value of the third argument is NULL if the argument is not specified.

+

Returns the value of the last data record from a column.

lead (col,n,DEFAULT)

+

lag (col,n,DEFAULT)

Data type of the argument.

+

Data type of the argument

Return the value from the nth row following the current row. The first argument specifies the column name. The second argument specifies the nth row following the current row. The configuration of the second argument is optional, and the default argument value is 1 if the argument is not specified. The third argument is set to a default value. If the nth row following the current row is null, the default value is used. The default value of the third argument is NULL if the argument is not specified.

+

Returns the value from the nth row preceding the current row. The first argument specifies the column name. The second argument specifies the nth row preceding the current row. The configuration of the second argument is optional, and the default argument value is 1 if the argument is not specified. The third argument is set to a default value. If the nth row preceding the current row is null, the default value is used. The default value of the third argument is NULL if the argument is not specified.

row_number() over (order by col_1[,col_2 ...])

+

lead (col,n,DEFAULT)

INT

+

Data type of the argument

Assign a unique number to each row.

+

Returns the value from the nth row following the current row. The first argument specifies the column name. The second argument specifies the nth row following the current row. The configuration of the second argument is optional, and the default argument value is 1 if the argument is not specified. The third argument is set to a default value. If the nth row following the current row is null, the default value is used. The default value of the third argument is NULL if the argument is not specified.

rank()

+

percent_rank()

INT

+

DOUBLE

Return the rank of a value in a set of values. When multiple values share the same rank, the next rank in the sequence is not consecutive.

+

Returns the rank of a value from the column specified by the ORDER BY clause of the window. The return value is a decimal between 0 and 1, which is calculated using (RANK - 1)/(Number of rows in the partition - 1).

cume_dist()

+

rank()

DOUBLE

+

INT

Calculate the relative position of a value in a row.

+

Returns the rank of a value in a set of values. When multiple values share the same rank, the next rank in the sequence is not consecutive.

percent_rank()

+

row_number() over (order by col_1[,col_2 ...])

DOUBLE

+

INT

Return the rank of a value from the column specified by the ORDER BY clause of the window. The return value is a decimal between 0 and 1, which is calculated using (RANK - 1)/(Number of rows in the partition - 1).

+

Assigns a unique number to each row.

Table 1 Parameter description

Parameter

+
diff --git a/docs/dli/sqlreference/dli_08_0072.html b/docs/dli/sqlreference/dli_08_0072.html index 8d7c3d1d..39d339a1 100644 --- a/docs/dli/sqlreference/dli_08_0072.html +++ b/docs/dli/sqlreference/dli_08_0072.html @@ -8,12 +8,12 @@ -

Keyword

IF EXISTS: Prevents system errors if the database to be deleted does not exist.

+

Keywords

IF EXISTS: Prevents system errors if the database to be deleted does not exist.

Precautions

  • DATABASE and SCHEMA can be used interchangeably. You are advised to use DATABASE.
  • RESTRICT: If the database is not empty (tables exist), an error is reported and the DROP operation fails. RESTRICT is the default logic.
  • CASCADE: Even if the database is not empty (tables exist), the DROP will delete all the tables in the database. Therefore, exercise caution when using this function.

Parameters

-
Table 1 Parameters

Parameter

Description

Table 1 Parameter description

Parameter

+
diff --git a/docs/dli/sqlreference/dli_08_0073.html b/docs/dli/sqlreference/dli_08_0073.html index 0f6c10d6..f588b2de 100644 --- a/docs/dli/sqlreference/dli_08_0073.html +++ b/docs/dli/sqlreference/dli_08_0073.html @@ -8,10 +8,10 @@ -

Keyword

EXTENDED: Displays the database properties.

+

Keywords

EXTENDED: Displays the database properties.

Parameters

-
Table 1 Parameter

Parameter

Description

Table 1 Parameter description

Parameter

+
diff --git a/docs/dli/sqlreference/dli_08_0074.html b/docs/dli/sqlreference/dli_08_0074.html index 23567df1..5fa60800 100644 --- a/docs/dli/sqlreference/dli_08_0074.html +++ b/docs/dli/sqlreference/dli_08_0074.html @@ -8,10 +8,10 @@ -

Keyword

None

+

Keywords

None

Parameters

-
Table 1 Parameter

Parameter

Description

Table 1 Parameter description

Parameter

+
diff --git a/docs/dli/sqlreference/dli_08_0076.html b/docs/dli/sqlreference/dli_08_0076.html index bfb5a9f8..e5cebdf7 100644 --- a/docs/dli/sqlreference/dli_08_0076.html +++ b/docs/dli/sqlreference/dli_08_0076.html @@ -3,12 +3,14 @@

Creating an OBS Table Using the DataSource Syntax

Function

Create an OBS table using the DataSource syntax.

The main differences between the DataSource and the Hive syntax lie in the supported data formats and the number of supported partitions. For details, see syntax and precautions.

+

You are advised to use the OBS parallel file system for storage. A parallel file system is a high-performance file system that provides latency in milliseconds, TB/s-level bandwidth, and millions of IOPS. It applies to interactive big data analysis scenarios.

+
-

Usage

  • The size of the table will not be calculated during table creation.
  • When data is added, the table size will be changed to 0.
  • You can view the table size on OBS.
-
-

Precautions

  • The table and column names are case-insensitive.
  • Descriptions of table names and column names support only string constants.
  • During table creation, you need to specify the column name and corresponding data type. The data type is primitive type.
  • If a folder and a file have the same name in the OBS directory, the file is preferred as the path when creating an OBS table.
  • During table creation, if the specified path is an OBS directory and it contains subdirectories (or nested subdirectories), all file types and content in the subdirectories are considered table content.

    Ensure that all file types in the specified directory and its subdirectories are consistent with the storage format specified in the table creation statement. All file content must be consistent with the fields in the table. Otherwise, errors will be reported in the query.

    -

    You can set multiLevelDirEnable to true in the OPTIONS statement to query the content in the subdirectory. The default value is false (Note that this configuration item is a table attribute, exercise caution when performing this operation). Hive tables do not support this configuration item.

    -
  • The OBS storage path must be a directory on the OBS. The directory must be created in advance and be empty.
  • When a partitioned table is created, the column specified in PARTITIONED BY must be a column in the table, and the partition type must be specified. The partition column supports only the string, boolean, tinyint, smallint, short, int, bigint, long, decimal, float, double, date, and timestamp type.
  • When a partitioned table is created, the partition field must be the last one or several fields of the table field, and the sequence of the partition fields must be the same. Otherwise, an error occurs.
  • A maximum of 7,000 partitions can be created in a single table.
  • The CREATE TABLE AS statement cannot specify table attributes or create partitioned tables.
+

Precautions

  • The size of a table is not calculated when the table is created.
  • When data is added, the table size will be changed to 0.
  • You can check the table size on OBS.
  • Table properties cannot be specified using CTAS table creation statements.
  • An OBS directory containing subdirectories:

    If you specify an OBS directory that contains subdirectories when creating a table, all file types and content within those subdirectories will also be included as table content.

    +

    Ensure that all file types in the specified directory and its subdirectories are consistent with the storage format specified in the table creation statement. All file content must be consistent with the fields in the table. Otherwise, errors will be reported in the query.

    +

    You can set multiLevelDirEnable to true in the OPTIONS statement to query the content in the subdirectory. The default value is false (Note that this configuration item is a table attribute, exercise caution when performing this operation). Hive tables do not support this configuration item.

    +
  • Instructions on using partitioned tables:
    • When a partitioned table is created, the column specified in PARTITIONED BY must be a column in the table, and the partition type must be specified. The partition column supports only the string, boolean, tinyint, smallint, short, int, bigint, long, decimal, float, double, date, and timestamp type.
    • When a partitioned table is created, the partition field must be the last one or several fields of the table field, and the sequence of the partition fields must be the same. Otherwise, an error occurs.
    • A maximum of 200,000 partitions can be created in a single table.
    • CTAS table creation statements cannot be used to create partitioned tables.
    +

Syntax

Table 1 Parameter

Parameter

Description

1
 2
@@ -22,233 +24,438 @@
   [OPTIONS (path 'obs_path', key1=val1, key2=val2, ...)] 
   [PARTITIONED BY (col_name1, col_name2, ...)]
   [COMMENT table_comment]
-  [AS select_statement];
+  [AS select_statement]
 
-

Keyword

  • IF NOT EXISTS: Prevents system errors when the created table exists.
  • USING: Specifies the storage format.
  • OPTIONS: Specifies the attribute name and attribute value when a table is created.
  • COMMENT: Field or table description.
  • PARTITIONED BY: Partition field.
  • AS: Run the CREATE TABLE AS statement to create a table.
+

Keywords

  • IF NOT EXISTS: Prevents system errors when the created table exists.
  • USING: Storage format.
  • OPTIONS: Property name and property value when a table is created.
  • COMMENT: Field or table description.
  • PARTITIONED BY: Partition field.
  • AS: Run the CREATE TABLE AS statement to create a table.
-

Parameter

-
Table 1 Parameter description

Parameter

+

Parameters

+
- + - - + - - + - - + - - + - - + - - + - - + - - + - - + + + +
Table 1 Parameters

Parameter

Description

+

Mandatory

+

Description

db_name

+

db_name

Database name

-

The value can contain letters, numbers, and underscores (_), but cannot contain only numbers or start with a number or underscore (_).

+

No

+

Database name

+

The value can contain letters, numbers, and underscores (_), but it cannot contain only numbers or start with a number or underscore (_).

table_name

+

table_name

Name of the table to be created in the database

-

The value can contain letters, numbers, and underscores (_), but cannot contain only numbers or start with a number or underscore (_). The matching rule is ^(?!_)(?![0-9]+$)[A-Za-z0-9_$]*$.

-

Special characters must be enclosed in single quotation marks ('').

+

Yes

+

Name of the table to be created in the database

+

The value can contain letters, numbers, and underscores (_), but it cannot contain only numbers or start with a number or underscore (_). The matching rule is ^(?!_)(?![0-9]+$)[A-Za-z0-9_$]*$.

+

Special characters must be enclosed in single quotation marks ('').

+

The table name is case insensitive.

col_name

+

col_name

Column names with data types separated by commas (,)

-

The column name contains letters, digits, and underscores (_). It cannot contain only digits and must contain at least one letter.

+

Yes

+

Column names with data types separated by commas (,)

+

The column name can contain letters, numbers, and underscores (_), but it cannot contain only numbers and must contain at least one letter.

+

The column name is case insensitive.

col_type

+

col_type

Data type of a column field

+

Yes

+

Data type of a column field, which is primitive.

col_comment

+

col_comment

Column field description

+

No

+

Column field description, which can only be string constants.

file_format

+

file_format

Input format of the table. The value can be orc, parquet, json, csv, or avro.

+

Yes

+

Format of the table to be created, which can be orc, parquet, json, csv, or avro.

path

+

path

OBS storage path where data files are stored

-

Format: obs://bucketName/tblPath

-

bucketName: bucket name

-

tblPath: directory name. You do not need to specify the file name following the directory.

-

For details about attribute names and values during table creation, see Table 2.

-

For details about the table attribute names and values when file_format is set to csv, see Table 2 and Table 3.

+

Yes

+

OBS storage path where data files are stored. You are advised to use an OBS parallel file system for storage.

+

Format: obs://bucketName/tblPath

+

bucketName: bucket name

+

tblPath: directory name. You do not need to specify the file name following the directory.

+

Refer to Table 2 for details about property names and values during table creation.

+

Refer to Table 2 and Table 3 for details about the table property names and values when file_format is set to csv.

+

If there is a folder and a file with the same name in the OBS directory, the path pointed to by the OBS table will prioritize the file over the folder.

table_comment

+

table_comment

Description of the table

+

No

+

Table description, which can only be string constants.

select_statement

+

select_statement

The CREATE TABLE AS statement is used to insert the SELECT query result of the source table or a data record to a new table in OBS bucket.

+

No

+

Used in the CREATE TABLE AS statement to insert the SELECT query results of the source table or a data record to a table newly created in the OBS bucket.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + +
Table 2 OPTIONS parameters

Parameter

+

Mandatory

+

Description

+

path

+

No

+

Path where the table is stored, which currently can only be an OBS directory

+

multiLevelDirEnable

+

No

+

Whether data in subdirectories is iteratively queried when there are nested subdirectories. When this parameter is set to true, all files in the table path, including files in subdirectories, are iteratively read when a table is queried.

+

Default value: false

+

dataDelegated

+

No

+

Whether data in the path is cleared when deleting a table or partition

+

Default value: false

+

compression

+

No

+

Compression format. This parameter is typically required for Parquet files and is set to zstd.

+
+
+
When file_format is set to csv, you can set the following OPTIONS parameters: +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 OPTIONS parameters of the CSV data format

Parameter

+

Mandatory

+

Description

+

delimiter

+

No

+

Data separator

+

Default value: comma (,)

+

quote

+

No

+

Quotation character

+

Default value: double quotation marks ("")

+

escape

+

No

+

Escape character

+

Default value: backslash (\)

+

multiLine

+

No

+

Whether the column data contains carriage return or newline characters. The value true indicates yes and the value false indicates no.

+

Default value: false

+

dateFormat

+

No

+

Date format of the date field in a CSV file

+

Default value: yyyy-MM-dd

+

timestampFormat

+

No

+

Date format of the timestamp field in a CSV file

+

Default value:

+

yyyy-MM-dd HH:mm:ss

+

mode

+

No

+

Mode for parsing CSV files. Default value: PERMISSIVE. The options are as follows:

+
  • PERMISSIVE: Permissive mode. If an incorrect field is encountered, set the line to Null.
  • DROPMALFORMED: When an incorrect field is encountered, the entire line is discarded.
  • FAILFAST: Error mode. If an error occurs, it is automatically reported.
+

header

+

No

+

Whether the CSV file contains header information. The value true indicates that the table header information is contained, and the value false indicates that the information is not included.

+

Default value: false

+

nullValue

+

No

+

Character that represents the null value. For example, nullValue="nl" indicates that nl represents the null value.

+

comment

+

No

+

Character that indicates the beginning of the comment. For example, comment= '#' indicates that the line starting with # is a comment.

+

compression

+

No

+

Data compression format. Currently, gzip, bzip2, and deflate are supported. If you do not want to compress data, enter none.

+

Default value: none

+

encoding

+

No

+

Data encoding format. Available values are utf-8, gb2312, and gbk. Value utf-8 will be used if this parameter is left empty.

+

Default value: utf-8

- -
- - - - - - - - - - - - - - - - - - - - - -
Table 2 OPTIONS parameter description

Parameter

-

Description

-

Default Value

-

path

-

Specified table storage location. Currently, only OBS is supported.

-

-

-

multiLevelDirEnable

-

Whether to iteratively query data in subdirectories when subdirectories are nested. When this parameter is set to true, all files in the table path, including files in subdirectories, are iteratively read when a table is queried.

-

false

-

dataDelegated

-

Whether to clear data in the path when deleting a table or partition

-

false

-

compression

-

Specified compression format. Generally, you need to set this parameter to zstd for parquet files.

-

-

-
-
When the file format is set to CSV, you can set the following OPTIONS parameters: -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Table 3 OPTIONS parameter description of the CSV data format

Parameter

-

Description

-

Default Value

-

delimiter

-

Data separator

-

Comma (,)

-

quote

-

Quotation character

-

Double quotation marks (" ")

-

escape

-

Escape character

-

Backslash (\)

-

multiLine

-

Whether the column data contains carriage return characters or transfer characters. The value true indicates yes and the value false indicates no.

-

false

-

dateFormat

-

Date format of the date field in a CSV file

-

yyyy-MM-dd

-

timestampFormat

-

Date format of the timestamp field in a CSV file

-

yyyy-MM-dd HH:mm:ss

-

mode

-

Mode for parsing CSV files. The options are as follows:

-
  • PERMISSIVE: Permissive mode. If an incorrect field is encountered, set the line to Null.
  • DROPMALFORMED: When an incorrect field is encountered, the entire line is discarded.
  • FAILFAST: Error mode. If an error occurs, it is automatically reported.
-

PERMISSIVE

-

header

-

Whether CSV contains header information. The value true indicates that the table header information is contained, and the value false indicates that the information is not included.

-

false

-

nullValue

-

Character that represents the null value. For example, nullValue= "\\N" indicates that \N represents the null value.

-

-

-

comment

-

Character that indicates the beginning of the comment. For example, comment= '#' indicates that the line starting with # is a comment.

-

-

-

compression

-

Data compression format. Currently, gzip, bzip2, and deflate are supported. If you do not want to compress data, enter none.

-

none

-

encoding

-

Data encoding format. Available values are utf-8, gb2312, and gbk. Value utf-8 will be used if this parameter is left empty.

-

utf-8

-
-
-
-

Example

  • Create a parquetTable OBS table.
    1
    CREATE TABLE parquetTable (name string, id int) USING parquet OPTIONS (path "obs://bucketName/filePath");
    +

    Example 1: Creating an OBS Non-Partitioned Table

    Example description: Create an OBS non-partitioned table named table1 and use the USING keyword to set the storage format of the table to orc.

    +

    You can store OBS tables in parquet, json, or avro format.

    +
    1
    +2
    +3
    +4
    +5
    CREATE TABLE IF NOT EXISTS table1 (
    +    col_1   STRING,
    +    col_2   INT)
    +USING orc
    +OPTIONS (path 'obs://bucketName/filePath');
     
    -
  • Create a parquetZstdTable OBS table and set the compression format to zstd.
    CREATE TABLE parquetZstdTable (name string, id string) USING parquet OPTIONS (path "obs://bucketName/filePath",compression='zstd');
    -
  • Create a student table that has two fields, name and score, and partition the table by classNo.
    1
    CREATE TABLE IF NOT EXISTS student(name STRING, score DOUBLE, classNo INT) USING csv OPTIONS (PATH 'obs://bucketName/filePath') PARTITIONED BY (classNo);
    +
    +

    Example 2: Creating an OBS Partitioned Table

    Example description: Create a partitioned table named student. The partitioned table is partitioned using facultyNo and classNo. The student table is partitioned by faculty number (facultyNo) and class number (classNo).

    +

    In practice, you can select a proper partitioning field and add it to the brackets following the PARTITIONED BY keyword.

    +
    1
    +2
    +3
    +4
    +5
    +6
    +7
    CREATE TABLE IF NOT EXISTS student (
    +    Name        STRING,
    +    facultyNo   INT,
    +    classNo     INT)
    +USING csv
    +OPTIONS (path 'obs://bucketName/filePath')
    +PARTITIONED BY (facultyNo, classNo);
     
    -

    The classNo field is a partition field and must be placed at the end of the table field, that is, student(name STRING, score DOUBLE, classNo INT).

    -
    -
  • To create table t1 and insert data of table t2 into table t1, run the following statement:
    1
    CREATE TABLE t1 USING parquet OPTIONS(path 'obs://bucketName/tblPath') AS select * from t2;
    +
    +

    Example 3: Using CTAS to Create an OBS Non-Partitioned Table Using All or Part of the Data in the Source Table

    Example description: Based on the OBS table table1 created in Example 1: Creating an OBS Non-Partitioned Table, use the CTAS syntax to copy data from table1 to table1_ctas.

    +

    When using CTAS to create a table, you can ignore the syntax used to create the table being copied. This means that regardless of the syntax used to create table1, you can use the DataSource syntax to create table1_ctas.

    +

    In addition, in this example, the storage format of table1 is orc, and the storage format of table1_ctas may be parquet. This means that the storage format of the table created by CTAS may be different from that of the original table.

    +

    Use the SELECT statement following the AS keyword to select required data and insert the data to table1_ctas.

    +

    The SELECT syntax is as follows: SELECT <column name> FROM <table name> WHERE <related filter criteria>.

    +
    • In this example, SELECT * FROM table1 is used. * indicates that all columns are selected from table1 and all data in table1 is inserted into table1_ctas.
      1
      +2
      +3
      +4
      +5
      +6
      CREATE TABLE IF NOT EXISTS table1_ctas
      +USING parquet
      +OPTIONS (path 'obs://bucketName/filePath')
      +AS
      +SELECT  *
      +FROM    table1;
      +
      + +
      +
    • To filter and insert data into table1_ctas in a customized way, you can use the following SELECT statement: SELECT col_1 FROM table1 WHERE col_1 = 'Ann'. This will allow you to select only col_1 from table1 and insert data into table1_ctas where the value equals 'Ann'.
      1
      +2
      +3
      +4
      +5
      +6
      +7
      CREATE TABLE IF NOT EXISTS table1_ctas
      +USING parquet
      +OPTIONS (path 'obs://bucketName/filePath')
      +AS
      +SELECT  col_1
      +FROM    table1
      +WHERE   col_1 = 'Ann';
       
    +

    Example 4: Creating an OBS Non-Partitioned Table and Customizing the Data Type of a Column Field

    Example description: Create an OBS non-partitioned table named table2. You can customize the native data types of column fields based on service requirements.

    +
    • STRING, CHAR, or VARCHAR can be used for text characters.
    • TIMESTAMP or DATE can be used for time characters.
    • INT, SMALLINT/SHORT, BIGINT/LONG, or TINYINT can be used for integer characters.
    • FLOAT, DOUBLE, or DECIMAL can be used for decimal calculation.
    • BOOLEAN can be used if only logical switches are involved.
    +

    For details, see "Data Types" > "Primitive Data Types".

    +
     1
    + 2
    + 3
    + 4
    + 5
    + 6
    + 7
    + 8
    + 9
    +10
    +11
    +12
    +13
    +14
    +15
    +16
    +17
    CREATE TABLE IF NOT EXISTS table2 (
    +    col_01  STRING,
    +    col_02  CHAR (2),
    +    col_03  VARCHAR (32),
    +    col_04  TIMESTAMP,
    +    col_05  DATE,
    +    col_06  INT,
    +    col_07  SMALLINT,
    +    col_08  BIGINT,
    +    col_09  TINYINT,
    +    col_10  FLOAT,
    +    col_11  DOUBLE,
    +    col_12  DECIMAL (10, 3),
    +    col_13  BOOLEAN
    +)
    +USING parquet
    +OPTIONS (path 'obs://bucketName/filePath');
    +
    + +
    +
    +

    Example 5: Creating an OBS Partitioned Table and Customizing OPTIONS Parameters

    Example description: When creating an OBS table, you can customize property names and values. For details about OPTIONS parameters, see Table 2.

    +

    In this example, an OBS partitioned table named table3 is created and partitioned based on col_2. Configure path, multiLevelDirEnable, dataDelegated, and compression in OPTIONS.

    +
    • path: OBS storage path. In this example, the value is obs://bucketName/filePath, where bucketName indicates the bucket name and filePath indicates the actual directory name.
    • In big data scenarios, you are advised to use the OBS parallel file system for storage.
    • multiLevelDirEnable: In this example, set this parameter to true, indicating that all files and subdirectories in the table path are read iteratively when the table is queried. If this parameter is not required, set it to false or leave it blank (the default value is false).
    • dataDelegated: In this example, set this parameter to true, indicating that all data in the path is deleted when a table or partition is deleted. If this parameter is not required, set it to false or leave it blank (the default value is false).
    • compression: If the created OBS table needs to be compressed, you can use the keyword compression to configure the compression format. In this example, the zstd compression format is used.
    +
     1
    + 2
    + 3
    + 4
    + 5
    + 6
    + 7
    + 8
    + 9
    +10
    +11
    +12
    CREATE TABLE IF NOT EXISTS table3 (
    +    col_1   STRING,
    +    col_2   int
    +)
    +USING parquet
    +PARTITIONED BY (col_2)
    +OPTIONS (
    +    path 'obs://bucketName/filePath',
    +    multiLevelDirEnable = true,
    +    dataDelegated = true,
    +    compression = 'zstd'
    +);
    +
    + +
    +
    +

    Example 6: Creating an OBS Non-Partitioned Table and Customizing OPTIONS Parameters

    Example description: A CSV table is a file format that uses commas to separate data values in plain text. It is commonly used for storing and sharing data, but it is not ideal for complex data types due to its lack of structured data concepts. So, when file_format is set to csv, more OPTIONS parameters can be configured. For details, see Table 3.

    +

    In this example, a non-partitioned table named table4 is created with a csv storage format, and additional OPTIONS parameters are used to constrain the data.

    +
    • delimiter: data separator, indicating that commas (,) are used as separators between data
    • quote: quotation character, indicating that double quotation marks (") are used to quote the reference information in the data
    • escape: escape character, indicating that backslashes (\) are used as the escape character for data storage
    • multiLine: indicates whether the column data to be stored contains carriage return or newline characters. It is set to false in this example, meaning the data does not contain them.
    • dateFormat: indicates that the date format of the date field in the CSV file is yyyy-MM-dd.
    • timestampFormat: indicates that the timestamp format in the CSV file is yyyy-MM-dd HH:mm:ss.
    • header: indicates that the CSV table contains the table header information.
    • nullValue: indicates that null is set to indicate the null value in the CSV table.
    • comment: indicates that the CSV table uses a slash (/) to indicate the beginning of a comment.
    • compression: indicates that the CSV table is compressed in the gzip, bzip2, or deflate format. If compression is not required, set this parameter to none.
    • encoding: indicates that the table uses the utf-8 encoding format. You can choose utf-8, gb2312, or gbk as needed. The default encoding format is utf-8.
    +
     1
    + 2
    + 3
    + 4
    + 5
    + 6
    + 7
    + 8
    + 9
    +10
    +11
    +12
    +13
    +14
    +15
    +16
    +17
    +18
    +19
    +20
    CREATE TABLE IF NOT EXISTS table4 (
    +    col_1 STRING,
    +    col_2 INT
    +)
    +USING csv
    +OPTIONS (
    +    path 'obs://bucketName/filePath',
    +    delimiter       = ',',
    +    quote            = '#',
    +    escape           = '|',
    +    multiline        = false,
    +    dateFormat       = 'yyyy-MM-dd',
    +    timestampFormat  = 'yyyy-MM-dd HH:mm:ss',
    +    mode             = 'failfast',
    +    header           = true,
    +    nullValue        = 'null',
    +    comment          = '*',
    +    compression      = 'deflate',
    +    encoding         = 'utf-8'
    +);
    +
    + +
    +