diff --git a/docs/dli/api-ref/ALL_META.TXT.json b/docs/dli/api-ref/ALL_META.TXT.json new file mode 100644 index 00000000..59cb6e9c --- /dev/null +++ b/docs/dli/api-ref/ALL_META.TXT.json @@ -0,0 +1,1162 @@ +[ + { + "uri":"dli_02_0500.html", + "product_code":"dli", + "code":"1", + "des":"Public cloud APIs comply with the RESTful API design principles. REST-based Web services are organized into resources. Each resource is identified by one or more Uniform ", + "doc_type":"api", + "kw":"Calling APIs,API Reference", + "title":"Calling APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0181.html", + "product_code":"dli", + "code":"2", + "des":"This section describes the APIs provided by DLI.", + "doc_type":"api", + "kw":"Overview,API Reference", + "title":"Overview", + "githuburl":"" + }, + { + "uri":"dli_02_0306.html", + "product_code":"dli", + "code":"3", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Getting Started", + "title":"Getting Started", + "githuburl":"" + }, + { + "uri":"dli_02_0307.html", + "product_code":"dli", + "code":"4", + "des":"This section describes how to create and query a queue using APIs.Queues created using this API will be bound to specified compute resources.It takes 6 to 10 minutes to s", + "doc_type":"api", + "kw":"Creating a Queue,Getting Started,API Reference", + "title":"Creating a Queue", + "githuburl":"" + }, + { + "uri":"dli_02_0308.html", + "product_code":"dli", + "code":"5", + "des":"This section describes how to create and query SQL jobs using APIs.It takes 6 to 10 minutes to start a job using a new queue for the first time.Creating a Queue: Create a", + "doc_type":"api", + "kw":"Creating and Submitting a SQL Job,Getting Started,API Reference", + "title":"Creating and Submitting a SQL Job", 
+ "githuburl":"" + }, + { + "uri":"dli_02_0309.html", + "product_code":"dli", + "code":"6", + "des":"This section describes how to create and submit Spark jobs using APIs.It takes 6 to 10 minutes to start a job using a new queue for the first time.Creating a Queue: Creat", + "doc_type":"api", + "kw":"Creating and Submitting a Spark Job,Getting Started,API Reference", + "title":"Creating and Submitting a Spark Job", + "githuburl":"" + }, + { + "uri":"dli_02_0310.html", + "product_code":"dli", + "code":"7", + "des":"This section describes how to create and run a user-defined Flink job using APIs.It takes 6 to 10 minutes to start a job using a new queue for the first time.Creating a Q", + "doc_type":"api", + "kw":"Creating and Submitting a Flink Job,Getting Started,API Reference", + "title":"Creating and Submitting a Flink Job", + "githuburl":"" + }, + { + "uri":"dli_02_0311.html", + "product_code":"dli", + "code":"8", + "des":"This section describes how to create an enhanced datasource connection using an API.It takes 6 to 10 minutes to start a job using a new queue for the first time.Before cr", + "doc_type":"api", + "kw":"Creating and Using a Datasource Connection,Getting Started,API Reference", + "title":"Creating and Using a Datasource Connection", + "githuburl":"" + }, + { + "uri":"dli_02_0036.html", + "product_code":"dli", + "code":"9", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Permission-related APIs", + "title":"Permission-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0037.html", + "product_code":"dli", + "code":"10", + "des":"This API is used to share a specific queue with other users. 
You can grant users with the permission to use the specified queue or revoke the permission.URI formatPUT /v1", + "doc_type":"api", + "kw":"Granting Users with the Queue Usage Permission,Permission-related APIs,API Reference", + "title":"Granting Users with the Queue Usage Permission", + "githuburl":"" + }, + { + "uri":"dli_02_0038.html", + "product_code":"dli", + "code":"11", + "des":"This API is used to query names of all users who can use a specified queue.URI formatGET /v1.0/{project_id}/queues/{queue_name}/usersGET /v1.0/{project_id}/queues/{queue_", + "doc_type":"api", + "kw":"Querying Queue Users,Permission-related APIs,API Reference", + "title":"Querying Queue Users", + "githuburl":"" + }, + { + "uri":"dli_02_0039.html", + "product_code":"dli", + "code":"12", + "des":"This API is used to grant database or table data usage permission to specified users.URI formatPUT /v1.0/{project_id}/user-authorizationPUT /v1.0/{project_id}/user-author", + "doc_type":"api", + "kw":"Granting Data Permission to Users,Permission-related APIs,API Reference", + "title":"Granting Data Permission to Users", + "githuburl":"" + }, + { + "uri":"dli_02_0040.html", + "product_code":"dli", + "code":"13", + "des":"This API is used query names of all users who have permission to use or access the database.URI formatGET /v1.0/{project_id}/databases/{database_name}/usersGET /v1.0/{pro", + "doc_type":"api", + "kw":"Querying Database Users,Permission-related APIs,API Reference", + "title":"Querying Database Users", + "githuburl":"" + }, + { + "uri":"dli_02_0041.html", + "product_code":"dli", + "code":"14", + "des":"This API is used to query users who have permission to access the specified table or column in the table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables", + "doc_type":"api", + "kw":"Querying Table Users,Permission-related APIs,API Reference", + "title":"Querying Table Users", + "githuburl":"" + }, + { + "uri":"dli_02_0042.html", + "product_code":"dli", + 
"code":"15", + "des":"This API is used to query the permission of a specified user on a table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/users/{user_name}GE", + "doc_type":"api", + "kw":"Querying a User's Table Permissions,Permission-related APIs,API Reference", + "title":"Querying a User's Table Permissions", + "githuburl":"" + }, + { + "uri":"dli_02_0252.html", + "product_code":"dli", + "code":"16", + "des":"This API is used to view the permissions granted to a user.URI formatGET /v1.0/{project_id}/authorization/privilegesGET /v1.0/{project_id}/authorization/privilegesParamet", + "doc_type":"api", + "kw":"Viewing the Granted Permissions of a User,Permission-related APIs,API Reference", + "title":"Viewing the Granted Permissions of a User", + "githuburl":"" + }, + { + "uri":"dli_02_0297.html", + "product_code":"dli", + "code":"17", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Agency-related APIs", + "title":"Agency-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0298.html", + "product_code":"dli", + "code":"18", + "des":"This API is used to obtain the agency information of a DLI user.URI formatGET /v2/{project_id}/agencyGET /v2/{project_id}/agencyParameter descriptionURI parametersParam", + "doc_type":"api", + "kw":"Obtaining DLI Agency Information,Agency-related APIs,API Reference", + "title":"Obtaining DLI Agency Information", + "githuburl":"" + }, + { + "uri":"dli_02_0299.html", + "product_code":"dli", + "code":"19", + "des":"This API is used to create an agency for a DLI user.URI formatPOST /v2/{project_id}/agencyPOST /v2/{project_id}/agencyParameter descriptionURI parametersParameterMandat", + "doc_type":"api", + "kw":"Creating a DLI Agency,Agency-related APIs,API Reference", + "title":"Creating a DLI Agency", + "githuburl":"" + }, + { + "uri":"dli_02_0193.html", + "product_code":"dli", + "code":"20", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Queue-related APIs (Recommended)", + "title":"Queue-related APIs (Recommended)", + "githuburl":"" + }, + { + "uri":"dli_02_0194.html", + "product_code":"dli", + "code":"21", + "des":"This API is used to create a queue. 
The queue will be bound to specified compute resources.It takes 5 to 15 minutes to start a job using a new queue for the first time.UR", + "doc_type":"api", + "kw":"Creating a Queue,Queue-related APIs (Recommended),API Reference", + "title":"Creating a Queue", + "githuburl":"" + }, + { + "uri":"dli_02_0195.html", + "product_code":"dli", + "code":"22", + "des":"This API is used to delete a specified queue.If a task is being executed in a specified queue, the queue cannot be deleted.URI formatDELETE /v1.0/{project_id}/queues/{que", + "doc_type":"api", + "kw":"Deleting a Queue,Queue-related APIs (Recommended),API Reference", + "title":"Deleting a Queue", + "githuburl":"" + }, + { + "uri":"dli_02_0196.html", + "product_code":"dli", + "code":"23", + "des":"This API is used to list all queues under the project.URI formatGET/v1.0/{project_id}/queuesGET/v1.0/{project_id}/queuesParameter descriptionURI parameterParameterMandato", + "doc_type":"api", + "kw":"Querying All Queues,Queue-related APIs (Recommended),API Reference", + "title":"Querying All Queues", + "githuburl":"" + }, + { + "uri":"dli_02_0016.html", + "product_code":"dli", + "code":"24", + "des":"This API is used to list details of a specific queue in a project.URI formatGET /v1.0/{project_id}/queues/{queue_name}GET /v1.0/{project_id}/queues/{queue_name}Parameter ", + "doc_type":"api", + "kw":"Viewing Details of a Queue,Queue-related APIs (Recommended),API Reference", + "title":"Viewing Details of a Queue", + "githuburl":"" + }, + { + "uri":"dli_02_0249.html", + "product_code":"dli", + "code":"25", + "des":"This API is used to restart, scale out, and scale in queues.Only SQL queues in the Available status can be restarted. 
(The queue status is Available only after the SQL jo", + "doc_type":"api", + "kw":"Restarting, Scaling Out, and Scaling In Queues,Queue-related APIs (Recommended),API Reference", + "title":"Restarting, Scaling Out, and Scaling In Queues", + "githuburl":"" + }, + { + "uri":"dli_02_0284.html", + "product_code":"dli", + "code":"26", + "des":"This API is used to send an address connectivity test request to a specified queue and insert the test address into the table.URI formatPOST /v1.0/{project_id}/queues/{q", + "doc_type":"api", + "kw":"Creating an Address Connectivity Test Request,Queue-related APIs (Recommended),API Reference", + "title":"Creating an Address Connectivity Test Request", + "githuburl":"" + }, + { + "uri":"dli_02_0285.html", + "product_code":"dli", + "code":"27", + "des":"This API is used to query the connectivity test result after the test is submitted.URI formatGET /v1.0/{project_id}/queues/{queue_name}/connection-test/{task_id}GET /v1", + "doc_type":"api", + "kw":"Querying Connectivity Test Details of a Specified Address,Queue-related APIs (Recommended),API Refer", + "title":"Querying Connectivity Test Details of a Specified Address", + "githuburl":"" + }, + { + "uri":"dli_02_0291.html", + "product_code":"dli", + "code":"28", + "des":"This API is used to create a scheduled CU change, that is, to create a scheduled CU change for a specified queue.URI formatPOST /v1/{project_id}/queues/{queue_name}/plan", + "doc_type":"api", + "kw":"Creating a Scheduled CU Change,Queue-related APIs (Recommended),API Reference", + "title":"Creating a Scheduled CU Change", + "githuburl":"" + }, + { + "uri":"dli_02_0292.html", + "product_code":"dli", + "code":"29", + "des":"This API is used to query the scheduled CU changes and list the changes of a specified queue.URI formatGET /v1/{project_id}/queues/{queue_name}/plansGET /v1/{project_id}/", + "doc_type":"api", + "kw":"Viewing a Scheduled CU Change,Queue-related APIs (Recommended),API Reference", + "title":"Viewing a 
Scheduled CU Change", + "githuburl":"" + }, + { + "uri":"dli_02_0293.html", + "product_code":"dli", + "code":"30", + "des":"This API is used to delete scheduled CU changes in batches.URI formatPOST /v1/{project_id}/queues/{queue_name}/plans/batch-deletePOST /v1/{project_id}/queues/{queue_name}", + "doc_type":"api", + "kw":"Deleting Scheduled CU Changes in Batches,Queue-related APIs (Recommended),API Reference", + "title":"Deleting Scheduled CU Changes in Batches", + "githuburl":"" + }, + { + "uri":"dli_02_0294.html", + "product_code":"dli", + "code":"31", + "des":"This API is used to delete a scheduled CU change for a queue with a specified ID.URI formatDELETE /v1/{project_id}/queues/{queue_name}/plans/{plan_id}DELETE /v1/{projec", + "doc_type":"api", + "kw":"Deleting a Scheduled CU Change,Queue-related APIs (Recommended),API Reference", + "title":"Deleting a Scheduled CU Change", + "githuburl":"" + }, + { + "uri":"dli_02_0295.html", + "product_code":"dli", + "code":"32", + "des":"This API is used to modify a scheduled CU change for a queue with a specified ID.URI formatPUT /v1/{project_id}/queues/{queue_name}/plans/{plan_id}PUT /v1/{project_id}/", + "doc_type":"api", + "kw":"Modifying a Scheduled CU Change,Queue-related APIs (Recommended),API Reference", + "title":"Modifying a Scheduled CU Change", + "githuburl":"" + }, + { + "uri":"dli_02_0158.html", + "product_code":"dli", + "code":"33", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"APIs Related to SQL Jobs", + "title":"APIs Related to SQL Jobs", + "githuburl":"" + }, + { + "uri":"dli_02_0027.html", + "product_code":"dli", + "code":"34", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Database-related APIs", + "title":"Database-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0028.html", + "product_code":"dli", + "code":"35", + "des":"This API is used to add a database.URI formatPOST /v1.0/{project_id}/databasesPOST /v1.0/{project_id}/databasesParameter descriptionURI parameterParameterMandatoryTypeDes", + "doc_type":"api", + "kw":"Creating a Database,Database-related APIs,API Reference", + "title":"Creating a Database", + "githuburl":"" + }, + { + "uri":"dli_02_0030.html", + "product_code":"dli", + "code":"36", + "des":"This API is used to delete an empty database. If there are tables in the database to be deleted, delete all tables first. For details about the API used to delete tables,", + "doc_type":"api", + "kw":"Deleting a Database,Database-related APIs,API Reference", + "title":"Deleting a Database", + "githuburl":"" + }, + { + "uri":"dli_02_0029.html", + "product_code":"dli", + "code":"37", + "des":"This API is used to query the information about all the databases.URI formatGET /v1.0/{project_id}/databasesGET /v1.0/{project_id}/databasesParameter descriptionURI param", + "doc_type":"api", + "kw":"Querying All Databases,Database-related APIs,API Reference", + "title":"Querying All Databases", + "githuburl":"" + }, + { + "uri":"dli_02_0164.html", + "product_code":"dli", + "code":"38", + "des":"This API is used to modify the owner of a database.URI formatPUT /v1.0/{project_id}/databases/{database_name}/ownerPUT /v1.0/{project_id}/databases/{database_name}/ownerP", + "doc_type":"api", + "kw":"Modifying a Database Owner,Database-related APIs,API Reference", + "title":"Modifying a Database Owner", + "githuburl":"" + }, + { + "uri":"dli_02_0031.html", + "product_code":"dli", + "code":"39", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you 
quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Table-related APIs", + "title":"Table-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0034.html", + "product_code":"dli", + "code":"40", + "des":"This API is used to create a table.This API is a synchronous API.URI formatPOST /v1.0/{project_id}/databases/{database_name}/tablesPOST /v1.0/{project_id}/databases/{data", + "doc_type":"api", + "kw":"Creating a Table,Table-related APIs,API Reference", + "title":"Creating a Table", + "githuburl":"" + }, + { + "uri":"dli_02_0035.html", + "product_code":"dli", + "code":"41", + "des":"This API is used to delete a specified table.URI formatDELETE /v1.0/{project_id}/databases/{database_name}/tables/{table_name}DELETE /v1.0/{project_id}/databases/{databas", + "doc_type":"api", + "kw":"Deleting a Table,Table-related APIs,API Reference", + "title":"Deleting a Table", + "githuburl":"" + }, + { + "uri":"dli_02_0105.html", + "product_code":"dli", + "code":"42", + "des":"This API is used to query information about tables that meet the filtering criteria or all the tables in the specified database.URI formatGET /v1.0/{project_id}/databases", + "doc_type":"api", + "kw":"Querying All Tables (Recommended),Table-related APIs,API Reference", + "title":"Querying All Tables (Recommended)", + "githuburl":"" + }, + { + "uri":"dli_02_0033.html", + "product_code":"dli", + "code":"43", + "des":"This API is used to describe metadata information in the specified table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}GET /v1.0/{project_", + "doc_type":"api", + "kw":"Describing the Table Information,Table-related APIs,API Reference", + "title":"Describing the Table Information", + "githuburl":"" + }, + { + "uri":"dli_02_0108.html", + "product_code":"dli", + "code":"44", + "des":"This API is used 
to preview the first ten rows of a table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/previewGET /v1.0/{project_id}/dat", + "doc_type":"api", + "kw":"Previewing Table Content,Table-related APIs,API Reference", + "title":"Previewing Table Content", + "githuburl":"" + }, + { + "uri":"dli_02_0250.html", + "product_code":"dli", + "code":"45", + "des":"This API is used to obtain the partition list.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/partitionsGET /v1.0/{project_id}/databases/{d", + "doc_type":"api", + "kw":"Obtaining the Partition List,Table-related APIs,API Reference", + "title":"Obtaining the Partition List", + "githuburl":"" + }, + { + "uri":"dli_02_0017.html", + "product_code":"dli", + "code":"46", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Job-related APIs", + "title":"Job-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0019.html", + "product_code":"dli", + "code":"47", + "des":"This API is used to import data from a file to a DLI or OBS table. 
Currently, only OBS data can be imported to a DLI or OBS table.This API is asynchronous.When importing ", + "doc_type":"api", + "kw":"Importing Data,Job-related APIs,API Reference", + "title":"Importing Data", + "githuburl":"" + }, + { + "uri":"dli_02_0020.html", + "product_code":"dli", + "code":"48", + "des":"This API is used to export data from a DLI table to a file.This API is asynchronous.Currently, data can be exported only from a DLI table to OBS, and the OBS path must be", + "doc_type":"api", + "kw":"Exporting Data,Job-related APIs,API Reference", + "title":"Exporting Data", + "githuburl":"" + }, + { + "uri":"dli_02_0102.html", + "product_code":"dli", + "code":"49", + "des":"This API is used to submit jobs to a queue using SQL statements.The job types support DDL, DCL, IMPORT, QUERY, and INSERT. The IMPORT function is the same as that describ", + "doc_type":"api", + "kw":"Submitting a SQL Job (Recommended),Job-related APIs,API Reference", + "title":"Submitting a SQL Job (Recommended)", + "githuburl":"" + }, + { + "uri":"dli_02_0104.html", + "product_code":"dli", + "code":"50", + "des":"This API is used to cancel a submitted job. 
If execution of a job completes or fails, this job cannot be canceled.URI formatDELETE /v1.0/{project_id}/jobs/{job_id}DELETE ", + "doc_type":"api", + "kw":"Canceling a Job (Recommended),Job-related APIs,API Reference", + "title":"Canceling a Job (Recommended)", + "githuburl":"" + }, + { + "uri":"dli_02_0025.html", + "product_code":"dli", + "code":"51", + "des":"This API is used to query information about all jobs in the current project.URI formatGET /v1.0/{project_id}/jobsGET /v1.0/{project_id}/jobsParameter descriptionURI param", + "doc_type":"api", + "kw":"Querying All Jobs,Job-related APIs,API Reference", + "title":"Querying All Jobs", + "githuburl":"" + }, + { + "uri":"dli_02_0312.html", + "product_code":"dli", + "code":"52", + "des":"This API is used to view the job execution result after a job is executed using SQL query statements. Currently, you can only query execution results of jobs of the QUERY", + "doc_type":"api", + "kw":"Previewing SQL Job Query Results,Job-related APIs,API Reference", + "title":"Previewing SQL Job Query Results", + "githuburl":"" + }, + { + "uri":"dli_02_0021.html", + "product_code":"dli", + "code":"53", + "des":"This API is used to query the status of a submitted job.URI formatGET /v1.0/{project_id}/jobs/{job_id}/statusGET /v1.0/{project_id}/jobs/{job_id}/statusParameter descript", + "doc_type":"api", + "kw":"Querying Job Status,Job-related APIs,API Reference", + "title":"Querying Job Status", + "githuburl":"" + }, + { + "uri":"dli_02_0022.html", + "product_code":"dli", + "code":"54", + "des":"This API is used to query details about jobs, including databasename, tablename, file size, and export mode.URI formatGET/v1.0/{project_id}/jobs/{job_id}/detailGET/v1.0/{", + "doc_type":"api", + "kw":"Querying Job Details,Job-related APIs,API Reference", + "title":"Querying Job Details", + "githuburl":"" + }, + { + "uri":"dli_02_0107.html", + "product_code":"dli", + "code":"55", + "des":"This API is used to check the SQL syntax.URI 
formatPOST /v1.0/{project_id}/jobs/check-sqlPOST /v1.0/{project_id}/jobs/check-sqlParameter descriptionURI parametersParamete", + "doc_type":"api", + "kw":"Checking SQL Syntax,Job-related APIs,API Reference", + "title":"Checking SQL Syntax", + "githuburl":"" + }, + { + "uri":"dli_02_0024.html", + "product_code":"dli", + "code":"56", + "des":"This API is used to export results returned from the query using SQL statements to OBS. Only the query result of QUERY jobs can be exported.This API is asynchronous.Curre", + "doc_type":"api", + "kw":"Exporting Query Results,Job-related APIs,API Reference", + "title":"Exporting Query Results", + "githuburl":"" + }, + { + "uri":"dli_02_0296.html", + "product_code":"dli", + "code":"57", + "des":"This API is used to obtain the job execution progress. If a job is being executed, information about its subjobs can be obtained. If a job has just started or has ended, ", + "doc_type":"api", + "kw":"Querying the Job Execution Progress,Job-related APIs,API Reference", + "title":"Querying the Job Execution Progress", + "githuburl":"" + }, + { + "uri":"dli_02_0166.html", + "product_code":"dli", + "code":"58", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Package Group-related APIs", + "title":"Package Group-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0130.html", + "product_code":"dli", + "code":"59", + "des":"This API is used to upload a package group to a project. 
The function is similar to creating a package on the management console.URI formatPOST /v2.0/{project_id}/resourc", + "doc_type":"api", + "kw":"Uploading a Package Group,Package Group-related APIs,API Reference", + "title":"Uploading a Package Group", + "githuburl":"" + }, + { + "uri":"dli_02_0168.html", + "product_code":"dli", + "code":"60", + "des":"This API is used to query all resources in a project, including groups.URI formatGET /v2.0/{project_id}/resourcesGET /v2.0/{project_id}/resourcesParameter descriptionURI ", + "doc_type":"api", + "kw":"Querying Package Group List,Package Group-related APIs,API Reference", + "title":"Querying Package Group List", + "githuburl":"" + }, + { + "uri":"dli_02_0169.html", + "product_code":"dli", + "code":"61", + "des":"This API is used to upload a group of JAR packages to a project.When a resource group with the same name is uploaded, the new group overwrites the old group.URI formatPOS", + "doc_type":"api", + "kw":"Uploading a JAR Package Group,Package Group-related APIs,API Reference", + "title":"Uploading a JAR Package Group", + "githuburl":"" + }, + { + "uri":"dli_02_0170.html", + "product_code":"dli", + "code":"62", + "des":"This API is used to upload a group of PyFile packages to a project.When a group with the same name as the PyFile package is uploaded, the new group overwrites the old gro", + "doc_type":"api", + "kw":"Uploading a PyFile Package Group,Package Group-related APIs,API Reference", + "title":"Uploading a PyFile Package Group", + "githuburl":"" + }, + { + "uri":"dli_02_0171.html", + "product_code":"dli", + "code":"63", + "des":"This API is used to upload a group of File packages to a project.When the File package group with the same name is uploaded, the new group overwrites the old group.URI fo", + "doc_type":"api", + "kw":"Uploading a File Package Group,Package Group-related APIs,API Reference", + "title":"Uploading a File Package Group", + "githuburl":"" + }, + { + "uri":"dli_02_0172.html", + 
"product_code":"dli", + "code":"64", + "des":"This API is used to query resource information of a package group in a Project.URI formatGET /v2.0/{project_id}/resources/{resource_name}GET /v2.0/{project_id}/resources/", + "doc_type":"api", + "kw":"Querying Resource Packages in a Group,Package Group-related APIs,API Reference", + "title":"Querying Resource Packages in a Group", + "githuburl":"" + }, + { + "uri":"dli_02_0173.html", + "product_code":"dli", + "code":"65", + "des":"This API is used to delete resource packages in a group in a Project.URI formatDELETE /v2.0/{project_id}/resources/{resource_name}DELETE /v2.0/{project_id}/resources/{res", + "doc_type":"api", + "kw":"Deleting a Resource Package from a Group,Package Group-related APIs,API Reference", + "title":"Deleting a Resource Package from a Group", + "githuburl":"" + }, + { + "uri":"dli_02_0253.html", + "product_code":"dli", + "code":"66", + "des":"This API is used to change the owner of a program package.URI formatPUT /v2.0/{project_id}/resources/ownerPUT /v2.0/{project_id}/resources/ownerParameter descriptionURI p", + "doc_type":"api", + "kw":"Changing the Owner of a Group or Resource Package,Package Group-related APIs,API Reference", + "title":"Changing the Owner of a Group or Resource Package", + "githuburl":"" + }, + { + "uri":"dli_02_0223.html", + "product_code":"dli", + "code":"67", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"APIs Related to Flink Jobs", + "title":"APIs Related to Flink Jobs", + "githuburl":"" + }, + { + "uri":"dli_02_0225.html", + "product_code":"dli", + "code":"68", + "des":"This API is used to grant DLI the permission to access OBS buckets for saving job checkpoints and run logs.URI formatPOST /v1.0/{project_id}/dli/obs-authorizePOST /v1.0/{", + "doc_type":"api", + "kw":"Granting OBS Permissions to DLI,APIs Related to Flink Jobs,API Reference", + "title":"Granting OBS Permissions to DLI", + "githuburl":"" + }, + { + "uri":"dli_02_0228.html", + "product_code":"dli", + "code":"69", + "des":"This API is used to create a Flink streaming SQL job.URI formatPOST /v1.0/{project_id}/streaming/sql-jobsPOST /v1.0/{project_id}/streaming/sql-jobsParameter descriptionUR", + "doc_type":"api", + "kw":"Creating a SQL Job,APIs Related to Flink Jobs,API Reference", + "title":"Creating a SQL Job", + "githuburl":"" + }, + { + "uri":"dli_02_0229.html", + "product_code":"dli", + "code":"70", + "des":"This API is used to modify a Flink SQL job.URI formatPUT /v1.0/{project_id}/streaming/sql-jobs/{job_id}PUT /v1.0/{project_id}/streaming/sql-jobs/{job_id}Parameter descrip", + "doc_type":"api", + "kw":"Updating a SQL Job,APIs Related to Flink Jobs,API Reference", + "title":"Updating a SQL Job", + "githuburl":"" + }, + { + "uri":"dli_02_0230.html", + "product_code":"dli", + "code":"71", + "des":"This API is used to create custom jobs, which currently support the JAR format and run in dedicated queues.URI formatPOST /v1.0/{project_id}/streaming/flink-jobsPOST /v1.", + "doc_type":"api", + "kw":"Creating a Flink Jar job,APIs Related to Flink Jobs,API Reference", + "title":"Creating a Flink Jar job", + "githuburl":"" + }, + { + "uri":"dli_02_0231.html", + "product_code":"dli", + "code":"72", + "des":"This API is used to update 
custom jobs, which currently support the JAR format and run in dedicated queues.URI formatPUT /v1.0/{project_id}/streaming/flink-jobs/{job_id}P", + "doc_type":"api", + "kw":"Updating a Flink Jar Job,APIs Related to Flink Jobs,API Reference", + "title":"Updating a Flink Jar Job", + "githuburl":"" + }, + { + "uri":"dli_02_0233.html", + "product_code":"dli", + "code":"73", + "des":"This API is used to trigger batch job running.URI formatPOST /v1.0/{project_id}/streaming/jobs/runPOST /v1.0/{project_id}/streaming/jobs/runParameter descriptionURI param", + "doc_type":"api", + "kw":"Running Jobs in Batches,APIs Related to Flink Jobs,API Reference", + "title":"Running Jobs in Batches", + "githuburl":"" + }, + { + "uri":"dli_02_0234.html", + "product_code":"dli", + "code":"74", + "des":"This API is used to query the list of the current user's jobs. You can set the job ID as the ID and query jobs whose IDs are greater than or less than the ID. You can als", + "doc_type":"api", + "kw":"Querying the Job List,APIs Related to Flink Jobs,API Reference", + "title":"Querying the Job List", + "githuburl":"" + }, + { + "uri":"dli_02_0235.html", + "product_code":"dli", + "code":"75", + "des":"This API is used to query details of a job.URI formatGET /v1.0/{project_id}/streaming/jobs/{job_id}GET /v1.0/{project_id}/streaming/jobs/{job_id}Parameter descriptionURI ", + "doc_type":"api", + "kw":"Querying Job Details,APIs Related to Flink Jobs,API Reference", + "title":"Querying Job Details", + "githuburl":"" + }, + { + "uri":"dli_02_0236.html", + "product_code":"dli", + "code":"76", + "des":"This API is used to query a job execution plan.URI formatGET /v1.0/{project_id}/streaming/jobs/{job_id}/execute-graphGET /v1.0/{project_id}/streaming/jobs/{job_id}/execut", + "doc_type":"api", + "kw":"Querying the Job Execution Plan,APIs Related to Flink Jobs,API Reference", + "title":"Querying the Job Execution Plan", + "githuburl":"" + }, + { + "uri":"dli_02_0241.html", + "product_code":"dli", + 
"code":"77", + "des":"This API is used to stop running jobs in batches.URI formatPOST /v1.0/{project_id}/streaming/jobs/stopPOST /v1.0/{project_id}/streaming/jobs/stopParameter descriptionURI ", + "doc_type":"api", + "kw":"Stopping Jobs in Batches,APIs Related to Flink Jobs,API Reference", + "title":"Stopping Jobs in Batches", + "githuburl":"" + }, + { + "uri":"dli_02_0242.html", + "product_code":"dli", + "code":"78", + "des":"This API is used to delete a Flink job at any state.The job records will not be deleted.URI formatDELETE /v1.0/{project_id}/streaming/jobs/{job_id}DELETE /v1.0/{project_i", + "doc_type":"api", + "kw":"Deleting a Job,APIs Related to Flink Jobs,API Reference", + "title":"Deleting a Job", + "githuburl":"" + }, + { + "uri":"dli_02_0243.html", + "product_code":"dli", + "code":"79", + "des":"This API is used to batch delete jobs at any state.URI formatPOST /v1.0/{project_id}/streaming/jobs/deletePOST /v1.0/{project_id}/streaming/jobs/deleteParameter descripti", + "doc_type":"api", + "kw":"Deleting Jobs in Batches,APIs Related to Flink Jobs,API Reference", + "title":"Deleting Jobs in Batches", + "githuburl":"" + }, + { + "uri":"dli_02_0254.html", + "product_code":"dli", + "code":"80", + "des":"This API is used to export Flink job data.URI formatPOST /v1.0/{project_id}/streaming/jobs/exportPOST /v1.0/{project_id}/streaming/jobs/exportParameter descriptionURI par", + "doc_type":"api", + "kw":"Exporting a Flink Job,APIs Related to Flink Jobs,API Reference", + "title":"Exporting a Flink Job", + "githuburl":"" + }, + { + "uri":"dli_02_0255.html", + "product_code":"dli", + "code":"81", + "des":"This API is used to import Flink job data.URI formatPOST /v1.0/{project_id}/streaming/jobs/importPOST /v1.0/{project_id}/streaming/jobs/importParameter descriptionURI par", + "doc_type":"api", + "kw":"Importing a Flink Job,APIs Related to Flink Jobs,API Reference", + "title":"Importing a Flink Job", + "githuburl":"" + }, + { + "uri":"dli_02_0316.html", + 
"product_code":"dli", + "code":"82", + "des":"This API is used to generate a static stream graph for a Flink SQL job.URI formatPOST /v3/{project_id}/streaming/jobs/{job_id}/gen-graphPOST /v3/{project_id}/streaming/jo", + "doc_type":"api", + "kw":"Generating a Static Stream Graph for a Flink SQL Job,APIs Related to Flink Jobs,API Reference", + "title":"Generating a Static Stream Graph for a Flink SQL Job", + "githuburl":"" + }, + { + "uri":"dli_02_0109.html", + "product_code":"dli", + "code":"83", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"APIs Related to Spark jobs", + "title":"APIs Related to Spark jobs", + "githuburl":"" + }, + { + "uri":"dli_02_0162.html", + "product_code":"dli", + "code":"84", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Batch Processing-related APIs", + "title":"Batch Processing-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0124.html", + "product_code":"dli", + "code":"85", + "des":"This API is used to create a batch processing job in a queue.URI formatPOST /v2.0/{project_id}/batchesPOST /v2.0/{project_id}/batchesParameter descriptionURI parameterPar", + "doc_type":"api", + "kw":"Creating a Batch Processing Job,Batch Processing-related APIs,API Reference", + "title":"Creating a Batch Processing Job", + "githuburl":"" + }, + { + "uri":"dli_02_0129.html", + "product_code":"dli", + "code":"86", + "des":"This API is used to cancel a batch processing job.Batch processing jobs in the Successful or Failed state cannot be canceled.URI formatDELETE /v2.0/{project_id}/batches/{", + "doc_type":"api", + "kw":"Canceling a Batch Processing Job,Batch Processing-related APIs,API Reference", + "title":"Canceling a Batch Processing Job", + "githuburl":"" + }, + { + "uri":"dli_02_0125.html", + "product_code":"dli", + "code":"87", + "des":"This API is used to obtain the list of batch processing jobs in a queue of a project.URI formatGET /v2.0/{project_id}/batchesGET /v2.0/{project_id}/batchesParameter descr", + "doc_type":"api", + "kw":"Obtaining the List of Batch Processing Jobs,Batch Processing-related APIs,API Reference", + "title":"Obtaining the List of Batch Processing Jobs", + "githuburl":"" + }, + { + "uri":"dli_02_0126.html", + "product_code":"dli", + "code":"88", + "des":"This API is used to query details about a batch processing job based on the job ID.URI formatGET /v2.0/{project_id}/batches/{batch_id}GET /v2.0/{project_id}/batches/{batc", + "doc_type":"api", + "kw":"Querying Batch Job Details,Batch Processing-related APIs,API Reference", + "title":"Querying Batch Job Details", + "githuburl":"" + }, + { + 
"uri":"dli_02_0127.html", + "product_code":"dli", + "code":"89", + "des":"This API is used to obtain the execution status of a batch processing job.URI formatGET /v2.0/{project_id}/batches/{batch_id}/stateGET /v2.0/{project_id}/batches/{batch_i", + "doc_type":"api", + "kw":"Querying a Batch Job Status,Batch Processing-related APIs,API Reference", + "title":"Querying a Batch Job Status", + "githuburl":"" + }, + { + "uri":"dli_02_0128.html", + "product_code":"dli", + "code":"90", + "des":"This API is used to query the back-end logs of batch processing jobs.URI formatGET /v2.0/{project_id}/batches/{batch_id}/logGET /v2.0/{project_id}/batches/{batch_id}/logP", + "doc_type":"api", + "kw":"Querying Batch Job Logs,Batch Processing-related APIs,API Reference", + "title":"Querying Batch Job Logs", + "githuburl":"" + }, + { + "uri":"dli_02_0244.html", + "product_code":"dli", + "code":"91", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"APIs Related to Flink Job Templates", + "title":"APIs Related to Flink Job Templates", + "githuburl":"" + }, + { + "uri":"dli_02_0245.html", + "product_code":"dli", + "code":"92", + "des":"This API is used to create a user template for the DLI service. 
A maximum of 100 user templates can be created.URI formatPOST /v1.0/{project_id}/streaming/job-templatesPO", + "doc_type":"api", + "kw":"Creating a Template,APIs Related to Flink Job Templates,API Reference", + "title":"Creating a Template", + "githuburl":"" + }, + { + "uri":"dli_02_0246.html", + "product_code":"dli", + "code":"93", + "des":"This API is used to update existing templates in DLI.URI formatPUT /v1.0/{project_id}/streaming/job-templates/{template_id}PUT /v1.0/{project_id}/streaming/job-templates/", + "doc_type":"api", + "kw":"Updating a Template,APIs Related to Flink Job Templates,API Reference", + "title":"Updating a Template", + "githuburl":"" + }, + { + "uri":"dli_02_0247.html", + "product_code":"dli", + "code":"94", + "des":"This API is used to delete a template. A template used by jobs can also be deleted.URI formatDELETE /v1.0/{project_id}/streaming/job-templates/{template_id}DELETE /v1.0/{", + "doc_type":"api", + "kw":"Deleting a Template,APIs Related to Flink Job Templates,API Reference", + "title":"Deleting a Template", + "githuburl":"" + }, + { + "uri":"dli_02_0248.html", + "product_code":"dli", + "code":"95", + "des":"This API is used to query the job template list. Currently, only custom templates can be queried.URI formatGET /v1.0/{project_id}/streaming/job-templatesGET /v1.0/{projec", + "doc_type":"api", + "kw":"Querying the Template List,APIs Related to Flink Job Templates,API Reference", + "title":"Querying the Template List", + "githuburl":"" + }, + { + "uri":"dli_02_0186.html", + "product_code":"dli", + "code":"96", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"APIs Related to Enhanced Datasource Connections", + "title":"APIs Related to Enhanced Datasource Connections", + "githuburl":"" + }, + { + "uri":"dli_02_0187.html", + "product_code":"dli", + "code":"97", + "des":"This API is used to create an enhanced datasource connection with other services.URI formatPOST /v2.0/{project_id}/datasource/enhanced-connectionsPOST /v2.0/{project_id}/", + "doc_type":"api", + "kw":"Creating an Enhanced Datasource Connection,APIs Related to Enhanced Datasource Connections,API Refer", + "title":"Creating an Enhanced Datasource Connection", + "githuburl":"" + }, + { + "uri":"dli_02_0188.html", + "product_code":"dli", + "code":"98", + "des":"This API is used to delete an enhanced datasource connection.The connection that is being created cannot be deleted.URI formatDELETE /v2.0/{project_id}/datasource/enhance", + "doc_type":"api", + "kw":"Deleting an Enhanced Datasource Connection,APIs Related to Enhanced Datasource Connections,API Refer", + "title":"Deleting an Enhanced Datasource Connection", + "githuburl":"" + }, + { + "uri":"dli_02_0190.html", + "product_code":"dli", + "code":"99", + "des":"This API is used to query the list of created enhanced datasource connections.URI formatGET /v2.0/{project_id}/datasource/enhanced-connectionsGET /v2.0/{project_id}/datas", + "doc_type":"api", + "kw":"Querying an Enhanced Datasource Connection List,APIs Related to Enhanced Datasource Connections,API ", + "title":"Querying an Enhanced Datasource Connection List", + "githuburl":"" + }, + { + "uri":"dli_02_0189.html", + "product_code":"dli", + "code":"100", + "des":"This API is used to query the created enhanced datasource connections.URI formatGET /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}GET /v2.0/{project_i", + "doc_type":"api", + "kw":"Querying an Enhanced 
Datasource Connection,APIs Related to Enhanced Datasource Connections,API Refer", + "title":"Querying an Enhanced Datasource Connection", + "githuburl":"" + }, + { + "uri":"dli_02_0191.html", + "product_code":"dli", + "code":"101", + "des":"This API is used to bind a queue to a created enhanced datasource connection.URI formatPOST /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/associate-q", + "doc_type":"api", + "kw":"Binding a Queue,APIs Related to Enhanced Datasource Connections,API Reference", + "title":"Binding a Queue", + "githuburl":"" + }, + { + "uri":"dli_02_0192.html", + "product_code":"dli", + "code":"102", + "des":"This API is used to unbind a queue from an enhanced datasource connection.URI formatPOST /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/disassociate-q", + "doc_type":"api", + "kw":"Unbinding a Queue,APIs Related to Enhanced Datasource Connections,API Reference", + "title":"Unbinding a Queue", + "githuburl":"" + }, + { + "uri":"dli_02_0200.html", + "product_code":"dli", + "code":"103", + "des":"This API is used to modify the host information of a connected datasource. 
Only full overwriting is supported.URI formatPUT /v2.0/{project_id}/datasource/enhanced-connect", + "doc_type":"api", + "kw":"Modifying the Host Information,APIs Related to Enhanced Datasource Connections,API Reference", + "title":"Modifying the Host Information", + "githuburl":"" + }, + { + "uri":"dli_02_0256.html", + "product_code":"dli", + "code":"104", + "des":"This API is used to query the authorization about an enhanced datasource connection.URI formatGET /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/privi", + "doc_type":"api", + "kw":"Querying Authorization of an Enhanced Datasource Connection,APIs Related to Enhanced Datasource Conn", + "title":"Querying Authorization of an Enhanced Datasource Connection", + "githuburl":"" + }, + { + "uri":"dli_02_0257.html", + "product_code":"dli", + "code":"105", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Global Variable-related APIs", + "title":"Global Variable-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0258.html", + "product_code":"dli", + "code":"106", + "des":"This API is used to create a global variable.URI formatPOST /v1.0/{project_id}/variablesPOST /v1.0/{project_id}/variablesParameter descriptionURI parametersParameterManda", + "doc_type":"api", + "kw":"Creating a Global Variable,Global Variable-related APIs,API Reference", + "title":"Creating a Global Variable", + "githuburl":"" + }, + { + "uri":"dli_02_0259.html", + "product_code":"dli", + "code":"107", + "des":"This API is used to delete a global variable.Only the user who creates a global variable can delete the variable.URI formatDELETE /v1.0/{project_id}/variables/{var_name}D", + "doc_type":"api", + "kw":"Deleting a Global Variable,Global Variable-related APIs,API Reference", 
+ "title":"Deleting a Global Variable", + "githuburl":"" + }, + { + "uri":"dli_02_0260.html", + "product_code":"dli", + "code":"108", + "des":"This API is used to modify a global variable.URI formatPUT /v1.0/{project_id}/variables/{var_name}PUT /v1.0/{project_id}/variables/{var_name}Parameter descriptionURI para", + "doc_type":"api", + "kw":"Modifying a Global Variable,Global Variable-related APIs,API Reference", + "title":"Modifying a Global Variable", + "githuburl":"" + }, + { + "uri":"dli_02_0261.html", + "product_code":"dli", + "code":"109", + "des":"This API is used to query information about all global variables in the current project.URI formatGET /v1.0/{project_id}/variablesGET /v1.0/{project_id}/variablesParamete", + "doc_type":"api", + "kw":"Querying All Global Variables,Global Variable-related APIs,API Reference", + "title":"Querying All Global Variables", + "githuburl":"" + }, + { + "uri":"dli_02_0201.html", + "product_code":"dli", + "code":"110", + "des":"This section describes fine-grained permissions management for your DLI. If your account does not need individual IAM users, then you may skip this section.By default, ne", + "doc_type":"api", + "kw":"Permissions Policies and Supported Actions,API Reference", + "title":"Permissions Policies and Supported Actions", + "githuburl":"" + }, + { + "uri":"dli_02_0011.html", + "product_code":"dli", + "code":"111", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Public Parameters", + "title":"Public Parameters", + "githuburl":"" + }, + { + "uri":"dli_02_0012.html", + "product_code":"dli", + "code":"112", + "des":"Table 1 describes status codes.", + "doc_type":"api", + "kw":"Status Codes,Public Parameters,API Reference", + "title":"Status Codes", + "githuburl":"" + }, + { + "uri":"dli_02_0056.html", + "product_code":"dli", + "code":"113", + "des":"If an error occurs in API calling, no result is returned. Identify the cause of error based on the error codes of each API. If an error occurs in API calling, HTTP status", + "doc_type":"api", + "kw":"Error Code,Public Parameters,API Reference", + "title":"Error Code", + "githuburl":"" + }, + { + "uri":"dli_02_0183.html", + "product_code":"dli", + "code":"114", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Obtaining a Project ID,Public Parameters,API Reference", + "title":"Obtaining a Project ID", + "githuburl":"" + }, + { + "uri":"dli_02_0013.html", + "product_code":"dli", + "code":"115", + "des":"An account ID (domain-id) is required for some URLs when an API is called. To obtain an account ID, perform the following operations:Log in to the management console.Hove", + "doc_type":"api", + "kw":"Obtaining an Account ID,Public Parameters,API Reference", + "title":"Obtaining an Account ID", + "githuburl":"" + }, + { + "uri":"dli_02_00003.html", + "product_code":"dli", + "code":"116", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Change History,API Reference", + "title":"Change History", + "githuburl":"" + } +] \ No newline at end of file diff --git a/docs/dli/api-ref/CLASS.TXT.json b/docs/dli/api-ref/CLASS.TXT.json new file mode 100644 index 00000000..b8d22363 --- /dev/null +++ b/docs/dli/api-ref/CLASS.TXT.json @@ -0,0 +1,1046 @@ +[ + { + "desc":"Public cloud APIs comply with the RESTful API design principles. REST-based Web services are organized into resources. Each resource is identified by one or more Uniform ", + "product_code":"dli", + "title":"Calling APIs", + "uri":"dli_02_0500.html", + "doc_type":"api", + "p_code":"", + "code":"1" + }, + { + "desc":"This section describes the APIs provided by DLI.", + "product_code":"dli", + "title":"Overview", + "uri":"dli_02_0181.html", + "doc_type":"api", + "p_code":"", + "code":"2" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Getting Started", + "uri":"dli_02_0306.html", + "doc_type":"api", + "p_code":"", + "code":"3" + }, + { + "desc":"This section describes how to create and query a queue using APIs.Queues created using this API will be bound to specified compute resources.It takes 6 to 10 minutes to s", + "product_code":"dli", + "title":"Creating a Queue", + "uri":"dli_02_0307.html", + "doc_type":"api", + "p_code":"3", + "code":"4" + }, + { + "desc":"This section describes how to create and query SQL jobs using APIs.It takes 6 to 10 minutes to start a job using a new queue for the first time.Creating a Queue: Create a", + "product_code":"dli", + "title":"Creating and Submitting a SQL Job", + "uri":"dli_02_0308.html", + "doc_type":"api", + "p_code":"3", + "code":"5" + }, + { + "desc":"This section describes how to create and submit Spark jobs using APIs.It takes 6 to 10 minutes to start a job using a new queue for the first time.Creating a Queue: Creat", + "product_code":"dli", + "title":"Creating and Submitting a Spark Job", + "uri":"dli_02_0309.html", + "doc_type":"api", + "p_code":"3", + "code":"6" + }, + { + "desc":"This section describes how to create and run a user-defined Flink job using APIs.It takes 6 to 10 minutes to start a job using a new queue for the first time.Creating a Q", + "product_code":"dli", + "title":"Creating and Submitting a Flink Job", + "uri":"dli_02_0310.html", + "doc_type":"api", + "p_code":"3", + "code":"7" + }, + { + "desc":"This section describes how to create an enhanced datasource connection using an API.It takes 6 to 10 minutes to start a job using a new queue for the first time.Before cr", + "product_code":"dli", + "title":"Creating and Using a Datasource Connection", + "uri":"dli_02_0311.html", + "doc_type":"api", + "p_code":"3", + "code":"8" + }, + { + "desc":"HUAWEI 
CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Permission-related APIs", + "uri":"dli_02_0036.html", + "doc_type":"api", + "p_code":"", + "code":"9" + }, + { + "desc":"This API is used to share a specific queue with other users. You can grant users with the permission to use the specified queue or revoke the permission.URI formatPUT /v1", + "product_code":"dli", + "title":"Granting Users with the Queue Usage Permission", + "uri":"dli_02_0037.html", + "doc_type":"api", + "p_code":"9", + "code":"10" + }, + { + "desc":"This API is used to query names of all users who can use a specified queue.URI formatGET /v1.0/{project_id}/queues/{queue_name}/usersGET /v1.0/{project_id}/queues/{queue_", + "product_code":"dli", + "title":"Querying Queue Users", + "uri":"dli_02_0038.html", + "doc_type":"api", + "p_code":"9", + "code":"11" + }, + { + "desc":"This API is used to grant database or table data usage permission to specified users.URI formatPUT /v1.0/{project_id}/user-authorizationPUT /v1.0/{project_id}/user-author", + "product_code":"dli", + "title":"Granting Data Permission to Users", + "uri":"dli_02_0039.html", + "doc_type":"api", + "p_code":"9", + "code":"12" + }, + { + "desc":"This API is used query names of all users who have permission to use or access the database.URI formatGET /v1.0/{project_id}/databases/{database_name}/usersGET /v1.0/{pro", + "product_code":"dli", + "title":"Querying Database Users", + "uri":"dli_02_0040.html", + "doc_type":"api", + "p_code":"9", + "code":"13" + }, + { + "desc":"This API is used to query users who have permission to access the specified table or column in the table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables", + "product_code":"dli", + "title":"Querying Table Users", + 
"uri":"dli_02_0041.html", + "doc_type":"api", + "p_code":"9", + "code":"14" + }, + { + "desc":"This API is used to query the permission of a specified user on a table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/users/{user_name}GE", + "product_code":"dli", + "title":"Querying a User's Table Permissions", + "uri":"dli_02_0042.html", + "doc_type":"api", + "p_code":"9", + "code":"15" + }, + { + "desc":"This API is used to view the permissions granted to a user.URI formatGET /v1.0/{project_id}/authorization/privilegesGET /v1.0/{project_id}/authorization/privilegesParamet", + "product_code":"dli", + "title":"Viewing the Granted Permissions of a User", + "uri":"dli_02_0252.html", + "doc_type":"api", + "p_code":"9", + "code":"16" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Agency-related APIs", + "uri":"dli_02_0297.html", + "doc_type":"api", + "p_code":"", + "code":"17" + }, + { + "desc":"This API is used to obtain the agency information of a DLI user.URI formatGET /v2/{project_id}/agencyGET /v2/{project_id}/agencyParameter descriptionURI parametersParam", + "product_code":"dli", + "title":"Obtaining DLI Agency Information", + "uri":"dli_02_0298.html", + "doc_type":"api", + "p_code":"17", + "code":"18" + }, + { + "desc":"This API is used to create an agency for a DLI user.URI formatPOST /v2/{project_id}/agencyPOST /v2/{project_id}/agencyParameter descriptionURI parametersParameterMandat", + "product_code":"dli", + "title":"Creating a DLI Agency", + "uri":"dli_02_0299.html", + "doc_type":"api", + "p_code":"17", + "code":"19" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Queue-related APIs (Recommended)", + "uri":"dli_02_0193.html", + "doc_type":"api", + "p_code":"", + "code":"20" + }, + { + "desc":"This API is used to create a queue. The queue will be bound to specified compute resources.It takes 5 to 15 minutes to start a job using a new queue for the first time.UR", + "product_code":"dli", + "title":"Creating a Queue", + "uri":"dli_02_0194.html", + "doc_type":"api", + "p_code":"20", + "code":"21" + }, + { + "desc":"This API is used to delete a specified queue.If a task is being executed in a specified queue, the queue cannot be deleted.URI formatDELETE /v1.0/{project_id}/queues/{que", + "product_code":"dli", + "title":"Deleting a Queue", + "uri":"dli_02_0195.html", + "doc_type":"api", + "p_code":"20", + "code":"22" + }, + { + "desc":"This API is used to list all queues under the project.URI formatGET/v1.0/{project_id}/queuesGET/v1.0/{project_id}/queuesParameter descriptionURI parameterParameterMandato", + "product_code":"dli", + "title":"Querying All Queues", + "uri":"dli_02_0196.html", + "doc_type":"api", + "p_code":"20", + "code":"23" + }, + { + "desc":"This API is used to list details of a specific queue in a project.URI formatGET /v1.0/{project_id}/queues/{queue_name}GET /v1.0/{project_id}/queues/{queue_name}Parameter ", + "product_code":"dli", + "title":"Viewing Details of a Queue", + "uri":"dli_02_0016.html", + "doc_type":"api", + "p_code":"20", + "code":"24" + }, + { + "desc":"This API is used to restart, scale out, and scale in queues.Only SQL queues in the Available status can be restarted. 
(The queue status is Available only after the SQL jo", + "product_code":"dli", + "title":"Restarting, Scaling Out, and Scaling In Queues", + "uri":"dli_02_0249.html", + "doc_type":"api", + "p_code":"20", + "code":"25" + }, + { + "desc":"This API is used to send an address connectivity test request to a specified queue and insert the test address into the table.URI formatPOST /v1.0/{project_id}/queues/{q", + "product_code":"dli", + "title":"Creating an Address Connectivity Test Request", + "uri":"dli_02_0284.html", + "doc_type":"api", + "p_code":"20", + "code":"26" + }, + { + "desc":"This API is used to query the connectivity test result after the test is submitted.URI formatGET /v1.0/{project_id}/queues/{queue_name}/connection-test/{task_id}GET /v1", + "product_code":"dli", + "title":"Querying Connectivity Test Details of a Specified Address", + "uri":"dli_02_0285.html", + "doc_type":"api", + "p_code":"20", + "code":"27" + }, + { + "desc":"This API is used to create a scheduled CU change, that is, to create a scheduled CU change for a specified queue.URI formatPOST /v1/{project_id}/queues/{queue_name}/plan", + "product_code":"dli", + "title":"Creating a Scheduled CU Change", + "uri":"dli_02_0291.html", + "doc_type":"api", + "p_code":"20", + "code":"28" + }, + { + "desc":"This API is used to query the scheduled CU changes and list the changes of a specified queue.URI formatGET /v1/{project_id}/queues/{queue_name}/plansGET /v1/{project_id}/", + "product_code":"dli", + "title":"Viewing a Scheduled CU Change", + "uri":"dli_02_0292.html", + "doc_type":"api", + "p_code":"20", + "code":"29" + }, + { + "desc":"This API is used to delete scheduled CU changes in batches.URI formatPOST /v1/{project_id}/queues/{queue_name}/plans/batch-deletePOST /v1/{project_id}/queues/{queue_name}", + "product_code":"dli", + "title":"Deleting Scheduled CU Changes in Batches", + "uri":"dli_02_0293.html", + "doc_type":"api", + "p_code":"20", + "code":"30" + }, + { + "desc":"This API is used to 
delete a scheduled CU change for a queue with a specified ID.URI formatDELETE /v1/{project_id}/queues/{queue_name}/plans/{plan_id}DELETE /v1/{projec", + "product_code":"dli", + "title":"Deleting a Scheduled CU Change", + "uri":"dli_02_0294.html", + "doc_type":"api", + "p_code":"20", + "code":"31" + }, + { + "desc":"This API is used to modify a scheduled CU change for a queue with a specified ID.URI formatPUT /v1/{project_id}/queues/{queue_name}/plans/{plan_id}PUT /v1/{project_id}/", + "product_code":"dli", + "title":"Modifying a Scheduled CU Change", + "uri":"dli_02_0295.html", + "doc_type":"api", + "p_code":"20", + "code":"32" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"APIs Related to SQL Jobs", + "uri":"dli_02_0158.html", + "doc_type":"api", + "p_code":"", + "code":"33" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Database-related APIs", + "uri":"dli_02_0027.html", + "doc_type":"api", + "p_code":"33", + "code":"34" + }, + { + "desc":"This API is used to add a database.URI formatPOST /v1.0/{project_id}/databasesPOST /v1.0/{project_id}/databasesParameter descriptionURI parameterParameterMandatoryTypeDes", + "product_code":"dli", + "title":"Creating a Database", + "uri":"dli_02_0028.html", + "doc_type":"api", + "p_code":"34", + "code":"35" + }, + { + "desc":"This API is used to delete an empty database. If there are tables in the database to be deleted, delete all tables first. 
For details about the API used to delete tables,", + "product_code":"dli", + "title":"Deleting a Database", + "uri":"dli_02_0030.html", + "doc_type":"api", + "p_code":"34", + "code":"36" + }, + { + "desc":"This API is used to query the information about all the databases.URI formatGET /v1.0/{project_id}/databasesGET /v1.0/{project_id}/databasesParameter descriptionURI param", + "product_code":"dli", + "title":"Querying All Databases", + "uri":"dli_02_0029.html", + "doc_type":"api", + "p_code":"34", + "code":"37" + }, + { + "desc":"This API is used to modify the owner of a database.URI formatPUT /v1.0/{project_id}/databases/{database_name}/ownerPUT /v1.0/{project_id}/databases/{database_name}/ownerP", + "product_code":"dli", + "title":"Modifying a Database Owner", + "uri":"dli_02_0164.html", + "doc_type":"api", + "p_code":"34", + "code":"38" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Table-related APIs", + "uri":"dli_02_0031.html", + "doc_type":"api", + "p_code":"33", + "code":"39" + }, + { + "desc":"This API is used to create a table.This API is a synchronous API.URI formatPOST /v1.0/{project_id}/databases/{database_name}/tablesPOST /v1.0/{project_id}/databases/{data", + "product_code":"dli", + "title":"Creating a Table", + "uri":"dli_02_0034.html", + "doc_type":"api", + "p_code":"39", + "code":"40" + }, + { + "desc":"This API is used to delete a specified table.URI formatDELETE /v1.0/{project_id}/databases/{database_name}/tables/{table_name}DELETE /v1.0/{project_id}/databases/{databas", + "product_code":"dli", + "title":"Deleting a Table", + "uri":"dli_02_0035.html", + "doc_type":"api", + "p_code":"39", + "code":"41" + }, + { + "desc":"This API is used to query information about tables that meet the 
filtering criteria or all the tables in the specified database.URI formatGET /v1.0/{project_id}/databases", + "product_code":"dli", + "title":"Querying All Tables (Recommended)", + "uri":"dli_02_0105.html", + "doc_type":"api", + "p_code":"39", + "code":"42" + }, + { + "desc":"This API is used to describe metadata information in the specified table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}GET /v1.0/{project_", + "product_code":"dli", + "title":"Describing the Table Information", + "uri":"dli_02_0033.html", + "doc_type":"api", + "p_code":"39", + "code":"43" + }, + { + "desc":"This API is used to preview the first ten rows of a table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/previewGET /v1.0/{project_id}/dat", + "product_code":"dli", + "title":"Previewing Table Content", + "uri":"dli_02_0108.html", + "doc_type":"api", + "p_code":"39", + "code":"44" + }, + { + "desc":"This API is used to obtain the partition list.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/partitionsGET /v1.0/{project_id}/databases/{d", + "product_code":"dli", + "title":"Obtaining the Partition List", + "uri":"dli_02_0250.html", + "doc_type":"api", + "p_code":"39", + "code":"45" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Job-related APIs", + "uri":"dli_02_0017.html", + "doc_type":"api", + "p_code":"33", + "code":"46" + }, + { + "desc":"This API is used to import data from a file to a DLI or OBS table. 
Currently, only OBS data can be imported to a DLI or OBS table.This API is asynchronous.When importing ", + "product_code":"dli", + "title":"Importing Data", + "uri":"dli_02_0019.html", + "doc_type":"api", + "p_code":"46", + "code":"47" + }, + { + "desc":"This API is used to export data from a DLI table to a file.This API is asynchronous.Currently, data can be exported only from a DLI table to OBS, and the OBS path must be", + "product_code":"dli", + "title":"Exporting Data", + "uri":"dli_02_0020.html", + "doc_type":"api", + "p_code":"46", + "code":"48" + }, + { + "desc":"This API is used to submit jobs to a queue using SQL statements.The job types support DDL, DCL, IMPORT, QUERY, and INSERT. The IMPORT function is the same as that describ", + "product_code":"dli", + "title":"Submitting a SQL Job (Recommended)", + "uri":"dli_02_0102.html", + "doc_type":"api", + "p_code":"46", + "code":"49" + }, + { + "desc":"This API is used to cancel a submitted job. If execution of a job completes or fails, this job cannot be canceled.URI formatDELETE /v1.0/{project_id}/jobs/{job_id}DELETE ", + "product_code":"dli", + "title":"Canceling a Job (Recommended)", + "uri":"dli_02_0104.html", + "doc_type":"api", + "p_code":"46", + "code":"50" + }, + { + "desc":"This API is used to query information about all jobs in the current project.URI formatGET /v1.0/{project_id}/jobsGET /v1.0/{project_id}/jobsParameter descriptionURI param", + "product_code":"dli", + "title":"Querying All Jobs", + "uri":"dli_02_0025.html", + "doc_type":"api", + "p_code":"46", + "code":"51" + }, + { + "desc":"This API is used to view the job execution result after a job is executed using SQL query statements. 
Currently, you can only query execution results of jobs of the QUERY", + "product_code":"dli", + "title":"Previewing SQL Job Query Results", + "uri":"dli_02_0312.html", + "doc_type":"api", + "p_code":"46", + "code":"52" + }, + { + "desc":"This API is used to query the status of a submitted job.URI formatGET /v1.0/{project_id}/jobs/{job_id}/statusGET /v1.0/{project_id}/jobs/{job_id}/statusParameter descript", + "product_code":"dli", + "title":"Querying Job Status", + "uri":"dli_02_0021.html", + "doc_type":"api", + "p_code":"46", + "code":"53" + }, + { + "desc":"This API is used to query details about jobs, including databasename, tablename, file size, and export mode.URI formatGET/v1.0/{project_id}/jobs/{job_id}/detailGET/v1.0/{", + "product_code":"dli", + "title":"Querying Job Details", + "uri":"dli_02_0022.html", + "doc_type":"api", + "p_code":"46", + "code":"54" + }, + { + "desc":"This API is used to check the SQL syntax.URI formatPOST /v1.0/{project_id}/jobs/check-sqlPOST /v1.0/{project_id}/jobs/check-sqlParameter descriptionURI parametersParamete", + "product_code":"dli", + "title":"Checking SQL Syntax", + "uri":"dli_02_0107.html", + "doc_type":"api", + "p_code":"46", + "code":"55" + }, + { + "desc":"This API is used to export results returned from the query using SQL statements to OBS. Only the query result of QUERY jobs can be exported.This API is asynchronous.Curre", + "product_code":"dli", + "title":"Exporting Query Results", + "uri":"dli_02_0024.html", + "doc_type":"api", + "p_code":"46", + "code":"56" + }, + { + "desc":"This API is used to obtain the job execution progress. If a job is being executed, information about its subjobs can be obtained. 
If a job has just started or has ended, ", + "product_code":"dli", + "title":"Querying the Job Execution Progress", + "uri":"dli_02_0296.html", + "doc_type":"api", + "p_code":"46", + "code":"57" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Package Group-related APIs", + "uri":"dli_02_0166.html", + "doc_type":"api", + "p_code":"", + "code":"58" + }, + { + "desc":"This API is used to upload a package group to a project. The function is similar to creating a package on the management console.URI formatPOST /v2.0/{project_id}/resourc", + "product_code":"dli", + "title":"Uploading a Package Group", + "uri":"dli_02_0130.html", + "doc_type":"api", + "p_code":"58", + "code":"59" + }, + { + "desc":"This API is used to query all resources in a project, including groups.URI formatGET /v2.0/{project_id}/resourcesGET /v2.0/{project_id}/resourcesParameter descriptionURI ", + "product_code":"dli", + "title":"Querying Package Group List", + "uri":"dli_02_0168.html", + "doc_type":"api", + "p_code":"58", + "code":"60" + }, + { + "desc":"This API is used to upload a group of JAR packages to a project.When a resource group with the same name is uploaded, the new group overwrites the old group.URI formatPOS", + "product_code":"dli", + "title":"Uploading a JAR Package Group", + "uri":"dli_02_0169.html", + "doc_type":"api", + "p_code":"58", + "code":"61" + }, + { + "desc":"This API is used to upload a group of PyFile packages to a project.When a group with the same name as the PyFile package is uploaded, the new group overwrites the old gro", + "product_code":"dli", + "title":"Uploading a PyFile Package Group", + "uri":"dli_02_0170.html", + "doc_type":"api", + "p_code":"58", + "code":"62" + }, + { + "desc":"This API is used 
to upload a group of File packages to a project.When the File package group with the same name is uploaded, the new group overwrites the old group.URI fo", + "product_code":"dli", + "title":"Uploading a File Package Group", + "uri":"dli_02_0171.html", + "doc_type":"api", + "p_code":"58", + "code":"63" + }, + { + "desc":"This API is used to query resource information of a package group in a Project.URI formatGET /v2.0/{project_id}/resources/{resource_name}GET /v2.0/{project_id}/resources/", + "product_code":"dli", + "title":"Querying Resource Packages in a Group", + "uri":"dli_02_0172.html", + "doc_type":"api", + "p_code":"58", + "code":"64" + }, + { + "desc":"This API is used to delete resource packages in a group in a Project.URI formatDELETE /v2.0/{project_id}/resources/{resource_name}DELETE /v2.0/{project_id}/resources/{res", + "product_code":"dli", + "title":"Deleting a Resource Package from a Group", + "uri":"dli_02_0173.html", + "doc_type":"api", + "p_code":"58", + "code":"65" + }, + { + "desc":"This API is used to change the owner of a program package.URI formatPUT /v2.0/{project_id}/resources/ownerPUT /v2.0/{project_id}/resources/ownerParameter descriptionURI p", + "product_code":"dli", + "title":"Changing the Owner of a Group or Resource Package", + "uri":"dli_02_0253.html", + "doc_type":"api", + "p_code":"58", + "code":"66" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"APIs Related to Flink Jobs", + "uri":"dli_02_0223.html", + "doc_type":"api", + "p_code":"", + "code":"67" + }, + { + "desc":"This API is used to grant DLI the permission to access OBS buckets for saving job checkpoints and run logs.URI formatPOST /v1.0/{project_id}/dli/obs-authorizePOST /v1.0/{", + "product_code":"dli", + "title":"Granting OBS Permissions to DLI", + "uri":"dli_02_0225.html", + "doc_type":"api", + "p_code":"67", + "code":"68" + }, + { + "desc":"This API is used to create a Flink streaming SQL job.URI formatPOST /v1.0/{project_id}/streaming/sql-jobsPOST /v1.0/{project_id}/streaming/sql-jobsParameter descriptionUR", + "product_code":"dli", + "title":"Creating a SQL Job", + "uri":"dli_02_0228.html", + "doc_type":"api", + "p_code":"67", + "code":"69" + }, + { + "desc":"This API is used to modify a Flink SQL job.URI formatPUT /v1.0/{project_id}/streaming/sql-jobs/{job_id}PUT /v1.0/{project_id}/streaming/sql-jobs/{job_id}Parameter descrip", + "product_code":"dli", + "title":"Updating a SQL Job", + "uri":"dli_02_0229.html", + "doc_type":"api", + "p_code":"67", + "code":"70" + }, + { + "desc":"This API is used to create custom jobs, which currently support the JAR format and run in dedicated queues.URI formatPOST /v1.0/{project_id}/streaming/flink-jobsPOST /v1.", + "product_code":"dli", + "title":"Creating a Flink Jar job", + "uri":"dli_02_0230.html", + "doc_type":"api", + "p_code":"67", + "code":"71" + }, + { + "desc":"This API is used to update custom jobs, which currently support the JAR format and run in dedicated queues.URI formatPUT /v1.0/{project_id}/streaming/flink-jobs/{job_id}P", + "product_code":"dli", + "title":"Updating a Flink Jar Job", + "uri":"dli_02_0231.html", + "doc_type":"api", + "p_code":"67", + "code":"72" + }, + { + "desc":"This API is used to trigger batch 
job running.URI formatPOST /v1.0/{project_id}/streaming/jobs/runPOST /v1.0/{project_id}/streaming/jobs/runParameter descriptionURI param", + "product_code":"dli", + "title":"Running Jobs in Batches", + "uri":"dli_02_0233.html", + "doc_type":"api", + "p_code":"67", + "code":"73" + }, + { + "desc":"This API is used to query the list of the current user's jobs. You can set the job ID as the ID and query jobs whose IDs are greater than or less than the ID. You can als", + "product_code":"dli", + "title":"Querying the Job List", + "uri":"dli_02_0234.html", + "doc_type":"api", + "p_code":"67", + "code":"74" + }, + { + "desc":"This API is used to query details of a job.URI formatGET /v1.0/{project_id}/streaming/jobs/{job_id}GET /v1.0/{project_id}/streaming/jobs/{job_id}Parameter descriptionURI ", + "product_code":"dli", + "title":"Querying Job Details", + "uri":"dli_02_0235.html", + "doc_type":"api", + "p_code":"67", + "code":"75" + }, + { + "desc":"This API is used to query a job execution plan.URI formatGET /v1.0/{project_id}/streaming/jobs/{job_id}/execute-graphGET /v1.0/{project_id}/streaming/jobs/{job_id}/execut", + "product_code":"dli", + "title":"Querying the Job Execution Plan", + "uri":"dli_02_0236.html", + "doc_type":"api", + "p_code":"67", + "code":"76" + }, + { + "desc":"This API is used to stop running jobs in batches.URI formatPOST /v1.0/{project_id}/streaming/jobs/stopPOST /v1.0/{project_id}/streaming/jobs/stopParameter descriptionURI ", + "product_code":"dli", + "title":"Stopping Jobs in Batches", + "uri":"dli_02_0241.html", + "doc_type":"api", + "p_code":"67", + "code":"77" + }, + { + "desc":"This API is used to delete a Flink job at any state.The job records will not be deleted.URI formatDELETE /v1.0/{project_id}/streaming/jobs/{job_id}DELETE /v1.0/{project_i", + "product_code":"dli", + "title":"Deleting a Job", + "uri":"dli_02_0242.html", + "doc_type":"api", + "p_code":"67", + "code":"78" + }, + { + "desc":"This API is used to batch delete jobs at any 
state.URI formatPOST /v1.0/{project_id}/streaming/jobs/deletePOST /v1.0/{project_id}/streaming/jobs/deleteParameter descripti", + "product_code":"dli", + "title":"Deleting Jobs in Batches", + "uri":"dli_02_0243.html", + "doc_type":"api", + "p_code":"67", + "code":"79" + }, + { + "desc":"This API is used to export Flink job data.URI formatPOST /v1.0/{project_id}/streaming/jobs/exportPOST /v1.0/{project_id}/streaming/jobs/exportParameter descriptionURI par", + "product_code":"dli", + "title":"Exporting a Flink Job", + "uri":"dli_02_0254.html", + "doc_type":"api", + "p_code":"67", + "code":"80" + }, + { + "desc":"This API is used to import Flink job data.URI formatPOST /v1.0/{project_id}/streaming/jobs/importPOST /v1.0/{project_id}/streaming/jobs/importParameter descriptionURI par", + "product_code":"dli", + "title":"Importing a Flink Job", + "uri":"dli_02_0255.html", + "doc_type":"api", + "p_code":"67", + "code":"81" + }, + { + "desc":"This API is used to generate a static stream graph for a Flink SQL job.URI formatPOST /v3/{project_id}/streaming/jobs/{job_id}/gen-graphPOST /v3/{project_id}/streaming/jo", + "product_code":"dli", + "title":"Generating a Static Stream Graph for a Flink SQL Job", + "uri":"dli_02_0316.html", + "doc_type":"api", + "p_code":"67", + "code":"82" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"APIs Related to Spark jobs", + "uri":"dli_02_0109.html", + "doc_type":"api", + "p_code":"", + "code":"83" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Batch Processing-related APIs", + "uri":"dli_02_0162.html", + "doc_type":"api", + "p_code":"83", + "code":"84" + }, + { + "desc":"This API is used to create a batch processing job in a queue.URI formatPOST /v2.0/{project_id}/batchesPOST /v2.0/{project_id}/batchesParameter descriptionURI parameterPar", + "product_code":"dli", + "title":"Creating a Batch Processing Job", + "uri":"dli_02_0124.html", + "doc_type":"api", + "p_code":"84", + "code":"85" + }, + { + "desc":"This API is used to cancel a batch processing job.Batch processing jobs in the Successful or Failed state cannot be canceled.URI formatDELETE /v2.0/{project_id}/batches/{", + "product_code":"dli", + "title":"Canceling a Batch Processing Job", + "uri":"dli_02_0129.html", + "doc_type":"api", + "p_code":"84", + "code":"86" + }, + { + "desc":"This API is used to obtain the list of batch processing jobs in a queue of a project.URI formatGET /v2.0/{project_id}/batchesGET /v2.0/{project_id}/batchesParameter descr", + "product_code":"dli", + "title":"Obtaining the List of Batch Processing Jobs", + "uri":"dli_02_0125.html", + "doc_type":"api", + "p_code":"84", + "code":"87" + }, + { + "desc":"This API is used to query details about a batch processing job based on the job ID.URI formatGET /v2.0/{project_id}/batches/{batch_id}GET /v2.0/{project_id}/batches/{batc", + "product_code":"dli", + "title":"Querying Batch Job Details", + "uri":"dli_02_0126.html", + "doc_type":"api", + "p_code":"84", + "code":"88" + }, + { + "desc":"This API is used to obtain the execution status of a batch processing job.URI formatGET /v2.0/{project_id}/batches/{batch_id}/stateGET /v2.0/{project_id}/batches/{batch_i", + "product_code":"dli", + "title":"Querying a Batch Job Status", + "uri":"dli_02_0127.html", + "doc_type":"api", + "p_code":"84", + "code":"89" + }, 
+ { + "desc":"This API is used to query the back-end logs of batch processing jobs.URI formatGET /v2.0/{project_id}/batches/{batch_id}/logGET /v2.0/{project_id}/batches/{batch_id}/logP", + "product_code":"dli", + "title":"Querying Batch Job Logs", + "uri":"dli_02_0128.html", + "doc_type":"api", + "p_code":"84", + "code":"90" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"APIs Related to Flink Job Templates", + "uri":"dli_02_0244.html", + "doc_type":"api", + "p_code":"", + "code":"91" + }, + { + "desc":"This API is used to create a user template for the DLI service. A maximum of 100 user templates can be created.URI formatPOST /v1.0/{project_id}/streaming/job-templatesPO", + "product_code":"dli", + "title":"Creating a Template", + "uri":"dli_02_0245.html", + "doc_type":"api", + "p_code":"91", + "code":"92" + }, + { + "desc":"This API is used to update existing templates in DLI.URI formatPUT /v1.0/{project_id}/streaming/job-templates/{template_id}PUT /v1.0/{project_id}/streaming/job-templates/", + "product_code":"dli", + "title":"Updating a Template", + "uri":"dli_02_0246.html", + "doc_type":"api", + "p_code":"91", + "code":"93" + }, + { + "desc":"This API is used to delete a template. A template used by jobs can also be deleted.URI formatDELETE /v1.0/{project_id}/streaming/job-templates/{template_id}DELETE /v1.0/{", + "product_code":"dli", + "title":"Deleting a Template", + "uri":"dli_02_0247.html", + "doc_type":"api", + "p_code":"91", + "code":"94" + }, + { + "desc":"This API is used to query the job template list. 
Currently, only custom templates can be queried.URI formatGET /v1.0/{project_id}/streaming/job-templatesGET /v1.0/{projec", + "product_code":"dli", + "title":"Querying the Template List", + "uri":"dli_02_0248.html", + "doc_type":"api", + "p_code":"91", + "code":"95" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"APIs Related to Enhanced Datasource Connections", + "uri":"dli_02_0186.html", + "doc_type":"api", + "p_code":"", + "code":"96" + }, + { + "desc":"This API is used to create an enhanced datasource connection with other services.URI formatPOST /v2.0/{project_id}/datasource/enhanced-connectionsPOST /v2.0/{project_id}/", + "product_code":"dli", + "title":"Creating an Enhanced Datasource Connection", + "uri":"dli_02_0187.html", + "doc_type":"api", + "p_code":"96", + "code":"97" + }, + { + "desc":"This API is used to delete an enhanced datasource connection.The connection that is being created cannot be deleted.URI formatDELETE /v2.0/{project_id}/datasource/enhance", + "product_code":"dli", + "title":"Deleting an Enhanced Datasource Connection", + "uri":"dli_02_0188.html", + "doc_type":"api", + "p_code":"96", + "code":"98" + }, + { + "desc":"This API is used to query the list of created enhanced datasource connections.URI formatGET /v2.0/{project_id}/datasource/enhanced-connectionsGET /v2.0/{project_id}/datas", + "product_code":"dli", + "title":"Querying an Enhanced Datasource Connection List", + "uri":"dli_02_0190.html", + "doc_type":"api", + "p_code":"96", + "code":"99" + }, + { + "desc":"This API is used to query the created enhanced datasource connections.URI formatGET /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}GET /v2.0/{project_i", + "product_code":"dli", + "title":"Querying 
an Enhanced Datasource Connection", + "uri":"dli_02_0189.html", + "doc_type":"api", + "p_code":"96", + "code":"100" + }, + { + "desc":"This API is used to bind a queue to a created enhanced datasource connection.URI formatPOST /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/associate-q", + "product_code":"dli", + "title":"Binding a Queue", + "uri":"dli_02_0191.html", + "doc_type":"api", + "p_code":"96", + "code":"101" + }, + { + "desc":"This API is used to unbind a queue from an enhanced datasource connection.URI formatPOST /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/disassociate-q", + "product_code":"dli", + "title":"Unbinding a Queue", + "uri":"dli_02_0192.html", + "doc_type":"api", + "p_code":"96", + "code":"102" + }, + { + "desc":"This API is used to modify the host information of a connected datasource. Only full overwriting is supported.URI formatPUT /v2.0/{project_id}/datasource/enhanced-connect", + "product_code":"dli", + "title":"Modifying the Host Information", + "uri":"dli_02_0200.html", + "doc_type":"api", + "p_code":"96", + "code":"103" + }, + { + "desc":"This API is used to query the authorization about an enhanced datasource connection.URI formatGET /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/privi", + "product_code":"dli", + "title":"Querying Authorization of an Enhanced Datasource Connection", + "uri":"dli_02_0256.html", + "doc_type":"api", + "p_code":"96", + "code":"104" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Global Variable-related APIs", + "uri":"dli_02_0257.html", + "doc_type":"api", + "p_code":"", + "code":"105" + }, + { + "desc":"This API is used to create a global variable.URI formatPOST /v1.0/{project_id}/variablesPOST /v1.0/{project_id}/variablesParameter descriptionURI parametersParameterManda", + "product_code":"dli", + "title":"Creating a Global Variable", + "uri":"dli_02_0258.html", + "doc_type":"api", + "p_code":"105", + "code":"106" + }, + { + "desc":"This API is used to delete a global variable.Only the user who creates a global variable can delete the variable.URI formatDELETE /v1.0/{project_id}/variables/{var_name}D", + "product_code":"dli", + "title":"Deleting a Global Variable", + "uri":"dli_02_0259.html", + "doc_type":"api", + "p_code":"105", + "code":"107" + }, + { + "desc":"This API is used to modify a global variable.URI formatPUT /v1.0/{project_id}/variables/{var_name}PUT /v1.0/{project_id}/variables/{var_name}Parameter descriptionURI para", + "product_code":"dli", + "title":"Modifying a Global Variable", + "uri":"dli_02_0260.html", + "doc_type":"api", + "p_code":"105", + "code":"108" + }, + { + "desc":"This API is used to query information about all global variables in the current project.URI formatGET /v1.0/{project_id}/variablesGET /v1.0/{project_id}/variablesParamete", + "product_code":"dli", + "title":"Querying All Global Variables", + "uri":"dli_02_0261.html", + "doc_type":"api", + "p_code":"105", + "code":"109" + }, + { + "desc":"This section describes fine-grained permissions management for your DLI. 
If your account does not need individual IAM users, then you may skip this section.By default, ne", + "product_code":"dli", + "title":"Permissions Policies and Supported Actions", + "uri":"dli_02_0201.html", + "doc_type":"api", + "p_code":"", + "code":"110" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Public Parameters", + "uri":"dli_02_0011.html", + "doc_type":"api", + "p_code":"", + "code":"111" + }, + { + "desc":"Table 1 describes status codes.", + "product_code":"dli", + "title":"Status Codes", + "uri":"dli_02_0012.html", + "doc_type":"api", + "p_code":"111", + "code":"112" + }, + { + "desc":"If an error occurs in API calling, no result is returned. Identify the cause of error based on the error codes of each API. If an error occurs in API calling, HTTP status", + "product_code":"dli", + "title":"Error Code", + "uri":"dli_02_0056.html", + "doc_type":"api", + "p_code":"111", + "code":"113" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Obtaining a Project ID", + "uri":"dli_02_0183.html", + "doc_type":"api", + "p_code":"111", + "code":"114" + }, + { + "desc":"An account ID (domain-id) is required for some URLs when an API is called. 
To obtain an account ID, perform the following operations:Log in to the management console.Hove", + "product_code":"dli", + "title":"Obtaining an Account ID", + "uri":"dli_02_0013.html", + "doc_type":"api", + "p_code":"111", + "code":"115" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Change History", + "uri":"dli_02_00003.html", + "doc_type":"api", + "p_code":"", + "code":"116" + } +] \ No newline at end of file diff --git a/docs/dli/api-ref/PARAMETERS.txt b/docs/dli/api-ref/PARAMETERS.txt new file mode 100644 index 00000000..6da8d5f0 --- /dev/null +++ b/docs/dli/api-ref/PARAMETERS.txt @@ -0,0 +1,3 @@ +version="" +language="en-us" +type="" \ No newline at end of file diff --git a/docs/dli/api-ref/dli_02_00003.html b/docs/dli/api-ref/dli_02_00003.html new file mode 100644 index 00000000..69640514 --- /dev/null +++ b/docs/dli/api-ref/dli_02_00003.html @@ -0,0 +1,20 @@ + + +

Change History

+
+
+ + + + + + + +
Table 1 Change History

Released On

+

Description

+

2023-01-30

+

This issue is the first official release.

+
+
+
+ diff --git a/docs/dli/api-ref/dli_02_0011.html b/docs/dli/api-ref/dli_02_0011.html new file mode 100644 index 00000000..02e45091 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0011.html @@ -0,0 +1,17 @@ + + +

Public Parameters

+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0012.html b/docs/dli/api-ref/dli_02_0012.html new file mode 100644 index 00000000..0aaf966a --- /dev/null +++ b/docs/dli/api-ref/dli_02_0012.html @@ -0,0 +1,329 @@ + + +

Status Codes

+

Table 1 describes status codes.

+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1 Status codes

Status Code

+

Message

+

Description

+

100

+

Continue

+

The client should continue with its request.

+

This interim response is used to inform the client that part of the request has been received and has not yet been rejected by the server.

+

101

+

Switching Protocols

+

The protocol should be switched. The protocol can only be switched to a newer protocol.

+

For example, the current HTTP protocol is switched to a later version of HTTP.

+

200

+

Success

+

The request has been fulfilled. This indicates that the server has provided the requested web page.

+

201

+

Created

+

The request is successful and the server has created a new resource.

+

202

+

Accepted

+

The request has been accepted, but the processing has not been completed.

+

203

+

Non-Authoritative Information

+

Non-authoritative information. The request was successful, but the returned metadata may come from a local or third-party copy rather than the origin server.

+

204

+

NoContent

+

The server has successfully processed the request, but does not return any content.

+

The status code is returned in response to an HTTP OPTIONS request.

+

205

+

Reset Content

+

The server has successfully processed the request, but does not return any content.

+

206

+

Partial Content

+

The server has successfully processed the partial GET request.

+

300

+

Multiple Choices

+

There are multiple options for the location of the requested resource. The response contains a list of resource characteristics and addresses from which a user terminal (such as a browser) can choose the most appropriate one.

+

301

+

Moved Permanently

+

The requested resource has been assigned a new permanent URI, and the new URI is contained in the response.

+

302

+

Found

+

The requested resource resides temporarily under a different URI.

+

303

+

See Other

+

The response to the request can be found under a different URI,

+

and should be retrieved using a GET method.

+

304

+

Not Modified

+

The requested resource has not been modified. In such a case, there is no need to retransmit the resource since the client still has a previously-downloaded copy.

+

305

+

Use Proxy

+

The requested resource is available only through a proxy.

+

306

+

Unused

+

The HTTP status code is no longer used.

+

400

+

BadRequest

+

Invalid request.

+

The client should not repeat the request without modifications.

+

401

+

Unauthorized

+

This status code is returned after the client provides the authentication information, indicating that the authentication information is incorrect or invalid.

+

402

+

Payment Required

+

This status code is reserved for future use.

+

403

+

Forbidden

+

The server has received the request and understood it, but the server is refusing to respond to it.

+

The client should modify the request instead of re-initiating it.

+

404

+

NotFound

+

The requested resource cannot be found.

+

The client should not repeat the request without modifications.

+

405

+

MethodNotAllowed

+

A request method is not supported for the requested resource.

+

The client should not repeat the request without modifications.

+

406

+

Not Acceptable

+

The server could not fulfill the request according to the content characteristics of the request.

+

407

+

Proxy Authentication Required

+

This code is similar to 401, but indicates that the client must first authenticate itself with the proxy.

+

408

+

Request Time-out

+

The server has timed out waiting for the request.

+

The client may repeat the request without modifications at any time later.

+

409

+

Conflict

+

The request could not be processed due to a conflict in the request.

+

This status code indicates that the resource that the client is attempting to create already exists, or that the request has failed to be processed because of the update of the conflict request.

+

410

+

Gone

+

The requested resource cannot be found.

+

The status code indicates that the requested resource has been deleted permanently.

+

411

+

Length Required

+

The server is refusing to process the request without a defined Content-Length.

+

412

+

Precondition Failed

+

The server does not meet one of the preconditions that the requester puts on the request.

+

413

+

Request Entity Too Large

+

The server is refusing to process a request because the request entity is too large for the server to process. The server may disable the connection to prevent the client from sending requests consecutively. If the server is only temporarily unable to process the request, the response will contain a Retry-After header field.

+

414

+

Request-URI Too Large

+

The Request-URI is too long for the server to process.

+

415

+

Unsupported Media Type

+

The server does not support the media type in the request.

+

416

+

Requested range not satisfiable

+

The requested range is invalid.

+

417

+

Expectation Failed

+

The server has failed to meet the requirements of the Expect request-header field.

+

422

+

UnprocessableEntity

+

The request was well-formed but was unable to be followed due to semantic errors.

+

429

+

TooManyRequests

+

The client sends excessive requests to the server within a given time (exceeding the limit on the access frequency of the client), or the server receives excessive requests within a given time (beyond its processing capability). In this case, the client should resend the request after the time specified in the Retry-After header of the response has elapsed.

+

500

+

InternalServerError

+

The server encountered an unexpected internal error and was unable to complete the request.

+

501

+

Not Implemented

+

The server does not support the requested function.

+

502

+

Bad Gateway

+

The server was acting as a gateway or proxy and received an invalid request from the remote server.

+

503

+

ServiceUnavailable

+

The server is temporarily unable to handle the request due to overloading or maintenance.

+

The client may repeat the request after a delay.

+

504

+

ServerTimeout

+

The request cannot be fulfilled within a given time. This status code is returned to the client only when the Timeout parameter is specified in the request.

+

505

+

HTTP Version not supported

+

The server does not support the HTTP protocol version used in the request.

+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0013.html b/docs/dli/api-ref/dli_02_0013.html new file mode 100644 index 00000000..4638b55b --- /dev/null +++ b/docs/dli/api-ref/dli_02_0013.html @@ -0,0 +1,12 @@ + + +

Obtaining an Account ID

+

An account ID (domain-id) is required for some URLs when an API is called. To obtain an account ID, perform the following operations:

+
  1. Log in to the management console.
  2. Hover the cursor on the username in the upper right corner and select My Credentials from the drop-down list.
  3. On the API Credentials page, view Account ID.
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0016.html b/docs/dli/api-ref/dli_02_0016.html new file mode 100644 index 00000000..ba488a14 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0016.html @@ -0,0 +1,247 @@ + + +

Viewing Details of a Queue

+

Function

This API is used to list details of a specific queue in a project.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

queueName

+

No

+

String

+

Name of a queue.

+
NOTE:

The queue name is case-insensitive. The uppercase letters will be automatically converted to lowercase letters.

+
+

description

+

No

+

String

+

Queue description.

+

owner

+

No

+

String

+

User who creates a queue.

+

create_time

+

No

+

Long

+

Time when the queue is created. The timestamp is expressed in milliseconds.

+

queueType

+

No

+

String

+

Indicates the queue type.

+
  • sql
  • general
  • all
+

If this parameter is not specified, the default value sql is used.

+

cuCount

+

No

+

Integer

+

Number of compute units (CUs) bound to a queue, that is, the number of CUs in the current queue.

+

resource_id

+

No

+

String

+

Resource ID of a queue.

+

resource_mode

+

No

+

Integer

+

Resource mode

+
  • 0: Shared queue
  • 1: Dedicated queue
+

enterprise_project_id

+

No

+

String

+

Enterprise project ID.

+

0 indicates the default enterprise project.

+
NOTE:

Users who have enabled Enterprise Management can set this parameter to bind a specified project.

+
+

cu_spec

+

No

+

Integer

+

Specifications of a queue. For a queue whose billing mode is yearly/monthly, this parameter indicates the CU value of the yearly/monthly part. For a pay-per-use queue, this parameter indicates the initial value when a user purchases a queue.

+

cu_scale_out_limit

+

No

+

Integer

+

Upper limit of the CU value for elastic scaling of the current queue.

+

cu_scale_in_limit

+

No

+

Integer

+

Lower limit of the CU value for elastic scaling of the current queue.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": true,
+    "message": "",
+    "owner": "testuser",
+    "description": "",
+    "queueName": "test",
+    "create_time": 1587613028851,
+    "queueType": "general",
+    "cuCount": 16,
+    "resource_id": "03d51b88-db63-4611-b779-9a72ba0cf58b",
+    "resource_mode": 0
+,
+    "resource_type": "vm",
+     "cu_spec": 16
+}
+
+

Status Codes

Table 3 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 3 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0017.html b/docs/dli/api-ref/dli_02_0017.html new file mode 100644 index 00000000..88fccf45 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0017.html @@ -0,0 +1,35 @@ + + +

Job-related APIs

+
+
+ + + +
+ diff --git a/docs/dli/api-ref/dli_02_0019.html b/docs/dli/api-ref/dli_02_0019.html new file mode 100644 index 00000000..0db25037 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0019.html @@ -0,0 +1,421 @@ + + +

Importing Data

+

Function

This API is used to import data from a file to a DLI or OBS table. Currently, only OBS data can be imported to a DLI or OBS table.

+
  • This API is asynchronous.
  • When importing data, you can select an existing OBS bucket path or create an OBS bucket path, but only one OBS bucket path can be specified.
  • If you need to create an OBS bucket, ensure that the bucket name complies with the following naming rules:
    • The name must be globally unique in OBS.
    • The name must contain 3 to 63 characters. Only lowercase letters, digits, hyphens (-), and periods (.) are allowed.
    • The name cannot start or end with a period (.) or hyphen (-), and cannot contain two consecutive periods (.) or contain a period (.) and a hyphen (-) adjacent to each other.
    • The name cannot be an IP address.
    • If the name contains any period (.), the security certificate verification may be triggered when you access the bucket or objects in the bucket.
    +
  • If the type of a column in the source file to be imported does not match that of the target table, the query result of the row will be null.
  • Two or more concurrent tasks of importing data to the same table are not allowed.
+
+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

data_path

+

Yes

+

String

+

Path to the data to be imported. Currently, only OBS data can be imported.

+

data_type

+

Yes

+

String

+

Type of the data to be imported. Currently, data types of CSV, Parquet, ORC, JSON, and Avro are supported.

+
NOTE:

Data in Avro format generated by Hive tables cannot be imported.

+
+

database_name

+

Yes

+

String

+

Name of the database where the table to which data is imported resides.

+

table_name

+

Yes

+

String

+

Name of the table to which data is imported.

+

with_column_header

+

No

+

Boolean

+

Whether the first line of the imported data contains column names, that is, headers. The default value is false, indicating that column names are not contained. This parameter can be specified when CSV data is imported.

+

delimiter

+

No

+

String

+

User-defined data delimiter. The default value is a comma (,). This parameter can be specified when CSV data is imported.

+

quote_char

+

No

+

String

+

User-defined quotation character. The default value is double quotation marks ("). This parameter can be specified when CSV data is imported.

+

escape_char

+

No

+

String

+

User-defined escape character. The default value is a backslash (\). This parameter can be specified when CSV data is imported.

+

date_format

+

No

+

String

+

Specified date format. The default value is yyyy-MM-dd. For details about the characters involved in the date format, see Table 3. This parameter can be specified when data in the CSV or JSON format is imported.

+

bad_records_path

+

No

+

String

+

Bad records storage directory during job execution. After this parameter is configured, the bad records are not imported into the target table.

+

timestamp_format

+

No

+

String

+

+

Specified time format. The default value is yyyy-MM-dd HH:mm:ss. For definitions about characters in the time format, see Table 3. This parameter can be specified when data in the CSV or JSON format is imported.

+

queue_name

+

No

+

String

+

Name of the queue that is specified to execute a task. If no queue is specified, the default queue is used.

+

overwrite

+

No

+

Boolean

+

Whether to overwrite data. The default value is false, indicating appending write. If the value is true, it indicates overwriting.

+

partition_spec

+

No

+

Object

+

Partition to which data is to be imported.

+
  • If this parameter is not set, the entire table data is dynamically imported. The imported data must contain the data in the partition column.
  • If this parameter is set and all partition information is configured during data import, data is imported to the specified partition. The imported data cannot contain data in the partition column.
  • If not all partition information is configured during data import, the imported data must contain all non-specified partition data. Otherwise, abnormal values such as null exist in the partition field column of non-specified data after data import.
+

conf

+

No

+

Array of Strings

+

User-defined parameter that applies to the job. Currently, dli.sql.dynamicPartitionOverwrite.enabled can be set to false by default. If it is set to true, data in a specified partition is overwritten. If it is set to false, data in the entire DataSource table is dynamically overwritten.

+
NOTE:

For dynamic overwrite of Hive partition tables, only the involved partition data can be overwritten. The entire table data cannot be overwritten.

+
+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Definition of characters involved in the date and time patterns

Character

+

Date or Time Element

+

Example

+

G

+

Era designator

+

AD

+

y

+

Year

+

1996; 96

+

M

+

Month

+

July; Jul; 07

+

w

+

Which week in a year

+

27 (Week 27 in the year)

+

W

+

Which week in a month

+

2 (Second week in the month)

+

D

+

Which day in a year

+

189 (Day 189 in the year)

+

d

+

Which day in a month

+

10 (Day 10 in the month)

+

u

+

Which day in a week

+

1 (Monday), ..., 7 (Sunday)

+

a

+

am/pm flag

+

pm (Afternoon)

+

H

+

Hour time (0-23)

+

2

+

h

+

Hour time (1-12)

+

12

+

m

+

Minute time

+

30

+

s

+

Second time

+

55

+

S

+

Millisecond

+

978

+

z

+

Time zone

+

Pacific Standard Time; PST; GMT-08:00

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully sent. Value true indicates that the request is successfully sent.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

job_id

+

No

+

String

+

ID of a job returned after a job is generated and submitted by using SQL statements. The job ID can be used to query the job status and results.

+

job_mode

+

No

+

String

+

Job execution mode. The options are as follows:

+
  • async: asynchronous
  • sync: synchronous
+
+
+
+

Example Request

{
+    "data_path": "obs://home/data1/DLI/t1.csv",
+    "data_type": "csv",
+    "database_name": "db2",
+    "table_name": "t2",
+    "with_column_header": false,
+    "delimiter": ",",
+    "quote_char": ",",
+    "escape_char": ",",
+    "date_format": "yyyy-MM-dd",
+    "timestamp_format": "yyyy-MM-dd'T'HH:mm:ss.SSSZZ",
+    "queue_name": "queue2",
+    "overwrite": false,
+    "partition_spec":{
+      "column1":  "2020-01-01",
+      "column2":  "columnPartValue"  
+     }
+}
+
+

Example Response

{
+  "is_success": true,
+  "message": "import data to table t2 started",
+  "job_id": "6b29eb77-4c16-4e74-838a-2cf7959e9202",
+  "job_mode":"async"
+}
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

Import succeeded.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0020.html b/docs/dli/api-ref/dli_02_0020.html new file mode 100644 index 00000000..d201b206 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0020.html @@ -0,0 +1,227 @@ + + +

Exporting Data

+

Function

This API is used to export data from a DLI table to a file.

+
  • This API is asynchronous.
  • Currently, data can be exported only from a DLI table to OBS, and the OBS path must be specified to the folder level. The OBS path cannot contain commas (,). The OBS bucket name cannot end with the regular expression format .[0-9]+(.*). Specifically, if the bucket name contains dots (.), the last dot (.) cannot be followed by a digit, for example, **.12abc and **.12.
  • Data can be exported across accounts. That is, after account B authorizes account A, account A can export data to the OBS path of account B if account A has the permission to read the metadata and permission information about the OBS bucket of account B and read and write the path.
+
+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

data_path

+

Yes

+

String

+

Path for storing the exported data. Currently, data can be stored only on OBS. If export_mode is set to errorifexists, the OBS path cannot contain the specified folder, for example, the test folder in the example request.

+

data_type

+

Yes

+

String

+

Type of data to be exported. Currently, only CSV and JSON are supported.

+

database_name

+

Yes

+

String

+

Name of the database where the table from which data is exported resides.

+

table_name

+

Yes

+

String

+

Name of the table from which data is exported.

+

compress

+

Yes

+

String

+

Compression mode for exported data. Currently, the compression modes gzip, bzip2, and deflate are supported. If you do not want to compress data, enter none.

+

queue_name

+

No

+

String

+

Name of the queue that is specified to execute a task. If no queue is specified, the default queue is used.

+

export_mode

+

No

+

String

+

Export mode. The parameter value can be ErrorIfExists or Overwrite. If export_mode is not specified, this parameter is set to ErrorIfExists by default.

+
  • ErrorIfExists: Ensure that the specified export directory does not exist. If the specified export directory exists, an error is reported and the export operation cannot be performed.
  • Overwrite: If you add new files to a specific directory, existing files will be deleted.
+

with_column_header

+

No

+

Boolean

+

Whether to export column names when exporting CSV and JSON data.

+
  • If this parameter is set to true, the column names are exported.
  • If this parameter is set to false, the column names are not exported.
  • If this parameter is left blank, the default value false is used.
+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully sent. Value true indicates that the request is successfully sent.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

job_id

+

No

+

String

+

ID of a job returned after a job is generated and submitted by using SQL statements. The job ID can be used to query the job status and results.

+

job_mode

+

No

+

String

+

Job execution mode. The options are as follows:

+
  • async: asynchronous
  • sync: synchronous
+
+
+
+

Example Request

{
+    "data_path": "obs://home/data1/DLI/test",
+    "data_type": "json",
+    "database_name": "db2",
+    "table_name": "t2",
+    "compress": "gzip",
+    "with_column_header": "true",
+    "queue_name": "queue2"
+}
+
+

Example Response

{
+  "is_success": true,
+  "message": "export all data from table db2.t2 to path obs://home/data1/DLI/test started",
+  "job_id": "828d4044-3d39-449b-b32c-957f7cfadfc9",
+  "job_mode":"async"
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

Export successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0021.html b/docs/dli/api-ref/dli_02_0021.html new file mode 100644 index 00000000..d70ccbdd --- /dev/null +++ b/docs/dli/api-ref/dli_02_0021.html @@ -0,0 +1,319 @@ + + +

Querying Job Status

+

Function

This API is used to query the status of a submitted job.

+
+

URI

+ +
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

Yes

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

Yes

+

String

+

System prompt. If execution succeeds, this parameter is left blank.

+

job_id

+

Yes

+

String

+

Job ID. You can get the value by calling Submitting a SQL Job (Recommended).

+

job_type

+

Yes

+

String

+

Type of a job. Possible types include DDL, DCL, IMPORT, EXPORT, QUERY, INSERT, DATA_MIGRATION, UPDATE, DELETE, RESTART_QUEUE, and SCALE_QUEUE.

+

job_mode

+

Yes

+

String

+

Job execution mode. The options are as follows:

+
  • async: asynchronous
  • sync: synchronous
+

queue_name

+

Yes

+

String

+

Name of the queue where the job is submitted.

+

owner

+

Yes

+

String

+

User who submits a job.

+

start_time

+

Yes

+

Long

+

Time when a job is started. The timestamp is in milliseconds.

+

duration

+

No

+

Long

+

Job running duration (unit: millisecond).

+

status

+

Yes

+

String

+

Status of a job, including RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, and CANCELLED.

+

input_row_count

+

No

+

Long

+

Number of records scanned during the Insert job execution.

+

bad_row_count

+

No

+

Long

+

Number of error records scanned during the Insert job execution.

+

input_size

+

Yes

+

Long

+

Size of scanned files during job execution (unit: byte).

+

result_count

+

Yes

+

Integer

+

Total number of records returned by the current job or total number of records inserted by the Insert job.

+

database_name

+

No

+

String

+

Name of the database where the target table resides. database_name is valid only for jobs of the IMPORT, EXPORT, and QUERY types.

+

table_name

+

No

+

String

+

Name of the target table. table_name is valid only for jobs of the IMPORT, EXPORT, and QUERY types.

+

detail

+

Yes

+

String

+

JSON character string for information about related columns.

+

statement

+

Yes

+

String

+

SQL statements of a job.

+

tags

+

No

+

Array of objects

+

Job tags. For details, see Table 3.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 3 tags parameters

Parameter

+

Mandatory

+

Type

+

Description

+

key

+

Yes

+

String

+

Tag key

+

value

+

Yes

+

String

+

Tag value

+
+
+
+

Example Request

None

+
+

Example Response

{
+  "is_success": true,
+  "message": "",
+  "job_id": "208b08d4-0dc2-4dd7-8879-ddd4c020d7aa",
+  "job_type": "QUERY",
+  "job_mode":"async",
+  "queue_name": "default",
+  "owner": "test",
+  "start_time": 1509335108918,
+  "duration": 2523,
+  "status": "FINISHED",
+  "input_size": 22,
+  "result_count": 4,
+  "database_name":"dbtest",
+  "table_name":"tbtest",
+  "detail": "{\"type\":\"struct\",\"fields\":[{\"name\":\"id\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}}]}",
+  "statement": "select * from t1"
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0022.html b/docs/dli/api-ref/dli_02_0022.html new file mode 100644 index 00000000..8c347c1b --- /dev/null +++ b/docs/dli/api-ref/dli_02_0022.html @@ -0,0 +1,335 @@ + + +

Querying Job Details

+

Function

This API is used to query details about jobs, including the database name, table name, file size, and export mode.

+
+

URI

+ +
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

Yes

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

Yes

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

job_id

+

Yes

+

String

+

Job ID.

+

owner

+

Yes

+

String

+

User who submits a job.

+

start_time

+

Yes

+

Long

+

Time when a job is started. The timestamp is in milliseconds.

+

duration

+

Yes

+

Long

+

Duration for executing the job (unit: millisecond).

+

export_mode

+

No

+

String

+

Specified export mode during data export and query result saving.

+

Available values are ErrorIfExists and Overwrite.

+
  • ErrorIfExists: Ensure that the specified export directory does not exist. If the specified export directory exists, an error is reported and the export operation cannot be performed.
  • Overwrite: If you add new files to a specific directory, existing files will be deleted.
+

data_path

+

Yes

+

String

+

Path to imported or exported files.

+

data_type

+

Yes

+

String

+

Type of data to be imported or exported. Currently, only CSV and JSON are supported.

+

database_name

+

Yes

+

String

+

Name of the database where the table, where data is imported or exported, resides.

+

table_name

+

Yes

+

String

+

Name of the table where data is imported or exported.

+

with_column_header

+

No

+

Boolean

+

Whether the imported data contains the column name during the execution of an import job.

+

delimiter

+

No

+

String

+

User-defined data delimiter set when the import job is executed.

+

quote_char

+

No

+

String

+

User-defined quotation character set when the import job is executed.

+

escape_char

+

No

+

String

+

User-defined escape character set when the import job is executed.

+

date_format

+

No

+

String

+

Table date format specified when the import job is executed.

+

timestamp_format

+

No

+

String

+

Table time format specified when the import job is executed.

+

compress

+

No

+

String

+

Compression mode specified when the export job is executed.

+

tags

+

No

+

Array of objects

+

Job tags. For details, see Table 3.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 3 tags parameter

Parameter

+

Mandatory

+

Type

+

Description

+

key

+

Yes

+

String

+

Tag key

+

value

+

Yes

+

String

+

Tag value

+
+
+
+

Example Request

None

+
+

Example Response

+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0024.html b/docs/dli/api-ref/dli_02_0024.html new file mode 100644 index 00000000..e261d291 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0024.html @@ -0,0 +1,235 @@ + + +

Exporting Query Results

+

Function

This API is used to export results returned from the query using SQL statements to OBS. Only the query result of QUERY jobs can be exported.

+
  • This API is asynchronous.
  • Currently, data can be exported only to OBS, and the OBS path must be specified to the folder level. The OBS path cannot contain commas (,). The OBS bucket name cannot end with the regular expression format ".[0-9]+(.*)". Specifically, if the bucket name contains dots (.), the last dot (.) cannot be followed by a digit, for example, "**.12abc" and "**.12".
+
+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

data_path

+

Yes

+

String

+

Path for storing the exported data. Currently, data can be stored only on OBS. The OBS path cannot contain folders, for example, the path folder in the sample request.

+

compress

+

No

+

String

+

Compression format of exported data. Currently, gzip, bzip2, and deflate are supported. The default value is none, indicating that data is not compressed.

+

data_type

+

Yes

+

String

+

Storage format of exported data. Currently, only CSV and JSON are supported.

+

queue_name

+

No

+

String

+

Name of the queue that is specified to execute a task. If no queue is specified, the default queue is used.

+

export_mode

+

No

+

String

+

Export mode. The parameter value can be ErrorIfExists or Overwrite. If export_mode is not specified, this parameter is set to ErrorIfExists by default.

+
  • ErrorIfExists: Ensure that the specified export directory does not exist. If the specified export directory exists, an error is reported and the export operation cannot be performed.
  • Overwrite: If you add new files to a specific directory, existing files will be deleted.
+

with_column_header

+

No

+

Boolean

+

Whether to export column names when exporting CSV and JSON data.

+
  • If this parameter is set to true, the column names are exported.
  • If this parameter is set to false, the column names are not exported.
  • If this parameter is left blank, the default value false is used.
+

limit_num

+

No

+

Integer

+

Number of data records to be exported. The default value is 0, indicating that all data records are exported.

+

encoding_type

+

No

+

String

+

Format of the data to be exported. The value can be utf-8, gb2312, or gbk. Value utf-8 will be used if this parameter is left empty.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

Yes

+

Boolean

+

Indicates whether the request is successfully sent. Value true indicates that the request is successfully sent.

+

message

+

Yes

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

job_id

+

No

+

String

+

ID of a job returned after a job is generated and submitted by using SQL statements. The job ID can be used to query the job status and results.

+

job_mode

+

No

+

String

+

Job execution mode. The options are as follows:

+
  • async: asynchronous
  • sync: synchronous
+
+
+
+

Example Request

{
+  "data_path": "obs://obs-bucket1/path",
+  "data_type": "json",
+  "compress": "gzip",
+  "with_column_header": "true",
+  "queue_name": "queue2",
+  "limit_num": 10
+}
+
+

Example Response

{
+  "is_success": true,
+  "message": "",
+  "job_id": "37a40ef9-86f5-42e6-b4c6-8febec89cc20",
+  "job_mode":"async"
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

Export successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0025.html b/docs/dli/api-ref/dli_02_0025.html new file mode 100644 index 00000000..29b2f2fd --- /dev/null +++ b/docs/dli/api-ref/dli_02_0025.html @@ -0,0 +1,506 @@ + + +

Querying All Jobs

+

Function

This API is used to query information about all jobs in the current project.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

Yes

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

Yes

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

job_count

+

Yes

+

Integer

+

Indicates the total number of jobs.

+

jobs

+

Yes

+

Array of Objects

+

Indicates the information about a job. For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4 jobs parameters

Parameter

+

Mandatory

+

Type

+

Description

+

job_id

+

Yes

+

String

+

Job ID.

+

job_type

+

Yes

+

String

+

Type of a job.

+

queue_name

+

Yes

+

String

+

Queue to which a job is submitted.

+

owner

+

Yes

+

String

+

User who submits a job.

+

start_time

+

Yes

+

Long

+

Time when a job is started. The timestamp is expressed in milliseconds.

+

duration

+

Yes

+

Long

+

Job running duration (unit: millisecond).

+

status

+

Yes

+

String

+

Status of a job, including LAUNCHING, RUNNING, FINISHED, FAILED, and CANCELLED.

+

input_row_count

+

No

+

Long

+

Number of records scanned during the Insert job execution.

+

bad_row_count

+

No

+

Long

+

Number of error records scanned during the Insert job execution.

+

input_size

+

Yes

+

Long

+

Size of scanned files during job execution.

+

result_count

+

Yes

+

Integer

+

Total number of records returned by the current job or total number of records inserted by the Insert job.

+

database_name

+

No

+

String

+

Name of the database where the target table resides. database_name is valid only for jobs of the Import and Export types.

+

table_name

+

No

+

String

+

Name of the target table. table_name is valid only for jobs of the Import and Export types.

+

with_column_header

+

No

+

Boolean

+

For import jobs, indicates whether the imported data contains column names.

+

detail

+

Yes

+

String

+

JSON character string of related columns queried by using SQL statements.

+

statement

+

Yes

+

String

+

SQL statements of a job.

+

message

+

No

+

String

+

System prompt

+

end_time

+

No

+

Long

+

Job end time. The timestamp is in milliseconds.

+

tags

+

No

+

Array of Objects

+

Job tags. For details, see Table 5.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 5 tags parameters

Parameter

+

Mandatory

+

Type

+

Description

+

key

+

Yes

+

String

+

Tag key

+

value

+

Yes

+

String

+

Tag value

+
+
+
+

Example Request

None

+
+

Example Response

{
+ "is_success": true,
+  "message": "",
+  "job_count": 1,
+  "jobs": [
+    {
+
+      "detail": "{\"type\":\"struct\",\"fields\":[{\"name\":\"name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"age\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}]}",
+      "duration": 17731,
+
+      "end_time": 1502349821460,
+      "input_size": 0,
+      "job_id": "37286cc7-0508-4ffd-b636-951c8a5c75de",
+      "job_type": "QUERY",
+      "message": "",
+      "owner": "tenant1",
+      "queue_name": "queue1",
+      "result_count": 3,
+      "start_time": 1502349803729,
+      "statement": "select * from t_json_002",
+      "status": "FINISHED",
+      "with_column_header": false
+    }
+  ]
+}
+
+

Status Codes

Table 6 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 6 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0027.html b/docs/dli/api-ref/dli_02_0027.html new file mode 100644 index 00000000..e0eb6b67 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0027.html @@ -0,0 +1,21 @@ + + +

Database-related APIs

+
+
+ + + +
+ diff --git a/docs/dli/api-ref/dli_02_0028.html b/docs/dli/api-ref/dli_02_0028.html new file mode 100644 index 00000000..af25cb25 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0028.html @@ -0,0 +1,198 @@ + + +

Creating a Database

+

Function

This API is used to add a database.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

database_name

+

Yes

+

String

+

Name of the created database.

+
  • The database name can contain only digits, letters, and underscores (_), but cannot contain only digits or start with an underscore (_).
  • The database name is case insensitive and cannot be left blank.
  • The length of the database name cannot exceed 128 characters.
+
NOTE:

The default database is a built-in database. You cannot create a database named default.

+
+

description

+

No

+

String

+

Information about the created database.

+

enterprise_project_id

+

No

+

String

+

Enterprise project ID. The value 0 indicates the default enterprise project.

+
NOTE:

Users who have enabled Enterprise Management can set this parameter to bind a specified project.

+
+

tags

+

No

+

Array of Objects

+

Database tag. For details, see Table 3.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 3 tags parameters

Parameter

+

Mandatory

+

Type

+

Description

+

key

+

Yes

+

String

+

Tag key.

+

value

+

Yes

+

String

+

Tag value.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 4 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

{
+  "database_name": "db1",
+  "description": "this is for test"
+}
+
+

Example Response

{
+ "is_success": true,
+ "message": ""
+}
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

The job is created successfully.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0029.html b/docs/dli/api-ref/dli_02_0029.html new file mode 100644 index 00000000..07b0ccbf --- /dev/null +++ b/docs/dli/api-ref/dli_02_0029.html @@ -0,0 +1,268 @@ + + +

Querying All Databases

+

Function

This API is used to query the information about all the databases.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

database_count

+

No

+

Integer

+

Total number of databases.

+

databases

+

No

+

Array of objects

+

Database information. For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4 databases parameters

Parameter

+

Mandatory

+

Type

+

Description

+

database_name

+

No

+

String

+

Name of a database.

+

owner

+

No

+

String

+

Creator of a database.

+

table_number

+

No

+

Integer

+

Number of tables in a database.

+

description

+

No

+

String

+

Information about a database.

+

enterprise_project_id

+

Yes

+

String

+

Enterprise project ID. The value 0 indicates the default enterprise project.

+
NOTE:

Users who have enabled Enterprise Management can set this parameter to bind a specified project.

+
+
+
+
+

Example Request

None

+
+

Example Response

{
+  "is_success": true,
+  "message": "",
+  "database_count": 1,
+  "databases": [
+    {
+      "database_name": "db2",
+      "description": "this is for test",
+      "owner": "tenant1",
+      "table_number": 15
+
+    }
+  ]
+}
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0030.html b/docs/dli/api-ref/dli_02_0030.html new file mode 100644 index 00000000..3a45942b --- /dev/null +++ b/docs/dli/api-ref/dli_02_0030.html @@ -0,0 +1,193 @@ + + +

Deleting a Database

+

Function

This API is used to delete an empty database. If there are tables in the database to be deleted, delete all tables first. For details about the API used to delete tables, see Deleting a Table.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

job_id

+

No

+

String

+

Returned job ID, which can be used to obtain the job status and result.

+

job_type

+

No

+

String

+

Type of a job. The options are as follows:

+
  • DDL
  • DCL
  • IMPORT
  • EXPORT
  • QUERY
  • INSERT
+

job_mode

+

No

+

String

+

Job execution mode. The options are as follows:

+
  • async: asynchronous
  • sync: synchronous
+
+
+
+

Example Request

None

+
+

Example Response

+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

Deletion succeeded.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0031.html b/docs/dli/api-ref/dli_02_0031.html new file mode 100644 index 00000000..1146a97c --- /dev/null +++ b/docs/dli/api-ref/dli_02_0031.html @@ -0,0 +1,25 @@ + + +

Table-related APIs

+
+
+ + + +
+ diff --git a/docs/dli/api-ref/dli_02_0033.html b/docs/dli/api-ref/dli_02_0033.html new file mode 100644 index 00000000..37921312 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0033.html @@ -0,0 +1,352 @@ + + +

Describing the Table Information

+

Function

This API is used to describe metadata information in the specified table.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

Yes

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

Yes

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

column_count

+

Yes

+

Integer

+

Total number of columns in the table.

+

columns

+

Yes

+

Array of Objects

+

Column information, including the column name, type, and description. For details, see Table 3.

+

table_type

+

Yes

+

String

+

Table type. The options are as follows:

+

MANAGED: DLI table

+

EXTERNAL: OBS table

+

VIEW: view

+

data_type

+

No

+

String

+

Data type, including CSV, Parquet, ORC, JSON, and Avro.

+

data_location

+

No

+

String

+

Path for storing data, which is an OBS path.

+

storage_properties

+

No

+

Array of Objects

+

Storage attribute, which is in the format of key/value and includes parameters delimiter, escape, quote, header, dateformat, and timestampformat.

+

table_comment

+

No

+

String

+

Table comment.

+

create_table_sql

+

No

+

String

+

Statement used to create a table.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 columns parameters

Parameter

+

Mandatory

+

Type

+

Description

+

column_name

+

Yes

+

String

+

Column name.

+

description

+

Yes

+

String

+

Description of a column.

+

type

+

Yes

+

String

+

Data type of a column.

+

is_partition_column

+

Yes

+

Boolean

+

Indicates whether the column is a partition column. The value true indicates that the column is a partition column, and the value false indicates that the column is not a partition column. The default value is false.

+
+
+
+

Example Request

None

+
+

Example Response

+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The operation is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0034.html b/docs/dli/api-ref/dli_02_0034.html new file mode 100644 index 00000000..85fed689 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0034.html @@ -0,0 +1,357 @@ + + +

Creating a Table

+

Function

This API is used to create a table.

+

This API is a synchronous API.

+
+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

table_name

+

Yes

+

String

+

Name of the created table.

+
  • The table name can contain only digits, letters, and underscores (_), but cannot contain only digits or start with an underscore (_).
  • The table name is case insensitive and cannot be left unspecified.
  • The table name can contain the dollar sign ($). Example: $test
  • The length of the table name cannot exceed 128 characters.
+

data_location

+

Yes

+

String

+

Location where data is stored. The options are as follows:

+
  • OBS: OBS table
  • DLI: DLI table
  • VIEW: VIEW table
+

description

+

No

+

String

+

Information about the new table.

+

columns

+

Yes

+

Array of Objects

+

Columns of the new table. For details about column parameters, see Table 3. This parameter is optional when data_location is VIEW.

+

select_statement

+

No

+

String

+

Query statement required for creating a view. The database to which the table belongs needs to be specified in the query statement, in the format of database.table. This parameter is mandatory when data_location is VIEW.

+

data_type

+

No

+

String

+

Type of the data to be added to the OBS table. The options are as follows: Parquet, ORC, CSV, JSON, and Avro.

+
NOTE:

This parameter is mandatory for an OBS table.

+
+

data_path

+

No

+

String

+

Storage path of data in the new OBS table, which must be a path on OBS and must begin with obs.

+
NOTE:

This parameter is mandatory for an OBS table.

+

Do not set this parameter to the OBS root directory. Otherwise, all data in the root directory will be cleared when you clear table data.

+
+

with_column_header

+

No

+

Boolean

+

Whether the table header is included in the OBS table data. Only data in CSV files has this attribute. This parameter is mandatory when data_location is OBS.

+

delimiter

+

No

+

String

+

User-defined data delimiter. Only data in CSV files has this attribute. This parameter is mandatory when data_location is OBS.

+

quote_char

+

No

+

String

+

User-defined reference character. Double quotation marks ("\") are used by default. Only data in CSV files has this attribute. This parameter is mandatory when data_location is OBS.

+

escape_char

+

No

+

String

+

User-defined escape character. Backslashes (\\) are used by default. Only data in CSV files has this attribute. This parameter is mandatory when data_location is OBS.

+

date_format

+

No

+

String

+

User-defined date type. yyyy-MM-dd is used by default. For details about the characters involved in the date format, see Table 3. Only data in CSV and JSON files has this attribute. This parameter is mandatory when data_location is OBS.

+

timestamp_format

+

No

+

String

+

User-defined timestamp type. yyyy-MM-dd HH:mm:ss is used by default. For definitions about characters in the timestamp format, see Table 3. Only data in CSV and JSON files has this attribute. This parameter is mandatory when data_location is OBS.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 columns parameters

Parameter

+

Mandatory

+

Type

+

Description

+

column_name

+

Yes

+

String

+

Name of a column.

+

type

+

Yes

+

String

+

Data type of a column.

+

description

+

No

+

String

+

Description of a column.

+

is_partition_column

+

No

+

Boolean

+

Whether the column is a partition column. The value true indicates a partition column, and the value false indicates a non-partition column. The default value is false.

+
NOTE:

When creating a partition table, ensure that at least one column in the table is a non-partition column. For details, see "Request example".

+
+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 4 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

+
+

Example Response

{
+  "is_success": true,
+  "message": ""
+}
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

The job is created successfully.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0035.html b/docs/dli/api-ref/dli_02_0035.html new file mode 100644 index 00000000..fb51df3b --- /dev/null +++ b/docs/dli/api-ref/dli_02_0035.html @@ -0,0 +1,163 @@ + + +

Deleting a Table

+

Function

This API is used to delete a specified table.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

job_mode

+

No

+

String

+

Job execution mode. The options are as follows:

+
  • async: asynchronous
  • sync: synchronous
+
+
+
+

Example Request

None

+
+

Example Response

{
+ "is_success": true,
+ "message": ""
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

Deletion succeeded.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0036.html b/docs/dli/api-ref/dli_02_0036.html new file mode 100644 index 00000000..43f1a6cd --- /dev/null +++ b/docs/dli/api-ref/dli_02_0036.html @@ -0,0 +1,23 @@ + + +

Permission-related APIs

+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0037.html b/docs/dli/api-ref/dli_02_0037.html new file mode 100644 index 00000000..9e3396f7 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0037.html @@ -0,0 +1,167 @@ + + +

Granting Users with the Queue Usage Permission

+

Function

This API is used to share a specific queue with other users. You can grant users with the permission to use the specified queue or revoke the permission.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

queue_name

+

Yes

+

String

+

Name of a queue.

+

user_name

+

Yes

+

String

+

Name of the user who is granted with usage permission on a queue or whose queue usage permission is revoked or updated.

+

action

+

Yes

+

String

+

Grants or revokes the permission. The parameter value can be grant, revoke, or update. Users can perform the update operation only when they have been granted with the grant and revoke permissions.

+
  • grant: Indicates to grant users with permissions.
  • revoke: Indicates to revoke permissions.
  • update: Indicates to clear all the original permissions and assign the permissions in the provided permission array.
+

privileges

+

Yes

+

Array of Strings

+

List of permissions to be granted, revoked, or updated. The following permissions are supported:

+
  • SUBMIT_JOB: indicates to submit a job.
  • CANCEL_JOB: indicates to cancel a job.
  • DROP_QUEUE: indicates to delete a queue.
  • GRANT_PRIVILEGE: indicates to assign a permission.
  • REVOKE_PRIVILEGE: indicates to revoke a permission.
  • SHOW_PRIVILEGE: indicates to view other user's permissions.
  • RESTART: indicates to restart the queue.
  • SCALE_QUEUE: indicates to change the queue specifications.
    NOTE:

    If the update list is empty, all permissions of the queue granted to the user are revoked.

    +
    +
+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

{
+    "queue_name": "queue1",
+    "user_name": "tenant2",
+    "action": "grant",
+    "privileges" : ["DROP_QUEUE", "SUBMIT_JOB"]
+}
+
+

Example Response

{
+  "is_success": true,
+  "message": ""
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

Authorization succeeds.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0038.html b/docs/dli/api-ref/dli_02_0038.html new file mode 100644 index 00000000..43f910c1 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0038.html @@ -0,0 +1,227 @@ + + +

Querying Queue Users

+

Function

This API is used to query names of all users who can use a specified queue.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

queue_name

+

No

+

String

+

Name of a queue.

+

privileges

+

No

+

Array of Object

+

Users who are granted with the permission to use this queue and the permission array to which users belong.

+

For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + +
Table 4 privileges parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_admin

+

No

+

Boolean

+

Whether the database user is an administrator.

+

user_name

+

No

+

String

+

Name of the user who has permission on the current queue.

+

privileges

+

No

+

Array of Strings

+

Permission of the user on the queue.

+
+
+
+

Example Request

None

+
+

Example Response

{
+  "is_success": true,
+  "message": "",
+  "privileges": [
+    {
+      "is_admin": true,
+      "privileges": [
+        "ALL"
+      ],
+      "user_name": "tenant1"
+    },
+    {
+      "is_admin": false,
+      "privileges": [
+        "SUBMIT_JOB"
+      ],
+      "user_name": "user2"
+    }
+  ],
+  "queue_name": "queue1"
+}
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

Authorization succeeds.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0039.html b/docs/dli/api-ref/dli_02_0039.html new file mode 100644 index 00000000..5f6f0710 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0039.html @@ -0,0 +1,210 @@ + + +

Granting Data Permission to Users

+

Function

This API is used to grant database or table data usage permission to specified users.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

user_name

+

Yes

+

String

+

Name of the user who is granted with usage permission on a queue or whose queue usage permission is revoked or updated.

+

action

+

Yes

+

String

+

Grants or revokes the permission. The parameter value can be grant, revoke, or update.

+
  • grant: Indicates to grant users with permissions.
  • revoke: Indicates to revoke permissions.
  • update: Indicates to clear all the original permissions and assign the permissions in the provided permission array.
+
NOTE:

Users can perform the update operation only when they have been granted with the grant and revoke permissions.

+
+

privileges

+

Yes

+

Array of Objects

+

Permission granting information. For details, see Table 3.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 3 privileges parameters

Parameter

+

Mandatory

+

Type

+

Description

+

object

+

Yes

+

String

+

Data objects to be assigned. If they are named:

+
  • databases.Database name, data in the entire database will be shared.
  • databases.Database name.tables.Table name, data in the specified table will be shared.
  • databases.Database name.tables.Table name.columns.Column name, data in the specified column will be shared.
  • jobs.flink.Flink job ID, data in the specified job will be shared.
  • groups. Package group name, data in the specified package group will be shared.
  • resources. Package name, data in the specified package will be shared.
+

privileges

+

Yes

+

Array of Strings

+

List of permissions to be granted, revoked, or updated.

+
NOTE:

If Action is Update and the update list is empty, all permissions of the user in the database or table are revoked.

+
+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 4 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

{
+  "user_name": "user2",
+  "action": "grant",
+  "privileges": [
+    {
+      "object": "databases.db1.tables.tb2.columns.column1",
+      "privileges": [
+        "SELECT"
+      ]
+    },
+    {
+      "object": "databases.db1.tables.tbl",
+      "privileges": [
+        "DROP_TABLE"
+      ]
+    },
+    {
+      "object": "databases.db1",
+      "privileges": [
+        "SELECT"
+      ]
+    }
+  ]
+}
+
+

Example Response

{
+  "is_success": true,
+  "message": "" 
+}
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

Authorization succeeds.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0040.html b/docs/dli/api-ref/dli_02_0040.html new file mode 100644 index 00000000..f752fa9a --- /dev/null +++ b/docs/dli/api-ref/dli_02_0040.html @@ -0,0 +1,168 @@ + + +

Querying Database Users

+

Function

This API is used to query names of all users who have permission to use or access the database.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

database_name

+

No

+

String

+

Name of the database to be queried.

+

privileges

+

No

+

Array of objects

+

Permission information. For details, see Table 3.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + +
Table 3 privileges parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_admin

+

No

+

Boolean

+

Whether the database user is an administrator.

+

user_name

+

No

+

String

+

Name of the user who has permission on the current database.

+

privileges

+

No

+

Array of Strings

+

Permission of the user on the database.

+
+
+
+

Example Request

None

+
+

Example Response

{
+  "is_success": true,
+  "message": "",
+  "database_name": "dsstest",
+  "privileges": [
+    {
+      "is_admin": true,
+      "privileges": [
+        "ALL"
+      ],
+      "user_name": "test"
+    },
+    {
+      "is_admin": false,
+      "privileges": [
+        "ALTER_TABLE_ADD_PARTITION"
+      ],
+      "user_name": "scuser1"
+    },
+    {
+      "is_admin": false,
+      "privileges": [
+        "CREATE_TABLE"
+      ],
+      "user_name": "scuser2"
+    }
+  ]
+}
+

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0041.html b/docs/dli/api-ref/dli_02_0041.html new file mode 100644 index 00000000..2d1a2a9c --- /dev/null +++ b/docs/dli/api-ref/dli_02_0041.html @@ -0,0 +1,170 @@ + + +

Querying Table Users

+

Function

This API is used to query users who have permission to access the specified table or column in the table.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

privileges

+

No

+

Array <Objects>

+

Permission information. For details, see Table 3.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 privileges parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_admin

+

No

+

Boolean

+

Whether the table user is an administrator.

+

object

+

No

+

String

+

Objects on which a user has permission.

+
  • If the object is in the format of databases.Database name.tables.Table name, the user has permission on the table.
  • If the object is in the format of databases.Database name.tables.Table name.columns.Column name, the user has permission on the column.
+

privileges

+

No

+

Array<String>

+

Permission of the user on the object.

+

user_name

+

No

+

String

+

Name of the user who has the permission.

+
+
+
+

Example Request

None

+
+

Example Response

{
+  "is_success": true,
+  "message": "",
+  "privileges": [
+    {
+      "is_admin": false,
+      "object": "databases.dsstest.tables.csv_par_table",
+      "privileges": [
+        "SELECT"
+      ],
+      "user_name": "tent2"
+    },
+    {
+      "is_admin": true,
+      "object": "databases.dsstest.tables.csv_par_table",
+      "privileges": [
+        "ALL"
+      ],
+      "user_name": "tent4"
+    }
+  ]
+}
+

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0042.html b/docs/dli/api-ref/dli_02_0042.html new file mode 100644 index 00000000..d8361aef --- /dev/null +++ b/docs/dli/api-ref/dli_02_0042.html @@ -0,0 +1,202 @@ + + +

Querying a User's Table Permissions

+

Function

This API is used to query the permission of a specified user on a table.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

user_name

+

No

+

String

+

Name of the user whose permission is to be queried.

+

privileges

+

No

+

Array Of objects

+

Permission information. For details, see Table 3.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 3 privileges parameters

Parameter

+

Mandatory

+

Type

+

Description

+

object

+

No

+

String

+

Objects on which a user has permission.

+
  • If the object is in the format of databases.Database name.tables.Table name, the user has permission on the table.
  • If the object is in the format of databases.Database name.tables.Table name.columns.Column name, the user has permission on the column.
+

privileges

+

No

+

Array of Strings

+

Permission of the user on a specified object.

+
+
+
+

Example Request

None

+
+

Example Response

{
+  "is_success": true,
+  "message": "",
+  "privileges": [
+    {
+      "object": "databases.dsstest.tables.obs_2312",
+      "privileges": [
+        "DESCRIBE_TABLE"
+      ]
+    },
+    {
+      "object": "databases.dsstest.tables.obs_2312.columns.id",
+      "privileges": [
+        "SELECT"
+      ]
+    }
+  ],
+  "user_name": "scuser1"
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

Authorization succeeds.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0056.html b/docs/dli/api-ref/dli_02_0056.html new file mode 100644 index 00000000..cd3eff05 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0056.html @@ -0,0 +1,210 @@ + + +

Error Code

+

If an error occurs in API calling, no result is returned. Identify the cause of error based on the error codes of each API. If an error occurs in API calling, HTTP status code 4xx or 5xx is returned. The response body contains the specific error code and information. If you are unable to identify the cause of an error, contact technical personnel and provide the error code so that we can help you solve the problem as soon as possible.

+

Format of an Error Response Body

If an error occurs during API calling, the system returns an error code and a message to you. The following shows the format of an error response body:

+
{
+    "error_msg": "The format of message is error",
+    "error_code": "DLI.0001"
+}
+

In the preceding information, error_code is an error code, and error_msg describes the error.

+ +
+ + + + + + + + + + + + + +
Table 1 Exceptions

Parameter

+

Parameter Type

+

Description

+

error_code

+

String

+

Error code. For details, see Table 2.

+

error_msg

+

String

+

Error details.

+
+
+
+

Error Code Description

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Error codes

Status Code

+

Error Code

+

Error Message

+

400

+

DLI.0001

+

Parameter check errors occur.

+

400

+

DLI.0002

+

The object does not exist.

+

400

+

DLI.0003

+

SQL permission verification fails.

+

400

+

DLI.0004

+

SQL syntax parsing errors occur.

+

400

+

DLI.0005

+

SQL semantics parsing errors occur.

+

400

+

DLI.0006

+

The object exists.

+

400

+

DLI.0007

+

The operation is not supported.

+

400

+

DLI.0008

+

Metadata errors occur.

+

400

+

DLI.0009

+

System restrictions.

+

400

+

DLI.0011

+

The file permission check fails.

+

400

+

DLI.0012

+

Resource objects are unavailable.

+

401

+

DLI.0013

+

User authentication errors occur.

+

401

+

DLI.0014

+

Service authentication errors occur.

+

400

+

DLI.0015

+

Token parsing error.

+

400

+

DLI.0016

+

The identity and role are incorrect.

+

400

+

DLI.0018

+

Data conversion errors occur.

+

400

+

DLI.0019

+

The task times out.

+

400

+

DLI.0100

+

The result expires.

+

404

+

DLI.0023

+

No related resources were found.

+

400

+

DLI.0999

+

Server-side errors occur.

+

400

+

DLI.1028

+

The quota is insufficient.

+
+
+
+

Example

If no queue named testqueue exists, the following error message is displayed when you submit a job submission request:

+
{
+  "error_code": "DLI.0002",
+  "error_msg": "There is no queue named testqueue"
+}
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0102.html b/docs/dli/api-ref/dli_02_0102.html new file mode 100644 index 00000000..72f66cd8 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0102.html @@ -0,0 +1,365 @@ + + +

Submitting a SQL Job (Recommended)

+

Function

This API is used to submit jobs to a queue using SQL statements.

+

The job types support DDL, DCL, IMPORT, QUERY, and INSERT. The IMPORT function is the same as that described in Importing Data. The difference lies in the implementation method.

+

Additionally, you can use other APIs to query and manage jobs. For details, see the following sections:

+ +

This API is synchronous if job_type in the response message is DCL.

+
+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

sql

+

Yes

+

String

+

SQL statement that you want to execute.

+

currentdb

+

No

+

String

+

Database where the SQL statement is executed. This parameter does not need to be configured during database creation.

+

queue_name

+

No

+

String

+

Name of the queue to which a job to be submitted belongs. The name can contain only digits, letters, and underscores (_), but cannot contain only digits or start with an underscore (_).

+

conf

+

No

+

Array of Strings

+

You can set the configuration parameters for the SQL job in the form of Key/Value. For details about the supported configuration items, see Table 3.

+

tags

+

No

+

Array of Objects

+

Label of a job. For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Configuration parameters description

Parameter

+

Default Value

+

Description

+

spark.sql.files.maxRecordsPerFile

+

0

+

Maximum number of records to be written into a single file. If the value is zero or negative, there is no limit.

+

spark.sql.autoBroadcastJoinThreshold

+

209715200

+

Maximum size (in bytes) of a table that can be broadcast to all worker nodes when a join is performed. You can set this parameter to -1 to disable broadcasting.

+
NOTE:

Currently, statistics are supported only for Hive metastore tables where the ANALYZE TABLE COMPUTE STATISTICS noscan command has been run, and for file-based data source tables where statistics are computed directly on the data files.

+
+

spark.sql.shuffle.partitions

+

200

+

Default number of partitions to use when shuffling data for joins or aggregations.

+

spark.sql.dynamicPartitionOverwrite.enabled

+

false

+

Whether DLI overwrites the partitions where data will be written into during runtime. If you set this parameter to false, all partitions that meet the specified condition will be deleted before data overwrite starts. For example, if you set false and use INSERT OVERWRITE to write partition 2021-02 to a partitioned table that has the 2021-01 partition, this partition will be deleted.

+

If you set this parameter to true, DLI does not delete partitions before overwrite starts.

+

spark.sql.files.maxPartitionBytes

+

134217728

+

Maximum number of bytes to be packed into a single partition when a file is read.

+

spark.sql.badRecordsPath

+

-

+

Path of bad records.

+

dli.sql.sqlasync.enabled

+

false

+

Indicates whether DDL and DCL statements are executed asynchronously. The value true indicates that asynchronous execution is enabled.

+

dli.sql.job.timeout

+

-

+

Sets the job running timeout interval. If the timeout interval expires, the job is canceled. Unit: second

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 4 tags parameters

Parameter

+

Mandatory

+

Type

+

Description

+

key

+

Yes

+

String

+

Tag key.

+

value

+

Yes

+

String

+

Tag value.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

Yes

+

Boolean

+

Indicates whether the request is successfully sent. Value true indicates that the request is successfully sent.

+

message

+

Yes

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

job_id

+

Yes

+

String

+

ID of a job returned after a job is generated and submitted by using SQL statements. The job ID can be used to query the job status and results.

+

job_type

+

Yes

+

String

+

Type of a job. Job types include the following:

+
  • DDL
  • DCL
  • IMPORT
  • EXPORT
  • QUERY
  • INSERT
+

schema

+

No

+

Array of objects

+

If the statement type is DDL, the column name and type of DDL are displayed.

+

rows

+

No

+

Array of objects

+

When the statement type is DDL, results of the DDL are displayed.

+

job_mode

+

No

+

String

+

Job execution mode. The options are as follows:

+
  • async: asynchronous
  • sync: synchronous
+
+
+
+

Example Request

{
+    "currentdb": "db1",
+    "sql": "desc table1",
+    "queue_name": "default",
+    "conf": [
+        "dli.sql.shuffle.partitions = 200"
+    ],
+    "tags": [
+            {
+              "key": "workspace",
+              "value": "space1"
+             },
+            {
+              "key": "jobName",
+              "value": "name1"
+             }
+      ]
+}
+
+

Example Response

{
+  "is_success": true,
+  "message": "",
+  "job_id": "8ecb0777-9c70-4529-9935-29ea0946039c",
+  "job_type": "DDL",
+  "job_mode":"sync",
+  "schema": [
+    {
+      "col_name": "string"
+    },
+    {
+      "data_type": "string"
+    },
+    {
+      "comment": "string"
+    }
+  ],
+  "rows": [
+    [
+      "c1",
+      "int",
+      null
+    ],
+    [
+      "c2",
+      "string",
+      null
+    ]
+  ]
+}
+
+

Status Codes

Table 6 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 6 Status codes

Status Code

+

Description

+

200

+

Submitted successfully.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0104.html b/docs/dli/api-ref/dli_02_0104.html new file mode 100644 index 00000000..ea35b7c6 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0104.html @@ -0,0 +1,118 @@ + + +

Canceling a Job (Recommended)

+

Function

This API is used to cancel a submitted job. If execution of a job completes or fails, this job cannot be canceled.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

None

+
+

Example Response

{
+  "is_success": true,
+  "message": ""
+}
+
+

Status Codes

Table 3 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 3 Status codes

Status Code

+

Description

+

200

+

Canceled.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0105.html b/docs/dli/api-ref/dli_02_0105.html new file mode 100644 index 00000000..870b8382 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0105.html @@ -0,0 +1,370 @@ + + +

Querying All Tables (Recommended)

+

Function

This API is used to query information about tables that meet the filtering criteria or all the tables in the specified database.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

Yes

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

Yes

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

table_count

+

Yes

+

Integer

+

Total number of tables.

+

tables

+

Yes

+

Array of Objects

+

Table information. For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4 tables parameters

Parameter

+

Mandatory

+

Type

+

Description

+

create_time

+

Yes

+

Long

+

Time when a table is created. The timestamp is expressed in milliseconds.

+

data_type

+

No

+

String

+

Type of the data to be added to the OBS table. The options are as follows: Parquet, ORC, CSV, JSON, and Avro.

+
NOTE:

This parameter is available only for OBS tables.

+
+

data_location

+

Yes

+

String

+

Data storage location, which can be DLI or OBS.

+

last_access_time

+

Yes

+

Long

+

Time when the table was last updated. The timestamp is expressed in milliseconds.

+

location

+

No

+

String

+

Storage path on the OBS table.

+
NOTE:

This parameter is available only for OBS tables.

+
+

owner

+

Yes

+

String

+

Table owner.

+

table_name

+

Yes

+

String

+

Name of a table.

+

table_size

+

Yes

+

Long

+

Size of a DLI table, in bytes. The value is 0 for non-DLI tables.

+

table_type

+

Yes

+

String

+

Type of a table.

+
  • EXTERNAL: Indicates an OBS table.
  • MANAGED: Indicates a DLI table.
  • VIEW: Indicates a view.
+

partition_columns

+

No

+

String

+

Partition field. This parameter is valid only for OBS partition tables.

+

page-size

+

No

+

Integer

+

Paging size. The minimum value is 1 and the maximum value is 100.

+

current-page

+

No

+

Integer

+

Current page number. The minimum value is 1.

+
+
+

If with-detail is set to false in the URI, only values of tables-related parameters data_location, table_name, and table_type are returned.

+
+
+

Example Request

None

+
+

Example Response

{
+  "is_success": true,
+  "message": "",
+  "table_count": 1,
+  "tables": [
+    { "create_time":1517364268000,
+      "data_location":"OBS",
+      "data_type":"csv",
+      "last_access_time":1517364268000,
+      "location":"obs://DLI/sqldata/data.txt",
+      "owner":"test",     
+      "partition_columns": ["a0"],
+      "table_name":"obs_t",
+      "table_size":0,
+      "table_type":"EXTERNAL"
+    }
+  ]
+}
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0107.html b/docs/dli/api-ref/dli_02_0107.html new file mode 100644 index 00000000..34f1b759 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0107.html @@ -0,0 +1,155 @@ + + +

Checking SQL Syntax

+

Function

This API is used to check the SQL syntax.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

sql

+

Yes

+

String

+

SQL statement that you want to execute.

+

currentdb

+

No

+

String

+

Database where the SQL statement is executed.

+
NOTE:
  • If the SQL statement contains db_name, for example, select * from db1.t1, you do not need to set this parameter.
  • If the SQL statement does not contain db_name, the semantics check will fail when you do not set this parameter or set this parameter to an incorrect value.
+
+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

job_type

+

No

+

String

+

Type of a job. Job types include the following: DDL, DCL, IMPORT, EXPORT, QUERY, and INSERT.

+
+
+
+

Example Request

{
+   "currentdb": "db1",
+   "sql": "select * from t1"   
+}
+
+

Example Response

{
+  "is_success": true,
+  "message": "the sql is ok",
+  "job_type":"QUERY"
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The request is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0108.html b/docs/dli/api-ref/dli_02_0108.html new file mode 100644 index 00000000..1203e008 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0108.html @@ -0,0 +1,197 @@ + + +

Previewing Table Content

+

Function

This API is used to preview the first ten rows of a table.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

schema

+

No

+

Array of objects

+

Column name and type of a table.

+

rows

+

No

+

Array of objects

+

Previewed table content.

+
+
+
+

Example Request

None

+
+

Example Response

The following is an example of a successful response in synchronous mode:
{
+       "is_success": true,
+       "message": "",
+       "schema": [    
+           {      
+             "id": "int"
+           },
+           {      
+             "name": "string"
+           },
+           {      
+             "address": "string"
+           }  
+        ],
+        "rows": [
+           [
+               "1",
+               "John",
+               "xxx"
+           ],
+           [
+               "2",
+               "Lily",
+               "xxx"
+           ]
+       ]
+    }
+

In asynchronous request mode, a job ID is returned. You can obtain the preview information based on the job ID.

+
+
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0109.html b/docs/dli/api-ref/dli_02_0109.html new file mode 100644 index 00000000..4bc8c982 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0109.html @@ -0,0 +1,11 @@ + + +

APIs Related to Spark jobs

+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0124.html b/docs/dli/api-ref/dli_02_0124.html new file mode 100644 index 00000000..deb627ac --- /dev/null +++ b/docs/dli/api-ref/dli_02_0124.html @@ -0,0 +1,650 @@ + + +

Creating a Batch Processing Job

+

Function

This API is used to create a batch processing job in a queue.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

file

+

Yes

+

String

+

Name of the package that is of the JAR or pyFile type and has been uploaded to the DLI resource management system. You can also specify an OBS path, for example, obs://Bucket name/Package name.

+

class_name

+

Yes

+

String

+

Java/Spark main class of the batch processing job.

+

queue

+

No

+

String

+

Queue name. Set this parameter to the name of the created DLI queue. The queue must be of the general-purpose type.

+
NOTE:
  • This parameter is compatible with the cluster_name parameter. That is, if cluster_name is used to specify a queue, the queue is still valid.
  • You are advised to use the queue parameter. The queue and cluster_name parameters cannot coexist.
+
+

cluster_name

+

No

+

String

+

Queue name. Set this parameter to the created DLI queue name.

+
NOTE:

You are advised to use the queue parameter. The queue and cluster_name parameters cannot coexist.

+
+

args

+

No

+

Array of Strings

+

Input parameters of the main class, that is, application parameters.

+

sc_type

+

No

+

String

+

Compute resource type. Currently, resource types A, B, and C are available. If this parameter is not specified, the minimum configuration (type A) is used. For details about resource types, see Table 3.

+

jars

+

No

+

Array of Strings

+

Name of the package that is of the JAR type and has been uploaded to the DLI resource management system. You can also specify an OBS path, for example, obs://Bucket name/Package name.

+

python_files

+

No

+

Array of Strings

+

Name of the package that is of the PyFile type and has been uploaded to the DLI resource management system. You can also specify an OBS path, for example, obs://Bucket name/Package name.

+

files

+

No

+

Array of Strings

+

Name of the package that is of the file type and has been uploaded to the DLI resource management system. You can also specify an OBS path, for example, obs://Bucket name/Package name.

+

modules

+

No

+

Array of Strings

+

Name of the dependent system resource module. You can view the module name using the API related to Querying Resource Packages in a Group.

+
DLI provides dependencies for executing datasource jobs. The following table lists the dependency modules corresponding to different services.
  • CloudTable/MRS HBase: sys.datasource.hbase
  • CloudTable/MRS OpenTSDB: sys.datasource.opentsdb
  • RDS MySQL: sys.datasource.rds
  • RDS PostgreSQL: preset
  • DWS: preset
  • CSS: sys.datasource.css
+
+

resources

+

No

+

Array of Objects

+

JSON object list, including the name and type of the JSON package that has been uploaded to the queue. For details, see Table 4.

+

groups

+

No

+

Array of Objects

+

JSON object list, including the package group resource. For details about the format, see the request example. If the type of the name in resources is not verified, the package with the name exists in the group. For details, see Table 5.

+

conf

+

No

+

Array of Objects

+

Batch configuration item.

+

name

+

No

+

String

+

Batch processing task name. The value contains a maximum of 128 characters.

+

driver_memory

+

No

+

String

+

Driver memory of the Spark application, for example, 2 GB and 2048 MB. This configuration item replaces the default parameter in sc_type. The unit must be provided. Otherwise, the startup fails.

+

driver_cores

+

No

+

Integer

+

Number of CPU cores of the Spark application driver. This configuration item replaces the default parameter in sc_type.

+

executor_memory

+

No

+

String

+

Executor memory of the Spark application, for example, 2 GB and 2048 MB. This configuration item replaces the default parameter in sc_type. The unit must be provided. Otherwise, the startup fails.

+

executor_cores

+

No

+

Integer

+

Number of CPU cores of each Executor in the Spark application. This configuration item replaces the default parameter in sc_type.

+

num_executors

+

No

+

Integer

+

Number of Executors in a Spark application. This configuration item replaces the default parameter in sc_type.

+

obs_bucket

+

No

+

String

+

OBS bucket for storing the Spark jobs. Set this parameter when you need to save jobs.

+

auto_recovery

+

No

+

Boolean

+

Whether to enable the retry function. If enabled, Spark jobs will be automatically retried after an exception occurs. The default value is false.

+

max_retry_times

+

No

+

Integer

+

Maximum retry times. The maximum value is 100, and the default value is 20.

+

catalog_name

+

No

+

String

+

To access metadata, set this parameter to dli.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Resource types

Resource Type

+

Physical Resource

+

driverCores

+

executorCores

+

driverMemory

+

executorMemory

+

numExecutor

+

A

+

8 vCPUs, 32-GB memory

+

2

+

1

+

7 GB

+

4 GB

+

6

+

B

+

16 vCPUs, 64-GB memory

+

2

+

2

+

7 GB

+

8 GB

+

7

+

C

+

32 vCPUs, 128-GB memory

+

4

+

2

+

15 GB

+

8 GB

+

14

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 4 resources parameters

Parameter

+

Mandatory

+

Type

+

Description

+

name

+

No

+

String

+

Resource name. You can also specify an OBS path, for example, obs://Bucket name/Package name.

+

type

+

No

+

String

+

Resource type.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 5 groups parameters

Parameter

+

Mandatory

+

Type

+

Description

+

name

+

No

+

String

+

User group name

+

resources

+

No

+

Array of Objects

+

User group resource. For details, see Table 4.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 6 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

id

+

No

+

String

+

ID of a batch processing job.

+

appId

+

No

+

String

+

Back-end application ID of a batch processing job.

+

name

+

No

+

String

+

Batch processing task name. The value contains a maximum of 128 characters.

+

owner

+

No

+

String

+

Owner of a batch processing job.

+

proxyUser

+

No

+

String

+

Proxy user (resource tenant) to which a batch processing job belongs.

+

state

+

No

+

String

+

Status of a batch processing job. For details, see Table 7.

+

kind

+

No

+

String

+

Type of a batch processing job. Only Spark parameters are supported.

+

log

+

No

+

Array of strings

+

Last 10 records of the current batch processing job.

+

sc_type

+

No

+

String

+

Type of a computing resource. If the computing resource type is customized, value CUSTOMIZED is returned.

+

cluster_name

+

No

+

String

+

Queue where a batch processing job is located.

+

queue

+

Yes

+

String

+

Queue name. Set this parameter to the name of the created DLI queue.

+
NOTE:
  • This parameter is compatible with the cluster_name parameter. That is, if cluster_name is used to specify a queue, the queue is still valid.
  • You are advised to use the queue parameter. The queue and cluster_name parameters cannot coexist.
+
+

create_time

+

No

+

Long

+

Time when a batch processing job is created. The timestamp is expressed in milliseconds.

+

update_time

+

No

+

Long

+

Time when a batch processing job is updated. The timestamp is expressed in milliseconds.

+

duration

+

No

+

Long

+

Job running duration (unit: millisecond)

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + +
Table 7 Batch processing job statuses

Parameter

+

Type

+

Description

+

starting

+

String

+

The batch processing job is being started.

+

running

+

String

+

The batch processing job is executing a task.

+

dead

+

String

+

The batch processing job has exited.

+

success

+

String

+

The batch processing job is successfully executed.

+

recovering

+

String

+

The batch processing job is being restored.

+
+
+
+

Example Request

{
+    "file": "batchTest/spark-examples_2.11-2.1.0.luxor.jar",
+    "class_name": "org.apache.spark.examples.SparkPi",
+    "sc_type": "A",
+    "jars": ["demo-1.0.0.jar"],
+    "files": ["count.txt"],
+    "resources":[
+                   {"name": "groupTest/testJar.jar", "type": "jar"},
+                   {"name": "kafka-clients-0.10.0.0.jar", "type": "jar"}],
+    "groups": [
+                   {"name": "groupTestJar", "resources": [{"name": "testJar.jar", "type": "jar"}, {"name": "testJar1.jar", "type": "jar"}]}, 
+                   {"name": "batchTest", "resources":  [{"name": "luxor.jar", "type": "jar"}]}],
+    "queue": " test",
+    "name": "TestDemo4"
+    
+    
+}
+

The batchTest/spark-examples_2.11-2.1.0.luxor.jar file has been uploaded through API involved in Uploading a Package Group.

+
+
+

Example Response

{
+  "id": "07a3e4e6-9a28-4e92-8d3f-9c538621a166",
+  "appId": "",
+  "name": "",
+  "owner": "test1",
+  "proxyUser": "",
+  "state": "starting",
+  "kind": "",
+  "log": [],
+  "sc_type": "CUSTOMIZED",
+  "cluster_name": "aaa",
+  "queue": "aaa",
+  "create_time": 1607589874156,
+  "update_time": 1607589874156
+}
+
+

Status Codes

Table 8 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 8 Status code

Status Code

+

Description

+

200

+

The job is created successfully.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0125.html b/docs/dli/api-ref/dli_02_0125.html new file mode 100644 index 00000000..3b01be61 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0125.html @@ -0,0 +1,378 @@ + + +

Obtaining the List of Batch Processing Jobs

+

Function

This API is used to obtain the list of batch processing jobs in a queue of a project.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

from

+

No

+

Integer

+

Index number of the start batch processing job.

+

total

+

No

+

Integer

+

Total number of batch processing jobs.

+

sessions

+

No

+

Array of objects

+

Batch job information. For details, see Table 6 in Creating a Batch Processing Job.

+

create_time

+

No

+

Long

+

Time when a batch processing job is created.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4 sessions parameters

Parameter

+

Mandatory

+

Type

+

Description

+

duration

+

No

+

Long

+

Job running duration (unit: millisecond)

+

id

+

No

+

String

+

ID of a batch processing job.

+

state

+

No

+

String

+

Status of a batch processing job

+

appId

+

No

+

String

+

Back-end application ID of a batch processing job

+

log

+

No

+

Array of Strings

+

Last 10 records of the current batch processing job

+

sc_type

+

No

+

String

+

Type of a computing resource. If the computing resource type is customized, value CUSTOMIZED is returned.

+

cluster_name

+

No

+

String

+

Queue where a batch processing job is located.

+

create_time

+

No

+

Long

+

Time when a batch processing job is created. The timestamp is in milliseconds.

+

name

+

No

+

String

+

Name of a batch processing job.

+

owner

+

No

+

String

+

Owner of a batch processing job.

+

proxyUser

+

No

+

String

+

Proxy user (resource tenant) to which a batch processing job belongs.

+

kind

+

No

+

String

+

Type of a batch processing job. Only Spark parameters are supported.

+

queue

+

No

+

String

+

Queue where a batch processing job is located.

+

image

+

No

+

String

+

Custom image. The format is Organization name/Image name:Image version.

+

This parameter is valid only when feature is set to custom. You can use this parameter with the feature parameter to specify a user-defined Spark image for job running.

+

update_time

+

No

+

Long

+

Time when a batch processing job is updated. The timestamp is in milliseconds.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "from": 0,
+    "total": 1,
+    "sessions": [
+        {
+            "id": "178fa687-2e8a-41ed-a439-b00de60bb176",
+            "state": "dead",
+            "appId": null,
+            "log": [
+             "stdout: ",
+             "stderr: ",
+             "YARN Diagnostics: "
+           ],
+           "sc_type": "A",
+           "cluster_name": "test",
+           "create_time": 1531906043036
+        }
+    ]
+}
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0126.html b/docs/dli/api-ref/dli_02_0126.html new file mode 100644 index 00000000..455eeb7a --- /dev/null +++ b/docs/dli/api-ref/dli_02_0126.html @@ -0,0 +1,232 @@ + + +

Querying Batch Job Details

+

Function

This API is used to query details about a batch processing job based on the job ID.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

id

+

No

+

String

+

ID of a batch processing job.

+

appId

+

No

+

String

+

Back-end application ID of a batch processing job.

+

name

+

No

+

String

+

Name of a batch processing job.

+

owner

+

No

+

String

+

Owner of a batch processing job.

+

proxyUser

+

No

+

String

+

Proxy user (resource tenant) to which a batch processing job belongs.

+

state

+

No

+

String

+

Status of a batch processing job. For details, see Table 7 in Creating a Batch Processing Job.

+

kind

+

No

+

String

+

Type of a batch processing job. Only Spark parameters are supported.

+

log

+

No

+

Array of Strings

+

Last 10 records of the current batch processing job.

+

sc_type

+

No

+

String

+

Type of a computing resource. If the computing resource type is customized, value CUSTOMIZED is returned.

+

cluster_name

+

No

+

String

+

Queue where a batch processing job is located.

+

queue

+

No

+

String

+

Queue where a batch processing job is located.

+

create_time

+

No

+

Long

+

Time when a batch processing job is created. The timestamp is expressed in milliseconds.

+

update_time

+

No

+

Long

+

Time when a batch processing job is updated. The timestamp is expressed in milliseconds.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "id": "0a324461-d9d9-45da-a52a-3b3c7a3d809e",
+    "appId": "",
+    "name": "",
+    "owner": "",
+    "proxyUser": "",
+    "state": "starting",
+    "kind":"",
+    "log": [
+           "stdout: ",
+            "stderr: ",
+            "YARN Diagnostics: "
+    ],
+    "sc_type": "A",
+    "cluster_name": "test",
+    "queue": "test",
+    "create_time": 1531906043036,
+    "update_time": 1531906043036
+}
+
+

Status Codes

Table 3 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 3 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0127.html b/docs/dli/api-ref/dli_02_0127.html new file mode 100644 index 00000000..dab2a612 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0127.html @@ -0,0 +1,109 @@ + + +

Querying a Batch Job Status

+

Function

This API is used to obtain the execution status of a batch processing job.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 2 Response parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

id

+

No

+

String

+

ID of a batch processing job, which is in the universal unique identifier (UUID) format.

+

state

+

No

+

String

+

Status of a batch processing job. For details, see Table 7 in Creating a Batch Processing Job.

+
+
+
+

Example Request

None

+
+

Example Response

{"id":"0a324461-d9d9-45da-a52a-3b3c7a3d809e","state":"Success"}
+
+

Status Codes

Table 3 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 3 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0128.html b/docs/dli/api-ref/dli_02_0128.html new file mode 100644 index 00000000..ad08f5e3 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0128.html @@ -0,0 +1,190 @@ + + +

Querying Batch Job Logs

+

Function

This API is used to query the back-end logs of batch processing jobs.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

id

+

No

+

String

+

ID of a batch processing job.

+

from

+

No

+

String

+

Start index of a log.

+

total

+

No

+

Long

+

Total number of records in a log.

+

log

+

No

+

Array of Strings

+

Log of the current batch processing job.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "id": "0a324461-d9d9-45da-a52a-3b3c7a3d809e",
+    "from": 0,
+    "total": 3,
+    "log": [
+           "Detailed information about job logs"
+    ]
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0129.html b/docs/dli/api-ref/dli_02_0129.html new file mode 100644 index 00000000..49ef7fa5 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0129.html @@ -0,0 +1,110 @@ + + +

Canceling a Batch Processing Job

+

Function

This API is used to cancel a batch processing job.

+

Batch processing jobs in the Successful or Failed state cannot be canceled.

+
+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + +
Table 2 Response parameter

Parameter

+

Mandatory

+

Type

+

Description

+

msg

+

No

+

String

+

If the batch processing job is successfully canceled, value deleted is returned.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "msg": "deleted"
+}
+
+

Status Codes

Table 3 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 3 Status codes

Status Code

+

Description

+

200

+

Deletion succeeded.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0130.html b/docs/dli/api-ref/dli_02_0130.html new file mode 100644 index 00000000..c43c2ee5 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0130.html @@ -0,0 +1,375 @@ + + +

Uploading a Package Group

+

Function

This API is used to upload a package group to a project. The function is similar to creating a package on the management console.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

paths

+

Yes

+

Array of Strings

+

List of OBS object paths. The OBS object path refers to the OBS object URL.

+

kind

+

Yes

+

String

+

File type of a package group.

+
  • jar: JAR file
  • pyFile: User Python file
  • file: User file
  • modelFile: User AI model file
+
NOTE:

If the same group of packages to be uploaded contains different file types, select file as the type of the file to be uploaded.

+
+

group

+

Yes

+

String

+

Name of the group to be created.

+

is_async

+

No

+

Boolean

+

Whether to upload resource packages in asynchronous mode. The default value is false, indicating that the asynchronous mode is not used. You are advised to upload resource packages in asynchronous mode.

+

tags

+

No

+

Array of Objects

+

Resource tag. For details, see Table 3.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 3 tags parameter

Parameter

+

Mandatory

+

Type

+

Description

+

key

+

Yes

+

String

+

Tag key.

+

value

+

Yes

+

String

+

Tag value.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

group_name

+

No

+

String

+

Group name.

+

status

+

No

+

String

+

Status of a package group to be uploaded.

+

resources

+

No

+

Array of strings

+

List of names of resource packages contained in the group.

+

details

+

No

+

Array of body

+

Details about a group resource package. For details, see Table 5.

+

create_time

+

No

+

Long

+

UNIX timestamp when a package group is uploaded.

+

update_time

+

No

+

Long

+

UNIX timestamp when a package group is updated.

+

is_async

+

No

+

Boolean

+

Whether to upload resource packages in asynchronous mode. The default value is false, indicating that the asynchronous mode is not used. You are advised to upload resource packages in asynchronous mode.

+

owner

+

No

+

String

+

Owner of a resource package.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5 details parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

create_time

+

Yes

+

Long

+

UNIX time when a resource package is uploaded. The timestamp is in milliseconds.

+

update_time

+

No

+

Long

+

UNIX time when the resource package is updated. The timestamp is in milliseconds.

+

resource_type

+

Yes

+

String

+

Resource type.

+

resource_name

+

No

+

String

+

Resource name.

+

status

+

No

+

String

+
  • Value UPLOADING indicates that the resource package group is being uploaded.
  • Value READY indicates that the resource package has been uploaded.
  • Value FAILED indicates that the resource package fails to be uploaded.
+

underlying_name

+

No

+

String

+

Name of the resource packages in a queue.

+

is_async

+

No

+

Boolean

+

Whether to upload resource packages in asynchronous mode. The default value is false, indicating that the asynchronous mode is not used. You are advised to upload resource packages in asynchronous mode.

+
+
+
+

Example Request

{
+    "paths": [
+        "https://xkftest.obs.xxx.com/txr_test/jars/spark-sdv-app.jar",
+        "https://xkftest.obs.xxx.com/txr_test/jars/wordcount",
+        "https://xkftest.obs.xxx.com/txr_test/jars/wordcount.py"
+    ],
+    "kind": "jar",
+    "group": "gatk",
+    "is_async":"true"
+}
+
+

Example Response

{
+    "group_name": "gatk",
+    "status": "READY",
+    "resources": [
+        "spark-sdv-app.jar",
+        "wordcount",
+        "wordcount.py"
+    ],
+    "details": [
+        {
+            "create_time": 0,
+            "update_time": 0,
+            "resource_type": "jar",
+            "resource_name": "spark-sdv-app.jar",
+            "status": "READY",
+            "underlying_name": "987e208d-d46e-4475-a8c0-a62f0275750b_spark-sdv-app.jar"
+        },
+        {
+            "create_time": 0,
+            "update_time": 0,
+            "resource_type": "jar",
+            "resource_name": "wordcount",
+            "status": "READY",
+            "underlying_name": "987e208d-d46e-4475-a8c0-a62f0275750b_wordcount"
+        },
+        {
+            "create_time": 0,
+            "update_time": 0,
+            "resource_type": "jar",
+            "resource_name": "wordcount.py",
+            "status": "READY",
+            "underlying_name": "987e208d-d46e-4475-a8c0-a62f0275750b_wordcount.py"
+        }
+    ],
+    "create_time": 1551334579654,
+    "update_time": 1551345369070
+}
+
+

Status Codes

Table 6 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 6 Status codes

Status Code

+

Description

+

201

+

The file is successfully uploaded.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0158.html b/docs/dli/api-ref/dli_02_0158.html new file mode 100644 index 00000000..b5b49d2e --- /dev/null +++ b/docs/dli/api-ref/dli_02_0158.html @@ -0,0 +1,16 @@ + + +

APIs Related to SQL Jobs

+

+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0162.html b/docs/dli/api-ref/dli_02_0162.html new file mode 100644 index 00000000..85a7b245 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0162.html @@ -0,0 +1,25 @@ + + +

Batch Processing-related APIs

+
+
+ + + +
+ diff --git a/docs/dli/api-ref/dli_02_0164.html b/docs/dli/api-ref/dli_02_0164.html new file mode 100644 index 00000000..4e9823d1 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0164.html @@ -0,0 +1,142 @@ + + +

Modifying a Database Owner

+

Function

This API is used to modify the owner of a database.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

new_owner

+

Yes

+

String

+

Name of the new owner. The new user must be a sub-user of the current tenant.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

{
+    "new_owner": "scuser1"
+}
+
+

Example Response

{
+  "is_success": true,
+  "message": ""
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The modification operation is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0166.html b/docs/dli/api-ref/dli_02_0166.html new file mode 100644 index 00000000..77f20e0b --- /dev/null +++ b/docs/dli/api-ref/dli_02_0166.html @@ -0,0 +1,25 @@ + + +

Package Group-related APIs

+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0168.html b/docs/dli/api-ref/dli_02_0168.html new file mode 100644 index 00000000..49f0631b --- /dev/null +++ b/docs/dli/api-ref/dli_02_0168.html @@ -0,0 +1,365 @@ + + +

Querying Package Group List

+

Function

This API is used to query all resources in a project, including groups.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

resources

+

No

+

Array of Objects

+

List of names of uploaded user resources. For details about resources, see Table 4.

+

modules

+

No

+

Array of Objects

+

List of built-in resource groups. For details about the groups, see Table 5.

+

groups

+

No

+

Array of Objects

+

Uploaded package groups of a user.

+

total

+

Yes

+

Integer

+

Total number of returned resource packages.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4 resources parameters

Parameter

+

Mandatory

+

Type

+

Description

+

create_time

+

No

+

Long

+

UNIX timestamp when a resource package is uploaded.

+

update_time

+

No

+

Long

+

UNIX timestamp when the resource package is updated.

+

resource_type

+

No

+

String

+

Resource type.

+

resource_name

+

No

+

String

+

Resource name.

+

status

+

No

+

String

+
  • Value UPLOADING indicates that the resource package is being uploaded.
  • Value READY indicates that the resource package has been uploaded.
  • Value FAILED indicates that the resource package fails to be uploaded.
+

underlying_name

+

No

+

String

+

Name of the resource package in the queue.

+

owner

+

No

+

String

+

Owner of the resource package.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5 modules parameters

Parameter

+

Mandatory

+

Type

+

Description

+

module_name

+

No

+

String

+

Module name.

+

module_type

+

No

+

String

+

Module type.

+

status

+

No

+

String

+
  • Value UPLOADING indicates that the package group is being uploaded.
  • Value READY indicates that the package group has been uploaded.
  • Value FAILED indicates that the package group fails to be uploaded.
+

resources

+

No

+

Array of Strings

+

List of names of resource packages contained in the group.

+

description

+

No

+

String

+

Module description.

+

create_time

+

No

+

Long

+

UNIX timestamp when a package group is uploaded.

+

update_time

+

No

+

Long

+

UNIX timestamp when a package group is updated.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "resources": [
+        {
+            "create_time": 1521532893736,
+            "update_time": 1521552364503,
+            "resource_type": "jar",
+            "resource_name": "luxor-router-1.1.1.jar",
+            "status": "READY",
+            "underlying_name": "3efffb4f-40e9-455e-8b5a-a23b4d355e46_luxor-router-1.1.1.jar"
+        }
+    ],
+    "groups": [
+        {
+            "group_name": "groupTest",
+            "status": "READY",
+            "resources": [
+                "part-00000-9dfc17b1-2feb-45c5-b81d-bff533d6ed13.csv.gz",
+                "person.csv"
+            ],
+            "details": [
+                {
+                    "create_time": 1547090015132,
+                    "update_time": 1547090015132,
+                    "resource_type": "jar",
+                    "resource_name": "part-00000-9dfc17b1-2feb-45c5-b81d-bff533d6ed13.csv.gz",
+                    "status": "READY",
+                    "underlying_name": "db50c4dc-7187-4eb9-a5d0-73ba8102ea5e_part-00000-9dfc17b1-2feb-45c5-b81d-bff533d6ed13.csv.gz"
+                },
+                {
+                    "create_time": 1547091098668,
+                    "update_time": 1547091098668,
+                    "resource_type": "file",
+                    "resource_name": "person.csv",
+                    "status": "READY",
+                    "underlying_name": "a4243a8c-bca6-4e77-a968-1f3b00217474_person.csv"
+                }
+            ],
+            "create_time": 1547090015131,
+            "update_time": 1547091098666
+        }
+    ],
+    "modules": [
+        {
+            "module_name": "gatk",
+            "status": "READY",
+            "resources": [
+                "gatk.jar",
+                "tika-core-1.18.jar",
+                "s3fs-2.2.2.jar"
+            ],
+            "create_time": 1521532893736,
+            "update_time": 1521552364503
+        }
+    ]
+}
+
+

Status Codes

Table 6 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 6 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0169.html b/docs/dli/api-ref/dli_02_0169.html new file mode 100644 index 00000000..964aac2d --- /dev/null +++ b/docs/dli/api-ref/dli_02_0169.html @@ -0,0 +1,322 @@ + + +

Uploading a JAR Package Group

+

Function

This API is used to upload a group of JAR packages to a project.

+

When a resource group with the same name is uploaded, the new group overwrites the old group.

+
+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

paths

+

Yes

+

Array of Strings

+

List of OBS object paths. The OBS object path refers to the OBS object URL.

+

group

+

Yes

+

String

+

Name of a package group.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

group_name

+

No

+

String

+

Group name.

+

status

+

No

+

String

+

Status of a package group to be uploaded.

+

resources

+

No

+

Array of strings

+

List of names of resource packages contained in the group.

+

details

+

No

+

Array of body

+

Details about a group resource package. For details, see Table 4.

+

create_time

+

No

+

Long

+

UNIX timestamp when a package group is uploaded.

+

update_time

+

No

+

Long

+

UNIX timestamp when a package group is updated.

+

is_async

+

No

+

Boolean

+

Whether to upload resource packages in asynchronous mode. The default value is false, indicating that the asynchronous mode is not used. You are advised to upload resource packages in asynchronous mode.

+

owner

+

No

+

String

+

Owner of a resource package.

+

description

+

No

+

String

+

Description of a resource module.

+

module_name

+

No

+

String

+

Name of a resource module.

+

module_type

+

No

+

String

+

Type of a resource module.

+
  • jar: User JAR file
  • pyFile: User Python file
  • file: User file
+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4 details parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

create_time

+

No

+

Long

+

UNIX time when a resource package is uploaded. The timestamp is in milliseconds.

+

update_time

+

No

+

Long

+

UNIX time when the resource package is updated. The timestamp is in milliseconds.

+

resource_type

+

No

+

String

+

Resource type. Set this parameter to jar.

+

resource_name

+

No

+

String

+

Resource name.

+

status

+

No

+

String

+
  • Value UPLOADING indicates that the resource package group is being uploaded.
  • Value READY indicates that the resource package has been uploaded.
  • Value FAILED indicates that the resource package fails to be uploaded.
+

underlying_name

+

No

+

String

+

Name of the resource packages in a queue.

+

is_async

+

No

+

Boolean

+

Indicates whether to upload a resource package asynchronously.

+
+
+
+

Example Request

{
+    "paths": [
+        "https://test.obs.xxx.com/test_dli.jar"
+    ],
+    "group": "gatk"
+}
+
+

Example Response

{
+    "group_name": "gatk",
+    "status": "READY",
+    "resources": [
+        "test_dli.jar"
+    ],
+    "details":[
+        {
+          "create_time":1608804435312,
+          "update_time":1608804435312,
+          "resource_type":"jar",
+          "resource_name":"test_dli.jar",
+          "status":"READY",
+          "underlying_name":"test_dli.jar"
+        }
+       ],
+    "create_time": 1521532893736,
+    "update_time": 1521552364503,
+    "is_async":false
+}
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

201

+

Upload succeeded.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0170.html b/docs/dli/api-ref/dli_02_0170.html new file mode 100644 index 00000000..39876f0b --- /dev/null +++ b/docs/dli/api-ref/dli_02_0170.html @@ -0,0 +1,322 @@ + + +

Uploading a PyFile Package Group

+

Function

This API is used to upload a group of PyFile packages to a project.

+

When a group with the same name as the PyFile package is uploaded, the new group overwrites the old group.

+
+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

paths

+

Yes

+

Array of strings

+

List of OBS object paths. The OBS object path refers to the OBS object URL.

+

group

+

Yes

+

String

+

Name of a package group.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

group_name

+

No

+

String

+

Group name.

+

status

+

No

+

String

+

Status of a package group to be uploaded.

+

resources

+

No

+

Array of strings

+

List of names of resource packages contained in the group.

+

details

+

No

+

Array of body

+

Details about a group resource package. For details, see Table 4.

+

create_time

+

No

+

Long

+

UNIX timestamp when a package group is uploaded.

+

update_time

+

No

+

Long

+

UNIX timestamp when a package group is updated.

+

is_async

+

No

+

Boolean

+

Whether to upload resource packages in asynchronous mode. The default value is false, indicating that the asynchronous mode is not used. You are advised to upload resource packages in asynchronous mode.

+

owner

+

No

+

String

+

Owner of a resource package.

+

description

+

No

+

String

+

Description of a resource module.

+

module_name

+

No

+

String

+

Name of a resource module.

+

module_type

+

No

+

String

+

Type of a resource module.

+
  • jar: User JAR file
  • pyFile: User Python file
  • file: User file
+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4 details parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

create_time

+

No

+

Long

+

UNIX time when a resource package is uploaded. The timestamp is in milliseconds.

+

update_time

+

No

+

Long

+

UNIX time when the resource package is updated. The timestamp is in milliseconds.

+

resource_type

+

No

+

String

+

Resource type. Set this parameter to pyFile.

+

resource_name

+

No

+

String

+

Resource name.

+

status

+

No

+

String

+
  • Value UPLOADING indicates that the resource package group is being uploaded.
  • Value READY indicates that the resource package has been uploaded.
  • Value FAILED indicates that the resource package fails to be uploaded.
+

underlying_name

+

No

+

String

+

Name of the resource packages in a queue.

+

is_async

+

No

+

Boolean

+

Indicates whether to upload a resource package asynchronously.

+
+
+
+

Example Request

{
+    "paths": [
+        "https://test.obs.xxx.com/dli_tf.py"
+    ],
+    "group": "gatk"
+}
+
+

Example Response

{
+    "group_name": "gatk",
+    "status": "READY",
+    "resources": [
+        "dli_tf.py"
+    ],
+    "details":[
+        {
+          "create_time":1608804435312,
+          "update_time":1608804435312,
+          "resource_type":"pyFile",
+          "resource_name":"dli_tf.py",
+          "status":"READY",
+          "underlying_name":"dli_tf.py"
+        }
+       ],
+    "create_time": 1521532893736,
+    "update_time": 1521552364503,
+    "is_async":false
+}
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

201

+

Upload succeeded.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0171.html b/docs/dli/api-ref/dli_02_0171.html new file mode 100644 index 00000000..ecdacdb8 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0171.html @@ -0,0 +1,332 @@ + + +

Uploading a File Package Group

+

Function

This API is used to upload a group of File packages to a project.

+

When the File package group with the same name is uploaded, the new group overwrites the old group.

+
+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

paths

+

Yes

+

Array of Strings

+

List of OBS object paths. The OBS object path refers to the OBS object URL.

+

group

+

Yes

+

String

+

Name of a package group.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

group_name

+

No

+

String

+

Group name.

+

status

+

No

+

String

+

Status of a package group to be uploaded.

+

resources

+

No

+

Array of strings

+

List of names of resource packages contained in the group.

+

details

+

No

+

Array of body

+

Details about a group resource package. For details, see Table 4.

+

create_time

+

No

+

Long

+

UNIX timestamp when a package group is uploaded.

+

update_time

+

No

+

Long

+

UNIX timestamp when a package group is updated.

+

is_async

+

No

+

Boolean

+

Whether to upload resource packages in asynchronous mode. The default value is false, indicating that the asynchronous mode is not used. You are advised to upload resource packages in asynchronous mode.

+

owner

+

No

+

String

+

Owner of a resource package.

+

description

+

No

+

String

+

Description of a resource module.

+

module_name

+

No

+

String

+

Name of a resource module.

+

module_type

+

No

+

String

+

Type of a resource module.

+
  • jar: User JAR file
  • pyFile: User Python file
  • file: User file
+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4 details parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

create_time

+

No

+

Long

+

UNIX time when a resource package is uploaded. The timestamp is expressed in milliseconds.

+

update_time

+

No

+

Long

+

UNIX time when the resource package is updated. The timestamp is expressed in milliseconds.

+

resource_type

+

No

+

String

+

Resource type. Set this parameter to file.

+

resource_name

+

No

+

String

+

Resource name.

+

status

+

No

+

String

+
  • Value UPLOADING indicates that the resource package group is being uploaded.
  • Value READY indicates that the resource package has been uploaded.
  • Value FAILED indicates that the resource package fails to be uploaded.
+

underlying_name

+

No

+

String

+

Name of the resource packages in a queue.

+

is_async

+

No

+

Boolean

+

Indicates whether to upload a resource package asynchronously.

+
+
+
+

Example Request

{
+    "paths": [
+        "https://test.obs.xxx.com/test_dli.jar",
+        "https://test.obs.xxx.com/dli_tf.py"
+    ],
+    "group": "gatk"
+}
+
+

Example Response

{
+    "group_name": "gatk",
+    "status": "READY",
+    "resources": [
+        "test_dli.jar",
+        "dli_tf.py"
+    ],
+    "details":[
+        {
+          "create_time":1608804435312,
+          "update_time":1608804435312,
+          "resource_type":"file",
+          "resource_name":"test_dli.jar",
+          "status":"READY",
+          "underlying_name":"test_dli.jar"
+        },
+       {
+        "create_time":1608804435312,
+        "update_time":1608804435312,
+        "resource_type":"file",
+        "resource_name":"dli_tf.py",
+        "status":"READY",
+        "underlying_name":"dli_tf.py"
+      }
+       ],
+    "create_time": 1521532893736,
+    "update_time": 1521552364503,
+    "is_async":false
+}
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

201

+

Upload succeeded.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0172.html b/docs/dli/api-ref/dli_02_0172.html new file mode 100644 index 00000000..cc214d9c --- /dev/null +++ b/docs/dli/api-ref/dli_02_0172.html @@ -0,0 +1,177 @@ + + +

Querying Resource Packages in a Group

+

Function

This API is used to query resource information of a package group in a Project.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Type

+

Description

+

create_time

+

Long

+

UNIX time when a resource package is uploaded. The timestamp is expressed in milliseconds.

+

update_time

+

Long

+

UNIX time when the resource package was last updated. The timestamp is expressed in milliseconds.

+

resource_type

+

String

+

Resource type.

+

resource_name

+

String

+

Resource name.

+

status

+

String

+
  • Value UPLOADING indicates that the resource package group is being uploaded.
  • Value READY indicates that the resource package has been uploaded.
  • Value FAILED indicates that the resource package fails to be uploaded.
+

underlying_name

+

String

+

Name of the resource packages in a queue.

+

owner

+

String

+

Owner of a resource package.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "create_time": 1522055409139,
+    "update_time": 1522228350501,
+    "resource_type": "jar",
+    "resource_name": "luxor-ommanager-dist.tar.gz",
+    "status": "uploading",
+    "underlying_name": "7885d26e-c532-40f3-a755-c82c442f19b8_luxor-ommanager-dist.tar.gz"
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Codes

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0173.html b/docs/dli/api-ref/dli_02_0173.html new file mode 100644 index 00000000..44d4944f --- /dev/null +++ b/docs/dli/api-ref/dli_02_0173.html @@ -0,0 +1,105 @@ + + +

Deleting a Resource Package from a Group

+

Function

This API is used to delete resource packages in a group in a Project.

+
+

URI

+
+

Request

None

+
+

Response

+
+

Example Request

None

+
+

Example Response

None

+
+

Status Codes

Table 3 describes the status code.

+ +
+ + + + + + + + + + +
Table 3 Status codes

Status Code

+

Description

+

200

+

Deletion succeeded.

+

404

+

Not found.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0181.html b/docs/dli/api-ref/dli_02_0181.html new file mode 100644 index 00000000..d69d1b67 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0181.html @@ -0,0 +1,101 @@ + + +

Overview

+

This section describes the APIs provided by DLI.

+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1 DLI APIs

Type

+

Subtype

+

Description

+

Permission-related APIs

+

-

+

You can assign permissions to queues, view queue users, assign data permissions, view database users, view table users, view table user permissions, and view the permissions of authorized users.

+

Agency-related APIs

+

-

+

Obtain the DLI agency information and create a DLI agency.

+

Queue-related APIs (Recommended)

+

-

+

You can create queues, delete queues, query all queues, modify the CIDR block of a queue, restart, scale out, or scale in a queue, query queue details, create a request for testing the connectivity of a specified address, query the connectivity of a specified address, create a scheduled queue scaling plan, query a scheduled queue scaling plan, delete scheduled queue scaling plans in batches, and delete or modify a scheduled queue scaling plan.

+

APIs Related to SQL Jobs

+

+

Database-related APIs

+

You can create a database, delete a database, view all databases, and modify database users.

+

Table-related APIs

+

You can create, delete, and query tables, describe table information, preview table content, modify table users, and obtain the partition information list.

+

Job-related APIs

+

You can import and export data, submit SQL jobs, cancel jobs, query all jobs, preview job results, query job status, query job details, check SQL syntax, and export query results.

+

Package Group-related APIs

+

-

+

You can upload a group resource, query the group resource list, upload a group resource in JAR format, upload a PyFile group resource, upload a File type group resource, query a resource package in a group, delete a resource package in a group, and change the owner of a group or resource package.

+

APIs Related to Flink Jobs

+

-

+

You can authorize DLI to OBS, create and update SQL jobs and user-defined Flink jobs, run jobs in batches, query the job list, job details, job execution plans, and job monitoring information. You can also stop jobs in batches, delete and batch delete jobs, export and import Flink jobs, create IEF message channels, report Flink job status and callback Flink job actions at the edge, and report IEF system events.

+

APIs related to Spark jobs

+

Batch Processing-related APIs

+

You can create batch jobs, cancel batch jobs, query batch job lists, query batch job details, query batch job status, and query batch job logs.

+

APIs Related to Flink Job Templates

+

-

+

You can create, update, and delete a template, and query the template list.

+

APIs Related to Enhanced Datasource Connections

+

-

+

You can create and delete enhanced datasource connections, query the enhanced datasource connection list as well as the connections, bind and unbind queues, modify host information, and query enhanced datasource connection permissions.

+

APIs Related to Global Variables

+

-

+

You can create, delete, modify, and query global variables.

+
+
+
+
+ diff --git a/docs/dli/api-ref/dli_02_0183.html b/docs/dli/api-ref/dli_02_0183.html new file mode 100644 index 00000000..7acdf4cd --- /dev/null +++ b/docs/dli/api-ref/dli_02_0183.html @@ -0,0 +1,33 @@ + + +

Obtaining a Project ID

+
{
+    "projects": [
+        {
+            "domain_id": "65382450e8f64ac0870cd180d14e684b",
+            "is_domain": false,
+            "parent_id": "65382450e8f64ac0870cd180d14e684b",
+            "name": "project_name",
+            "description": "",
+            "links": {
+                "next": null,
+                "previous": null,
+                "self": "https://www.example.com/v3/projects/a4a5d4098fb4474fa22cd05f897d6b99"
+            },
+            "id": "a4a5d4098fb4474fa22cd05f897d6b99",
+            "enabled": true
+        }
+    ],
+    "links": {
+        "next": null,
+        "previous": null,
+        "self": "https://www.example.com/v3/projects"
+    }
+}
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0186.html b/docs/dli/api-ref/dli_02_0186.html new file mode 100644 index 00000000..5cda52bd --- /dev/null +++ b/docs/dli/api-ref/dli_02_0186.html @@ -0,0 +1,26 @@ + + +

APIs Related to Enhanced Datasource Connections

+

+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0187.html b/docs/dli/api-ref/dli_02_0187.html new file mode 100644 index 00000000..12a7eed8 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0187.html @@ -0,0 +1,270 @@ + + +

Creating an Enhanced Datasource Connection

+

Function

This API is used to create an enhanced datasource connection with other services.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

name

+

Yes

+

String

+

Name of the connection.

+
  • The name can contain only letters, digits, and underscores (_), and cannot be left empty.
  • A maximum of 64 characters are allowed.
+

dest_vpc_id

+

Yes

+

String

+

The ID of the service VPC to be connected.

+

dest_network_id

+

Yes

+

String

+

The subnet ID of the to-be-connected service.

+

queues

+

No

+

Array of Strings

+

List of queue names that are available for datasource connections.

+

routetable_id

+

No

+

String

+

Route table associated with the subnet of the service.

+

hosts

+

No

+

Array of Objects

+

The user-defined host information. A maximum of 20,000 records are supported. For details, see hosts request parameters.

+

tags

+

No

+

Array of Objects

+

Tags of datasource connections. For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 3 hosts request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

name

+

No

+

String

+

The user-defined host name. The value can consist of 128 characters, including digits, letters, underscores (_), hyphens (-), and periods (.). It must start with a letter.

+

ip

+

No

+

String

+

The IPv4 address of the host.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 4 tags parameter

Parameter

+

Mandatory

+

Type

+

Description

+

key

+

Yes

+

String

+

Tag key.

+

value

+

Yes

+

String

+

Tag value.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + +
Table 5 Response parameters

Parameter

+

Type

+

Description

+

is_success

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

String

+

System prompt. If execution succeeds, the message may be left blank.

+

connection_id

+

String

+

Connection ID. Identifies the UUID of a datasource connection.

+
+
+
+

Example Request

{
+  "name": "test",
+  "dest_vpc_id": "22094d8f-c310-4621-913d-4c4d655d8495",
+  "dest_network_id": "78f2562a-36e4-4b39-95b9-f5aab22e1281",
+  "queues": [
+    "q1",
+    "q2"
+  ],
+  "hosts": [
+    {
+      "ip":"192.168.0.1",
+      "name":"ecs-97f8-0001"
+    },
+    {
+      "ip":"192.168.0.2", 
+      "name":"ecs-97f8-0002"
+    }
+  ]
+}
+
+

Example Response

{
+  "is_success": true,
+  "message": "Create peer connection for queues:{queue list in the request parameter}",
+  "connection_id": "2a620c33-5609-40c9-affd-2b6453071b0f"
+}
+
+

Status Codes

Table 6 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 6 Status codes

Status Code

+

Description

+

201

+

The connection is created successfully.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0188.html b/docs/dli/api-ref/dli_02_0188.html new file mode 100644 index 00000000..43d61013 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0188.html @@ -0,0 +1,114 @@ + + +

Deleting an Enhanced Datasource Connection

+

Function

This API is used to delete an enhanced datasource connection.

+

The connection that is being created cannot be deleted.

+
+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Type

+

Description

+

is_success

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

String

+

System message. Value Deleted indicates that the operation is successful.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": true,
+    "message": "Deleted"
+}
+
+

Status Codes

Table 3 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 3 Status codes

Status Code

+

Description

+

200

+

Deletion succeeded.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0189.html b/docs/dli/api-ref/dli_02_0189.html new file mode 100644 index 00000000..2b342d0a --- /dev/null +++ b/docs/dli/api-ref/dli_02_0189.html @@ -0,0 +1,340 @@ + + +

Querying an Enhanced Datasource Connection

+

Function

This API is used to query the created enhanced datasource connections.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

id

+

No

+

String

+

Connection ID. Identifies the UUID of a datasource connection.

+

name

+

No

+

String

+

User-defined connection name.

+

status

+

No

+

String

+

Connection status. The options are as follows:

+
  • Active: The connection has been activated.
  • DELETED: The connection has been deleted.
+

available_queue_info

+

No

+

Array of Objects

+

For details about how to create a datasource connection for each queue, see Table 3.

+

dest_vpc_id

+

No

+

String

+

The VPC ID of the connected service.

+

dest_network_id

+

No

+

String

+

Subnet ID of the connected service.

+

create_time

+

No

+

Long

+

Time when a link is created. The time is converted to a UTC timestamp.

+

hosts

+

No

+

Array of Objects

+

User-defined host information. For details, see hosts parameter description.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 available_queue_info parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

peer_id

+

No

+

String

+

ID of a datasource connection.

+

status

+

No

+

String

+

Connection status. For details about the status code, see Table 5.

+

name

+

No

+

String

+

Name of a queue.

+

err_msg

+

No

+

String

+

Detailed error message when the status is FAILED.

+

update_time

+

No

+

Long

+

Time when the available queue list was updated.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 4 hosts parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

name

+

No

+

String

+

The user-defined host name.

+

ip

+

No

+

String

+

The IPv4 address of the host.

+
+
+ +
+ + + + + + + + + + + + + + + + + +
Table 5 Connection status

Parameter

+

Definition

+

Description

+

CREATING

+

Creating

+

The datasource connection is being created.

+

ACTIVE

+

Active

+

The datasource connection has been created, and the connection to the destination address is normal.

+

FAILED

+

Failed

+

Failed to create a datasource connection.

+
+
+
+

Example Request

None

+
+

Example Response

{
+  "is_success": true,
+  "message": "",
+  "name": "withvpc",
+  "id": "4c693ecc-bab8-4113-a838-129cedc9a563",
+  "available_queue_info": [
+    {
+      "status": "ACTIVE",
+      "name": "resource_mode_1",
+      "peer_id": "d2ae6628-fa37-4e04-806d-c59c497492d1",
+      "err_msg": "",
+      "update_time": 1566889577861
+    }
+  ],
+  "dest_vpc_id": "22094d8f-c310-4621-913d-4c4d655d8495",
+  "dest_network_id": "78f2562a-36e4-4b39-95b9-f5aab22e1281",
+  "create_time": 1566888011125,
+  "status": "ACTIVE",
+  "hosts": [
+    {
+      "ip":"192.168.0.1",
+      "name":"ecs-97f8-0001"
+    },
+    {
+      "ip":"192.168.0.2", 
+      "name":"ecs-97f8-0002"
+    }
+  ]
+}
+
+

Status Codes

Table 6 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 6 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0190.html b/docs/dli/api-ref/dli_02_0190.html new file mode 100644 index 00000000..2fb89aa5 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0190.html @@ -0,0 +1,433 @@ + + +

Querying an Enhanced Datasource Connection List

+

Function

This API is used to query the list of created enhanced datasource connections.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

connections

+

No

+

Array of Objects

+

Datasource connection information list. For details, see Table 4.

+

count

+

No

+

Integer

+

Number of returned datasource connections.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4 connections parameters

Parameter

+

Mandatory

+

Type

+

Description

+

id

+

No

+

String

+

Connection ID. Identifies the UUID of a datasource connection.

+

name

+

No

+

String

+

User-defined connection name.

+

status

+

No

+

String

+

Connection status. The options are as follows:

+
  • Active: The connection has been activated.
  • DELETED: The connection has been deleted.
+

available_queue_info

+

No

+

Array of Objects

+

For details about how to create a datasource connection for each queue, see Table 5.

+

dest_vpc_id

+

No

+

String

+

The VPC ID of the connected service.

+

dest_network_id

+

No

+

String

+

Subnet ID of the connected service.

+

isPrivis

+

No

+

Boolean

+

Whether the project permissions have been granted for the enhanced datasource connection. If the datasource connection has the permissions, the value of this field is false. Otherwise, the value is true.

+

create_time

+

No

+

Long

+

Time when a link is created. The time is converted to a UTC timestamp.

+

hosts

+

No

+

Array of Objects

+

User-defined host information. For details, see Table 6.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5 available_queue_info parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

peer_id

+

No

+

String

+

ID of a datasource connection.

+

status

+

No

+

String

+

Connection status. For details about the status code, see Table 7.

+

name

+

No

+

String

+

Name of a queue.

+

err_msg

+

No

+

String

+

Detailed error message when the status is FAILED.

+

update_time

+

No

+

Long

+

Time when the available queue list was updated.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 6 hosts parameters

Parameter

+

Mandatory

+

Type

+

Description

+

name

+

No

+

String

+

Custom host name

+

ip

+

No

+

String

+

IPv4 address of the host

+
+
+ +
+ + + + + + + + + + + + + + + + + +
Table 7 Connection status

Parameter

+

Definition

+

Description

+

CREATING

+

Creating

+

The datasource connection is being created.

+

ACTIVE

+

Active

+

The datasource connection has been created, and the connection to the destination address is normal.

+

FAILED

+

Failed

+

Failed to create a datasource connection.

+
+
+
+

Example Request

None

+
+

Example Response

{
+  "is_success": true,
+  "message": "",
+  "count": 1,
+  "connections": [
+    {
+      "name": "withvpc",
+      "id": "4c693ecc-bab8-4113-a838-129cedc9a563",
+      "available_queue_info": [
+        {
+          "status": "ACTIVE",
+          "name": "resource_mode_1",
+          "peer_id": "d2ae6628-fa37-4e04-806d-c59c497492d1",
+          "err_msg": "",
+          "update_time": 1566889577861
+        }
+      ],
+      "dest_vpc_id": "22094d8f-c310-4621-913d-4c4d655d8495",
+      "dest_network_id": "78f2562a-36e4-4b39-95b9-f5aab22e1281",
+      "isPrivis": true,
+      "create_time": 1566888011125,
+      "status": "ACTIVE"
+    }
+  ]
+}
+
+

Status Codes

Table 8 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 8 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0191.html b/docs/dli/api-ref/dli_02_0191.html new file mode 100644 index 00000000..ae0ed4d9 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0191.html @@ -0,0 +1,139 @@ + + +

Binding a Queue

+

Function

This API is used to bind a queue to a created enhanced datasource connection.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

queues

+

Yes

+

Array of Strings

+

List of queue names that are available for datasource connections.

+
+
+
+

Response

+
+ + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Type

+

Description

+

is_success

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

String

+

System prompt. If execution succeeds, the message may be left blank.

+
+
+
+

Example Request

{
+  "queues": [
+    "q1",
+    "q2"
+  ]
+}
+
+

Example Response

{
+  "is_success": true,
+  "message": "associated peer connection for queues: {q1,q2}."
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

Binding succeeded.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0192.html b/docs/dli/api-ref/dli_02_0192.html new file mode 100644 index 00000000..52208350 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0192.html @@ -0,0 +1,139 @@ + + +

Unbinding a Queue

+

Function

This API is used to unbind a queue from an enhanced datasource connection.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

queues

+

Yes

+

Array of String

+

List of queue names that are available for datasource connections.

+
+
+
+

Response

+
+ + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Type

+

Description

+

is_success

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

String

+

System prompt. If execution succeeds, the message may be left blank.

+
+
+
+

Example Request

{
+  "queues": [
+    "q1",
+    "q2"
+  ]
+}
+
+

Example Response

{
+  "is_success": true,
+  "message": "Disassociated peer connection for queues:{q1,q2}."
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Codes

+

Description

+

200

+

Unbinding succeeded.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0193.html b/docs/dli/api-ref/dli_02_0193.html new file mode 100644 index 00000000..46d1caad --- /dev/null +++ b/docs/dli/api-ref/dli_02_0193.html @@ -0,0 +1,33 @@ + + +

Queue-related APIs (Recommended)

+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0194.html b/docs/dli/api-ref/dli_02_0194.html new file mode 100644 index 00000000..fc734dcb --- /dev/null +++ b/docs/dli/api-ref/dli_02_0194.html @@ -0,0 +1,266 @@ + + +

Creating a Queue

+

Function

This API is used to create a queue. The queue will be bound to specified compute resources.

+

It takes 5 to 15 minutes to start a job using a new queue for the first time.

+
+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

queue_name

+

Yes

+

String

+

Name of a newly created resource queue. The name can contain only digits, letters, and underscores (_), but cannot contain only digits or start with an underscore (_). Length range: 1 to 128 characters.

+
NOTE:

The queue name is case-insensitive. The uppercase letters will be automatically converted to lowercase letters.

+
+

queue_type

+

No

+

String

+

Queue type. The options are as follows:

+
  • sql: Queues used to run SQL jobs
  • general: Queues used to run Flink and Spark Jar jobs.
+
NOTE:

If the type is not specified, the default value sql is used.

+
+

description

+

No

+

String

+

Description of a queue.

+

cu_count

+

Yes

+

Integer

+

Minimum number of CUs that are bound to a queue. Currently, the value can only be 16, 64, or 256.

+

enterprise_project_id

+

No

+

String

+

Enterprise project ID. The value 0 indicates the default enterprise project.

+
NOTE:

Users who have enabled Enterprise Management can set this parameter to bind a specified project.

+
+

platform

+

No

+

String

+

CPU architecture of compute resources.

+
  • x86_64
+

resource_mode

+

No

+

Integer

+

Queue resource mode. The options are as follows:

+

0: indicates the shared resource mode.

+

1: indicates the exclusive resource mode.

+

labels

+

No

+

Array of Strings

+

Tag information of the queue to be created. Currently, the tag information includes whether the queue is cross-AZ (JSON character string). The value can only be 2, that is, a dual-AZ queue whose compute resources are distributed in two AZs is created.

+

tags

+

No

+

Array of Objects

+

Queue tags for identifying cloud resources. A tag consists of a key and tag value. For details, see Table 3.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 3 tags parameters

Parameter

+

Mandatory

+

Type

+

Description

+

Key

+

Yes

+

String

+

Tag key.

+

value

+

Yes

+

String

+

Tag value.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 4 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

queue_name

+

No

+

String

+

Name of the created queue.

+
NOTE:

The queue name is case-insensitive. The uppercase letters will be automatically converted to lowercase letters.

+
+
+
+
+

Example Request

{
+    "queue_name": "queue1",
+    "description": "test",
+    "cu_count": 16,
+    "resource_mode": 1,
+    "queue_type": "general",
+    "labels": ["multi_az=2"]
+}
+
+

Example Response

{
+  "is_success": true,
+  "message": "",
+  "queue_name": "queue1"
+}
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

The queue is created successfully.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0195.html b/docs/dli/api-ref/dli_02_0195.html new file mode 100644 index 00000000..f4413e60 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0195.html @@ -0,0 +1,120 @@ + + +

Deleting a Queue

+

Function

This API is used to delete a specified queue.

+

If a task is being executed in a specified queue, the queue cannot be deleted.

+
+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

None

+
+

Example Response

{
+  "is_success": true,
+  "message": ""
+}
+
+

Status Codes

Table 3 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 3 Status codes

Status Code

+

Description

+

200

+

Deletion succeeded.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0196.html b/docs/dli/api-ref/dli_02_0196.html new file mode 100644 index 00000000..fbe75626 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0196.html @@ -0,0 +1,391 @@ + + +

Querying All Queues

+

Function

This API is used to list all queues under the project.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

queues

+

No

+

Array of Object

+

Queue information For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4 queues parameters description

Parameter

+

Mandatory

+

Type

+

Description

+

queue_name

+

No

+

String

+

Name of a queue.

+

description

+

No

+

String

+

Queue description.

+

owner

+

No

+

String

+

User who creates a queue.

+

create_time

+

No

+

Long

+

Time when the queue is created. The timestamp is expressed in milliseconds.

+

queue_type

+

No

+

String

+

Queue type.

+
  • sql
  • general
  • all
+

If this parameter is not specified, the default value sql is used.

+

cu_count

+

No

+

Integer

+

Number of compute units (CUs) bound to a queue, that is, the number of CUs in the current queue.

+

resource_id

+

No

+

String

+

Resource ID of a queue.

+

enterprise_project_id

+

No

+

String

+

Enterprise project ID. 0 indicates the default enterprise project.

+
NOTE:

Users who have enabled Enterprise Management can set this parameter to bind a specified project.

+
+

cidr_in_vpc

+

No

+

String

+

The VPC CIDR block of the queue.

+

cidr_in_mgntsubnet

+

No

+

String

+

CIDR block of the management subnet

+

cidr_in_subnet

+

No

+

String

+

Subnet CIDR block

+

resource_mode

+

No

+

Integer

+

Resource mode

+
  • 0: Shared queue
  • 1: Dedicated queue
+

platform

+

No

+

String

+

CPU architecture of queue compute resources.

+
  • x86_64
+

is_restarting

+

No

+

Boolean

+

Whether to restart the queue. The default value is false.

+

labels

+

No

+

String

+

Tag information of the queue to be created, including the JSON string indicating whether the queue is Dual-AZ. Currently, only the value 2 is supported, indicating that two queues are created.

+

cu_spec

+

No

+

Integer

+

Specifications of a queue. For a queue whose billing mode is yearly/monthly, this parameter indicates the CU value of the yearly/monthly part. For a pay-per-use queue, this parameter indicates the initial value when a user purchases a queue.

+

cu_scale_out_limit

+

No

+

Integer

+

Upper limit of the CU value for elastic scaling of the current queue.

+

cu_scale_in_limit

+

No

+

Integer

+

Lower limit of the CU value for elastic scaling of the current queue.

+
+
+
+

Example Request

None

+
+

Example Response

{
+      "is_success": true,
+      "message": "",
+      "queues": [
+          {
+              "queue_name": "test",
+              "owner": "testuser",
+              "description": "",
+              "create_time": 1562221422671,
+              "queue_type": "spark",
+              "cu_count": 16,
+              "resource_id": "26afb850-d3c9-42c1-81c0-583d1163e80f",
+              "cidr_in_vpc": "10.0.0.0/8",
+              "cidr_in_subnet": "10.0.0.0/24",
+              "cidr_in_mgntsubnet": "10.23.128.0/24",
+              "resource_mode": 1,
+              "platform": "x86_64",
+              "is_restarting": false,
+              "labels": "multi_az=2",
+              "resource_type": "vm",
+              "cu_spec": 16
+          }
+       ]      
+    }
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0200.html b/docs/dli/api-ref/dli_02_0200.html new file mode 100644 index 00000000..e81a78c6 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0200.html @@ -0,0 +1,177 @@ + + +

Modifying the Host Information

+

Function

This API is used to modify the host information of a connected datasource. Only full overwriting is supported.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

hosts

+

Yes

+

Array of objects

+

The user-defined host information. A maximum of 20,000 records are supported. For details, see hosts request parameters. If this parameter is left blank, all configured host information will be deleted.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 3 hosts request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

name

+

No

+

String

+

The user-defined host name. The value can contain a maximum of 128 characters, including digits, letters, underscores (_), hyphens (-), and periods (.). It must start with a letter.

+

ip

+

No

+

String

+

The IPv4 address of the host.

+
+
+
+

Response

+
+ + + + + + + + + + + + + +
Table 4 Response parameters

Parameter

+

Type

+

Description

+

is_success

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

String

+

System prompt. If execution succeeds, the message may be left blank.

+
+
+
+

Example Request

{
+  "hosts": [
+    {
+      "ip":"192.168.0.1",
+      "name":"ecs-97f8-0001"
+    },
+    {
+      "ip":"192.168.0.2", 
+      "name":"ecs-97f8-0002"
+    }
+  ]
+}
+
+

Example Response

{
+  "is_success": true,
+  "message": ""
+}
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

The modification operations are successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0201.html b/docs/dli/api-ref/dli_02_0201.html new file mode 100644 index 00000000..b0fdf4c9 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0201.html @@ -0,0 +1,1170 @@ + + +

Permissions Policies and Supported Actions

+

This section describes fine-grained permissions management for your DLI. If your account does not need individual IAM users, then you may skip this section.

+

By default, new IAM users do not have permissions assigned. You need to add them to one or more groups, and attach permissions policies or roles to these groups. Users inherit permissions from the groups to which they are added. After authorization, the user can perform specified operations on DLI based on the permissions.

+

+ +

Policy-based authorization is useful if you want to allow or deny the access to an API.

+
+

An account has all of the permissions required to call all APIs, but IAM users must have the required permissions specifically assigned. The permissions required for calling an API are determined by the actions supported by the API. Only users who have been granted permissions allowing the actions can call the API successfully. For example, if an IAM user needs to create queues using an API, the user must have been granted permissions that allow the dli:queue:create_queue action.

+

Supported Actions

DLI provides system-defined policies that can be directly used in IAM. You can also create custom policies and use them to supplement system-defined policies, implementing more refined access control. Operations supported by policies are specific to APIs. The following are common concepts related to policies:

+ +

The check mark (√) indicates that an action takes effect. The cross mark (x) indicates that an action does not take effect.

+
+

DLI supports the following actions that can be defined in custom policies:

+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 1 Actions

Permission

+

API

+

Actions

+

Dependent Permission

+

IAM Project

+

(Project)

+

Enterprise Project

+

(Enterprise Project)

+

Creating a Queue

+

POST /v1.0/{project_id}/queues

+

dli:queue:create_queue

+

-

+

√

+

√

+

Deleting a Queue

+

DELETE /v1.0/{project_id}/queues/{queue_name}

+

dli:queue:drop_queue

+

-

+

√

+

√

+

Submitting a Job

+

POST /v1.0/{project_id}/jobs/submit-job

+

dli:queue:submit_job

+

-

+

√

+

×

+

Canceling a job

+

DELETE /v1.0/{project_id}/jobs/{job_id}

+

dli:queue:cancel_job

+

-

+

√

+

×

+

Viewing Queue Permissions of Other Users

+

GET /v1.0/{project_id}/queues/{queue_name}/users

+

dli:queue:show_privileges

+

-

+

√

+

×

+

Restarting a queue

+

PUT /v1.0/{project_id}/queues/{queue_name}/action

+

dli:queue:restart

+

-

+

√

+

×

+

Scaling out/in a queue

+

PUT /v1.0/{project_id}/queues/{queue_name}/action

+

dli:queue:scale_queue

+

-

+

√

+

×

+

Granting permissions to a specified user queue

+

PUT /v1.0/{project_id}/user-authorization

+

dli:queue:grant_privilege

+

-

+

√

+

×

+

Removing permissions of a specified user queue

+

PUT /v1.0/{project_id}/user-authorization

+

dli:queue:revoke_privilege

+

-

+

√

+

×

+

Creating a Database

+

POST /v1.0/{project_id}/databases

+

dli:database:create_database

+

-

+

√

+

×

+

Deleting a Database

+

DELETE /v1.0/{project_id}/databases/{database_name}

+

dli:database:drop_database

+

-

+

√

+

×

+

Modifying database configuration

+

POST /v1.0/{project_id}/jobs/submit-job

+

dli:database:alter_database_properties

+

-

+

√

+

×

+

Explaining the SQL Statement as an Execution Plan

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:database:explain

+

-

+

√

+

×

+

Creating a Table

+

POST /v1.0/{project_id}/databases/{database_name}/tables

+

dli:database:create_table

+

-

+

√

+

×

+

Creating a View

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

database:create_view

+

-

+

√

+

×

+

Creating a Function

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:database:create_function

+

-

+

√

+

×

+

Describing a Function

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:database:describe_function

+

-

+

√

+

×

+

Deleting a Function

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:database:drop_function

+

-

+

√

+

×

+

Displaying a Function

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:database:show_functions

+

-

+

√

+

×

+

Creating a role

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:database:create_role

+

-

+

√

+

×

+

Deleting a role

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:database:drop_role

+

-

+

√

+

×

+

Displaying a Role

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:database:show_roles

+

-

+

√

+

×

+

Displaying All Roles

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:database:show_all_roles

+

-

+

√

+

×

+

Binding a Role

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:database:grant_role

+

-

+

√

+

×

+

Unbinding the Role

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:database:revoke_role

+

-

+

√

+

×

+

Displaying the Binding Relationships Between All Roles and Users

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:database:show_users

+

-

+

√

+

×

+

Viewing Database Permissions of Other Users

+

GET /v1.0/{project_id}/databases/{database_name}/users

+

dli:database:show_privileges

+

-

+

√

+

×

+

Displaying database

+

GET /v1.0/{project_id}/databases

+

dli:database:display_database

+

-

+

√

+

×

+

Displaying all databases

+

GET /v1.0/{project_id}/databases

+

dli:database:display_all_databases

+

-

+

√

+

×

+

Displaying all tables

+

GET /v1.0/{project_id}/databases

+

dli:database:display_all_tables

+

-

+

√

+

×

+

Granting database permissions to a specified user

+

PUT /v1.0/{project_id}/user-authorization

+

dli:database:grant_privilege

+

-

+

√

+

×

+

Removing database permissions of a specified user

+

PUT /v1.0/{project_id}/user-authorization

+

dli:database:revoke_privilege

+

-

+

√

+

×

+

Deleting a Table

+

DELETE /v1.0/{project_id}/databases/{database_name}/tables/{table_name}

+

dli:table:drop_table

+

-

+

√

+

×

+

Displaying Table Structure

+

GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}

+

dli:table:describe_table

+

-

+

√

+

×

+

Querying a Table

+

GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/preview

+

dli:table:select

+

-

+

√

+

×

+

Displaying table configuration

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:table:show_table_properties

+

-

+

√

+

×

+

Displaying the Table Creation Statement

+

GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/preview

+

dli:table:show_create_table

+

-

+

√

+

×

+

Displaying All Partitions

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:table:show_partitions

+

-

+

√

+

×

+

Setting Table Configuration

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:table:alter_table_set_properties

+

-

+

√

+

×

+

Adding a Column

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:table:alter_table_add_columns

+

-

+

√

+

×

+

Adding Partitions to the Partitioned Table

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:table:alter_table_add_partition

+

-

+

√

+

×

+

Renaming a Table Partition

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:table:alter_table_rename_partition

+

-

+

√

+

×

+

Deleting Partitions from a Partitioned Table

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:table:alter_table_drop_partition

+

-

+

√

+

×

+

Restoring Table Partitions

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:table:alter_table_recover_partition

+

-

+

√

+

×

+

Renaming a Table

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:table:alter_table_rename

+

-

+

√

+

×

+

Setting the Partition Path

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:table:alter_table_set_location

+

-

+

√

+

×

+

Inserting data into a table

+

POST /v1.0/{project_id}/jobs/submit-job, statement invoking

+

dli:table:insert_into_table

+

-

+

√

+

×

+

Rewriting table data

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:table:insert_overwrite_table

+

-

+

√

+

×

+

Viewing Table Permissions of Other Users

+

GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/users

+

dli:table:show_privileges

+

-

+

√

+

×

+

Clearing a table

+

POST /v1.0/{project_id}/jobs/submit-job

+

dli:table:truncate_table

+

-

+

√

+

×

+

Updating a table

+

POST /v1.0/{project_id}/jobs/submit-job

+

dli:table:update

+

-

+

√

+

×

+

Deleting data in a table

+

POST /v1.0/{project_id}/jobs/submit-job

+

dli:table:delete

+

-

+

√

+

×

+

Modifying column information

+

POST /v1.0/{project_id}/jobs/submit-job

+

dli:table:alter_table_change_column

+

-

+

√

+

×

+

Deleting a column

+

POST /v1.0/{project_id}/jobs/submit-job

+

dli:table:alter_table_drop_columns

+

-

+

√

+

×

+

Displaying data segments

+

POST /v1.0/{project_id}/jobs/submit-job

+

dli:table:show_segments

+

-

+

√

+

×

+

Merging data segments

+

POST /v1.0/{project_id}/jobs/submit-job

+

dli:table:compaction

+

-

+

√

+

×

+

Modifying a View

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:table:alter_view

+

-

+

√

+

×

+

Displaying a table

+

POST /v1.0/{project_id}/jobs/submit-job

+

dli:table:display_table

+

-

+

√

+

×

+

Granting data table permissions to a specified user

+

PUT /v1.0/{project_id}/user-authorization

+

dli:table:grant_privilege

+

-

+

√

+

×

+

Removing data table permissions of a specified user

+

PUT /v1.0/{project_id}/user-authorization

+

dli:table:revoke_privilege

+

-

+

√

+

×

+

Viewing the security authentication information permission list of other users

+

GET /v1.0/{project_id}/datasource/auth-infos/{auth_name}/users

+

dli:datasourceauth:show_privileges

+

-

+

√

+

×

+

Using security authentication information

+

POST /v1.0/{project_id}/jobs/submit-job

+

dli:datasourceauth:use_auth

+

-

+

√

+

×

+

Deleting security authentication information

+

DELETE /v2.0/{project_id}/datasource/auth-infos/{auth_info_name}

+

dli:datasourceauth:drop_auth

+

-

+

√

+

×

+

Granting security authentication permissions to a specified user

+

PUT /v1.0/{project_id}/user-authorization

+

dli:datasourceauth:grant_privilege

+

-

+

√

+

×

+

Updating security authentication information

+

PUT /v2.0/{project_id}/datasource/auth-infos

+

dli:datasourceauth:update_auth

+

-

+

√

+

×

+

Granting security authentication permissions to a specified user

+

PUT /v1.0/{project_id}/user-authorization

+

dli:datasourceauth:grant_privilege

+

-

+

√

+

×

+

Removing security authentication permissions of a specified user

+

PUT /v1.0/{project_id}/user-authorization

+

dli:datasourceauth:revoke_privilege

+

-

+

√

+

×

+

Querying job details

+

GET /v1.0/:x_project_id/streaming/jobs/:job_id

+

dli:jobs:get

+

-

+

√

+

×

+

Querying a job list

+

GET /v1.0/:x_project_id/streaming/jobs

+

dli:jobs:list_all

+

-

+

√

+

×

+

Creating a job

+

POST /v1.0/:x_project_id/streaming/sql-jobs

+

dli:jobs:create

+

-

+

√

+

×

+

Updating a job

+

PUT /v1.0/:x_project_id/streaming/sql-jobs/:job_id

+

dli:jobs:update

+

-

+

√

+

×

+

Deleting a job

+

POST /v1.0/:x_project_id/streaming/jobs/delete

+

dli:jobs:delete

+

-

+

√

+

×

+

Starting a job

+

POST /v1.0/:x_project_id/streaming/jobs/run

+

dli:jobs:start

+

-

+

√

+

×

+

Stopping a job

+

POST /v1.0/:x_project_id/streaming/jobs/stop

+

dli:jobs:stop

+

-

+

√

+

×

+

Exporting a job

+

POST /v1.0/:x_project_id/streaming/jobs/export

+

dli:jobs:export

+

-

+

√

+

×

+

Granting job permissions to a specified user

+

PUT /v1.0/{{project_id}}/authorization

+

dli:jobs:grant_privilege

+

-

+

√

+

×

+

Removing job permissions of a specified user

+

PUT /v1.0/{{project_id}}/authorization

+

dli:jobs:revokePrivilege

+

-

+

√

+

×

+

Querying a Column

+

POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking

+

dli:column:select

+

-

+

√

+

×

+

Granting permissions to a specified user queue

+

PUT /v1.0/{project_id}/user-authorization

+

dli:column:grant_privilege

+

-

+

√

+

×

+

Removing permissions of a specified user queue

+

PUT /v1.0/{project_id}/user-authorization

+

dli:column:revoke_privilege

+

-

+

√

+

×

+

Querying the Flink Job List

+

GET /v1.0/:x_project_id/jobs

+

dli:jobs:list_job

+

-

+

√

+

×

+

Querying Flink Job Details

+

GET /v1.0/:x_project_id/job/:job_id

+

dli:jobs:get_job

+

-

+

√

+

×

+

Creating a Flink Job

+

POST /v1.0/:x_project_id/sql_job

+

dli:jobs:create_job

+

-

+

√

+

×

+

Updating a Flink Job

+

PATCH /v1.0/:x_project_id/sql_job

+

dli:jobs:update_job

+

-

+

√

+

×

+

Deleting a Flink Job

+

DELETE /v1.0/:x_project_id/job/:job_id

+

dli:jobs:delete_job

+

-

+

√

+

×

+

Starting a Flink Job

+

POST v1.0/:x_project_id/job/:job_id/run

+

dli:queue:submit_job

+

-

+

√

+

×

+

Stopping a Flink Job

+

POST /v1.0/:x_project_id/job/:job_id/stop

+

dli:queue:cancel_job

+

-

+

√

+

×

+
+
+
+
+ diff --git a/docs/dli/api-ref/dli_02_0223.html b/docs/dli/api-ref/dli_02_0223.html new file mode 100644 index 00000000..3cab90c3 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0223.html @@ -0,0 +1,39 @@ + + +

APIs Related to Flink Jobs

+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0225.html b/docs/dli/api-ref/dli_02_0225.html new file mode 100644 index 00000000..a3e64768 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0225.html @@ -0,0 +1,130 @@ + + +

Granting OBS Permissions to DLI

+

Function

This API is used to grant DLI the permission to access OBS buckets for saving job checkpoints and run logs.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

obs_buckets

+

Yes

+

Array of Strings

+

List of OBS buckets.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

Message content.

+
+
+
+

Example Request

{
+    "obs_buckets": [
+        "bucket1"
+    ]
+}
+
+

Example Response

{
+    "is_success": true,
+    "message": "The following OBS bucket is authorized successfully, bucket1."
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

Authorization succeeds.

+

400

+

Request error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0228.html b/docs/dli/api-ref/dli_02_0228.html new file mode 100644 index 00000000..4e9191f4 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0228.html @@ -0,0 +1,470 @@ + + +

Creating a SQL Job

+

Function

This API is used to create a Flink streaming SQL job.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

name

+

Yes

+

String

+

Name of the job. The value can contain 1 to 57 characters.

+

desc

+

No

+

String

+

Job description. Length range: 0 to 512 characters.

+

template_id

+

No

+

Integer

+

Template ID.

+

If both template_id and sql_body are specified, sql_body is used. If template_id is specified but sql_body is not, the SQL statement of the template identified by template_id is used as sql_body.

+

queue_name

+

No

+

String

+

Name of a queue. The value can contain 0 to 128 characters.

+

sql_body

+

No

+

String

+

Stream SQL statement, which includes at least the following three parts: source, query, and sink. Length range: 1 to 1024x1024 characters.

+

run_mode

+

No

+

String

+

Job running mode. The options are as follows:

+
  • shared_cluster: indicates that the job is running on a shared cluster.
  • exclusive_cluster: indicates that the job is running on an exclusive cluster.
  • edge_node: indicates that the job is running on an edge node.
+

The default value is shared_cluster.

+

cu_number

+

No

+

Integer

+

Number of CUs selected for a job. The default value is 2.

+

Sum of the number of compute units and job manager CUs of DLI. CU is also the billing unit of DLI. One CU equals one vCPU and 4 GB. The value is the number of CUs required for job running and cannot exceed the number of CUs in the bound queue. For details about how to set the number of CUs of JobManager, see manager_cu_number.

+

parallel_number

+

No

+

Integer

+

Number of parallel jobs set by a user. The default value is 1.

+

Number of Flink SQL jobs that run at the same time. Properly increasing the number of parallel threads improves the overall computing capability of the job. However, the switchover overhead caused by the increase of threads must be considered. This value cannot be greater than four times the compute units (number of CUs minus the number of JobManager CUs).

+

For details about how to set the number of JobManager CUs, see manager_cu_number.

+

checkpoint_enabled

+

No

+

Boolean

+

Whether to enable the automatic job snapshot function.

+
  • true: indicates to enable the automatic job snapshot function.
  • false: indicates to disable the automatic job snapshot function.
  • Default value: false
+

checkpoint_mode

+

No

+

Integer

+

Snapshot mode. There are two options:

+
  • 1: ExactlyOnce, indicates that data is processed only once.
  • 2: AtLeastOnce, indicates that data is processed at least once.
+

The default value is 1.

+

checkpoint_interval

+

No

+

Integer

+

Snapshot interval. The unit is second. The default value is 10.

+

obs_bucket

+

No

+

String

+

OBS path where users are authorized to save the snapshot. This parameter is valid only when checkpoint_enabled is set to true.

+

OBS path where users are authorized to save job run logs. This parameter is valid only when log_enabled is set to true.

+

log_enabled

+

No

+

Boolean

+

Whether to enable the function of uploading job logs to users' OBS buckets. The default value is false.

+

smn_topic

+

No

+

String

+

SMN topic. If a job fails, the system will send a message to users subscribed to the SMN topic.

+

restart_when_exception

+

No

+

Boolean

+

Whether to enable the function of automatically restarting a job upon job exceptions. The default value is false.

+

idle_state_retention

+

No

+

Integer

+

Retention time of the idle state. The unit is hour. The default value is 1.

+

job_type

+

No

+

String

+

Job type. This parameter can be set to flink_sql_job.

+ +

dirty_data_strategy

+

No

+

String

+

Dirty data policy of a job.

+
  • 2:obsDir: Save. obsDir specifies the path for storing dirty data.
  • 1: Trigger a job exception
  • 0: Ignore
+

The default value is 0.

+

udf_jar_url

+

No

+

String

+

Name of the resource package that has been uploaded to the DLI resource management system. The UDF Jar file of the SQL job is specified by this parameter.

+

manager_cu_number

+

No

+

Integer

+

Number of CUs in the JobManager selected for a job. The default value is 1.

+

tm_cus

+

No

+

Integer

+

Number of CUs for each TaskManager. The default value is 1.

+

tm_slot_num

+

No

+

Integer

+

Number of slots in each TaskManager. The default value is (parallel_number*tm_cus)/(cu_number-manager_cu_number).

+

resume_checkpoint

+

No

+

Boolean

+

Whether the abnormal restart is recovered from the checkpoint.

+

resume_max_num

+

No

+

Integer

+

Maximum number of retry times upon exceptions. The unit is times/hour. Value range: -1 or greater than 0. The default value is -1, indicating that the number of times is unlimited.

+

tags

+

No

+

Array of Objects

+

Label of a Flink SQL job. For details, see Table 3.

+

runtime_config

+

No

+

String

+

Customizes optimization parameters when a Flink job is running.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 3 tags parameters

Parameter

+

Mandatory

+

Type

+

Description

+

key

+

Yes

+

String

+

Tag key.

+

value

+

Yes

+

String

+

Tag value.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 4 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

Message content.

+

job

+

No

+

Object

+

Information about the job status. For details, see Table 5.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + +
Table 5 job parameters

Parameter

+

Mandatory

+

Type

+

Description

+

job_id

+

Yes

+

Long

+

Job ID.

+

status_name

+

No

+

String

+

Name of job status. For details, see the description of the status field in Querying Job Details.

+

status_desc

+

No

+

String

+

Status description. Causes and suggestions for the abnormal status.

+
+
+
+

Example Request

{
+    "name": "myjob",
+    "desc": "This is a job used for counting characters.",
+    "template_id": 100000,
+    "queue_name": "testQueue",
+    "sql_body": "select * from source_table",
+    "run_mode": "exclusive_cluster",
+    "cu_number": 2,
+    "parallel_number": 1,
+    "checkpoint_enabled": false,
+    "checkpoint_mode": 1,
+    "checkpoint_interval": 0,
+    "obs_bucket": "my_obs_bucket",
+    "log_enabled": false,
+    "restart_when_exception": false,
+    "idle_state_retention": 3600,
+    "job_type": "flink_sql_job",
+    "dirty_data_strategy": "0",
+    "udf_jar_url": "group/test.jar"
+}
+
+

Example Response

{
+    "is_success": true,
+    "message": "A DLI job is created successfully.",
+    "job": {
+        "job_id": 148,
+        "status_name": "job_init",
+        "status_desc": ""
+    }
+}
+
+

Status Codes

Table 6 describes status codes.

+ +
+ + + + + + + + + + +
Table 6 Status codes

Status Code

+

Description

+

200

+

The job is created successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0229.html b/docs/dli/api-ref/dli_02_0229.html new file mode 100644 index 00000000..16a88122 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0229.html @@ -0,0 +1,426 @@ + + +

Updating a SQL Job

+

Function

This API is used to modify a Flink SQL job.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

name

+

No

+

String

+

Name of a job. Length range: 0 to 57 characters.

+

desc

+

No

+

String

+

Job description. Length range: 0 to 512 characters.

+

queue_name

+

No

+

String

+

Name of a queue. The value can contain 0 to 128 characters.

+

sql_body

+

No

+

String

+

Stream SQL statement, which includes at least the following three parts: source, query, and sink. Length range: 0 to 1024x1024 characters.

+

run_mode

+

No

+

String

+

Job running mode. The options are as follows:

+
  • shared_cluster: indicates that the job is running on a shared cluster.
  • exclusive_cluster: indicates that the job is running on an exclusive cluster.
  • edge_node: indicates that the job is running on an edge node.
+

The default value is shared_cluster.

+

cu_number

+

No

+

Integer

+

Number of CUs selected for a job. The default value is 2.

+

parallel_number

+

No

+

Integer

+

Number of parallel jobs set by a user. The default value is 1.

+

checkpoint_enabled

+

No

+

Boolean

+

Whether to enable the automatic job snapshot function.

+
  • true: indicates to enable the automatic job snapshot function.
  • false: indicates to disable the automatic job snapshot function.
  • Default value: false
+

checkpoint_mode

+

No

+

Integer

+

Snapshot mode. There are two options:

+
  • 1: ExactlyOnce, indicates that data is processed only once.
  • 2: AtLeastOnce, indicates that data is processed at least once.
+

The default value is 1.

+

checkpoint_interval

+

No

+

Integer

+

Snapshot interval. The unit is second. The default value is 10.

+

obs_bucket

+

No

+

String

+

OBS path where users are authorized to save the snapshot. This parameter is valid only when checkpoint_enabled is set to true.

+

OBS path where users are authorized to save job run logs. This parameter is valid only when log_enabled is set to true.

+

log_enabled

+

No

+

Boolean

+

Whether to enable the function of uploading job logs to users' OBS buckets. The default value is false.

+

smn_topic

+

No

+

String

+

SMN topic. If a job fails, the system will send a message to users subscribed to the SMN topic.

+

restart_when_exception

+

No

+

Boolean

+

Whether to enable the function of automatically restarting a job upon job exceptions. The default value is false.

+

idle_state_retention

+

No

+

Integer

+

Expiration time, in seconds. The default value is 3600.

+

edge_group_ids

+

No

+

Array of Strings

+

List of edge computing group IDs. Use commas (,) to separate multiple IDs.

+

dirty_data_strategy

+

No

+

String

+

Dirty data policy of a job.

+
  • 2:obsDir: Save. obsDir specifies the path for storing dirty data.
  • 1: Trigger a job exception
  • 0: Ignore
+

The default value is 0.

+

udf_jar_url

+

No

+

String

+

Name of the resource package that has been uploaded to the DLI resource management system. The UDF Jar file of the SQL job is specified by this parameter.

+

manager_cu_number

+

No

+

Integer

+

Number of CUs in the JobManager selected for a job. The default value is 1.

+

tm_cus

+

No

+

Integer

+

Number of CUs for each TaskManager. The default value is 1.

+

tm_slot_num

+

No

+

Integer

+

Number of slots in each TaskManager. The default value is (parallel_number*tm_cus)/(cu_number-manager_cu_number).

+

operator_config

+

No

+

String

+

Degree of parallelism (DOP) of an operator.

+

resume_checkpoint

+

No

+

Boolean

+

Whether the abnormal restart is recovered from the checkpoint.

+

resume_max_num

+

No

+

Integer

+

Maximum number of retry times upon exceptions. The unit is times/hour. Value range: -1 or greater than 0. The default value is -1, indicating that the number of times is unlimited.

+

static_estimator_config

+

No

+

String

+

Traffic or hit ratio of each operator, which is a character string in JSON format. Example:

+
{"operator_list":[{"id":"0a448493b4782967b150582570326227","rate_factor":0.55},{"id":"6d2677a0ecc3fd8df0b72ec675edf8f4","rate_factor":1},{"id":"ea632d67b7d595e5b851708ae9ad79d6","rate_factor":0.55},{"id":"bc764cd8ddf7a0cff126f51c16239658","output_rate":2000}]}
+

runtime_config

+

No

+

String

+

Customizes optimization parameters when a Flink job is running.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

Message content.

+

job

+

No

+

Object

+

Information about job update. For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + +
Table 4 job parameters

Parameter

+

Mandatory

+

Type

+

Description

+

update_time

+

No

+

Long

+

Job update time, expressed by milliseconds

+
+
+
+

Example Request

{
+    "name": "myjob",
+    "desc": "My first job",
+    "queue_name": "testQueue",
+    "sql_body": "select * from source_table",
+    "run_mode": "shared_cluster",
+    "cu_number": 4,
+    "parallel_number": 4,
+    "checkpoint_enabled": false,
+    "checkpoint_mode": 1,
+    "checkpoint_interval": 10,
+    "obs_bucket": "",
+    "log_enabled": false,
+    "smn_topic": "",
+    "restart_when_exception": false,
+    "idle_state_retention": 3600,
+    "edge_group_ids": [
+        "62de1e1c-066e-48a8-a79d-f461a31b2ee1",
+        "2eb00f85-99f2-4144-bcb7-d39ff47f9002"
+    ],
+    "dirty_data_strategy": "0",
+    "udf_jar_url": "group/test.jar"
+}
+
+

Example Response

{
+    "is_success": "true",
+    "message": "The job is updated successfully.",
+    "job": {
+        "update_time": 1578905682534
+    }
+}
+
+

Status Codes

Table 5 describes status codes.

+ +
+ + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

The job is updated successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0230.html b/docs/dli/api-ref/dli_02_0230.html new file mode 100644 index 00000000..ae249435 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0230.html @@ -0,0 +1,431 @@ + + +

Creating a Flink Jar job

+

Function

This API is used to create custom jobs, which currently support the JAR format and run in dedicated queues.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

name

+

Yes

+

String

+

Name of the job. The value can contain 1 to 57 characters.

+

desc

+

No

+

String

+

Job description. Length range: 0 to 512 characters.

+

queue_name

+

No

+

String

+

Name of a queue. The value can contain 0 to 128 characters.

+

cu_number

+

No

+

Integer

+

Number of CUs selected for a job.

+

manager_cu_number

+

No

+

Integer

+

Number of CUs on the management node selected by the user for a job, which corresponds to the number of Flink job managers. The default value is 1.

+

parallel_number

+

No

+

Integer

+

Number of parallel operations selected for a job.

+

log_enabled

+

No

+

Boolean

+

Whether to enable the job log function.

+
  • true: indicates to enable the job log function.
  • false: indicates to disable the job log function.
  • Default value: false
+

obs_bucket

+

No

+

String

+

OBS bucket where users are authorized to save logs when log_enabled is set to true.

+

smn_topic

+

No

+

String

+

SMN topic. If a job fails, the system will send a message to users subscribed to the SMN topic.

+

main_class

+

No

+

String

+

Job entry class.

+

entrypoint_args

+

No

+

String

+

Job entry parameter. Multiple parameters are separated by spaces.

+

restart_when_exception

+

No

+

Boolean

+

Whether to enable the function of restart upon exceptions. The default value is false.

+

entrypoint

+

No

+

String

+

Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize the JAR file where the job main class is located.

+

dependency_jars

+

No

+

Array of Strings

+

Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize other dependency packages.

+

Example: myGroup/test.jar,myGroup/test1.jar.

+

dependency_files

+

No

+

Array of Strings

+

Name of the resource package that has been uploaded to the DLI resource management system. This parameter is used to customize dependency files.

+

Example: myGroup/test.csv,myGroup/test1.csv.

+

You can add the following content to the application to access the corresponding dependency file: In the command, fileName indicates the name of the file to be accessed, and ClassName indicates the name of the class that needs to access the file.

+
ClassName.class.getClassLoader().getResource("userData/fileName")
+

tm_cus

+

No

+

Integer

+

Number of CUs for each TaskManager. The default value is 1.

+

tm_slot_num

+

No

+

Integer

+

Number of slots in each TaskManager. The default value is (parallel_number*tm_cus)/(cu_number-manager_cu_number).

+

resume_checkpoint

+

No

+

Boolean

+

Whether the abnormal restart is recovered from the checkpoint.

+

resume_max_num

+

No

+

Integer

+

Maximum number of retry times upon exceptions. The unit is times/hour. Value range: -1 or greater than 0. The default value is -1, indicating that the number of times is unlimited.

+

checkpoint_path

+

No

+

String

+

Storage address of the checkpoint in the JAR file of the user. The path must be unique.

+

tags

+

No

+

Array of Objects

+

Label of a Flink JAR job. For details, see Table 3.

+

runtime_config

+

No

+

String

+

Customizes optimization parameters when a Flink job is running.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 3 tags parameter

Parameter

+

Mandatory

+

Type

+

Description

+

key

+

Yes

+

String

+

Tag key.

+

value

+

Yes

+

String

+

Tag value.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 4 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

Message content.

+

job

+

No

+

Object

+

Information about the job status. For details, see Table 5.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + +
Table 5 job parameters

Parameter

+

Mandatory

+

Type

+

Description

+

job_id

+

Yes

+

Long

+

Job ID.

+

status_name

+

No

+

String

+

Name of job status.

+

status_desc

+

No

+

String

+

Status description. Causes and suggestions for the abnormal status.

+
+
+
+

Example Request

{
+    "name": "test",
+    "desc": "job for test",
+    "queue_name": "testQueue",
+    "manager_cu_number": 1,
+    "cu_number": 2,
+    "parallel_number": 1,
+    "tm_cus": 1,
+    "tm_slot_num": 1,
+    "log_enabled": true,
+    "obs_bucket": "bucketName",
+    "smn_topic": "topic",
+    "main_class": "org.apache.flink.examples.streaming.JavaQueueStream",
+    "restart_when_exception": false,
+    "entrypoint": "javaQueueStream.jar",
+    "entrypoint_args":"-windowSize 2000 -rate 3",
+    "dependency_jars": [
+        "myGroup/test.jar",
+        "myGroup/test1.jar"
+    ],
+    "dependency_files": [
+        "myGroup/test.csv",
+        "myGroup/test1.csv"
+    ]
+}
+
+

Example Response

{
+  "is_success": true,
+  "message": "A Flink job is created successfully.",
+  "job": {
+    "job_id": 138,
+    "status_name": "job_init",
+    "status_desc": ""
+  }
+}
+
+

Status Codes

Table 6 describes status codes.

+ +
+ + + + + + + + + + +
Table 6 Status codes

Status Code

+

Description

+

200

+

The custom Flink job is created successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0231.html b/docs/dli/api-ref/dli_02_0231.html new file mode 100644 index 00000000..8c787233 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0231.html @@ -0,0 +1,382 @@ + + +

Updating a Flink Jar Job

+

Function

This API is used to update custom jobs, which currently support the JAR format and run in dedicated queues.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

name

+

No

+

String

+

Name of the job. Length range: 0 to 57 characters.

+

desc

+

No

+

String

+

Job description. Length range: 0 to 512 characters.

+

queue_name

+

No

+

String

+

Name of a queue. Length range: 1 to 128 characters.

+

cu_number

+

No

+

Integer

+

Number of CUs selected for a job. The default value is 2.

+

manager_cu_number

+

No

+

Integer

+

Number of CUs on the management node selected by the user for a job, which corresponds to the number of Flink job managers. The default value is 1.

+

parallel_number

+

No

+

Integer

+

Number of parallel operations selected for a job. The default value is 1.

+

log_enabled

+

No

+

Boolean

+

Whether to enable the job log function.

+
  • true: indicates to enable the job log function.
  • false: indicates to disable the job log function.
  • Default value: false
+

obs_bucket

+

No

+

String

+

OBS path where users are authorized to save logs when log_enabled is set to true.

+

smn_topic

+

No

+

String

+

SMN topic. If a job fails, the system will send a message to users subscribed to the SMN topic.

+

main_class

+

No

+

String

+

Job entry class.

+

entrypoint_args

+

No

+

String

+

Job entry parameter. Multiple parameters are separated by spaces.

+

restart_when_exception

+

No

+

Boolean

+

Whether to enable the function of restart upon exceptions. The default value is false.

+

entrypoint

+

No

+

String

+

Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize the JAR file where the job main class is located.

+

dependency_jars

+

No

+

Array of Strings

+

Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize other dependency packages.

+

Example: myGroup/test.jar,myGroup/test1.jar.

+

dependency_files

+

No

+

Array of Strings

+

Name of the resource package that has been uploaded to the DLI resource management system. This parameter is used to customize dependency files.

+

Example: myGroup/test.csv,myGroup/test1.csv.

+

tm_cus

+

No

+

Integer

+

Number of CUs for each TaskManager. The default value is 1.

+

tm_slot_num

+

No

+

Integer

+

Number of slots in each TaskManager. The default value is (parallel_number*tm_cus)/(cu_number-manager_cu_number).

+

resume_checkpoint

+

No

+

Boolean

+

Whether the abnormal restart is recovered from the checkpoint.

+

resume_max_num

+

No

+

Integer

+

Maximum number of retry times upon exceptions. The unit is times/hour. Value range: -1 or greater than 0. The default value is -1, indicating that the number of times is unlimited.

+

checkpoint_path

+

No

+

String

+

Storage address of the checkpoint in the JAR file of the user. The path must be unique.

+

runtime_config

+

No

+

String

+

Customizes optimization parameters when a Flink job is running.

+

job_type

+

No

+

String

+

Job type.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

Message content.

+

job

+

No

+

object

+

Information about job update. For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + +
Table 4 job parameters

Parameter

+

Mandatory

+

Type

+

Description

+

update_time

+

No

+

Long

+

Time when a job is updated. The unit is millisecond.

+
+
+
+

Example Request

{
+    "name": "test1",
+    "desc": "job for test",
+    "job_type": "flink_jar_job",
+    "queue_name": "testQueue",
+    "manager_cu_number": 1,
+    "cu_number": 2,
+    "parallel_number": 1,
+    "log_enabled": false,
+    "main_class": "org.apache.flink.examples.streaming.JavaQueueStream",
+    "restart_when_exception": false,
+    "entrypoint": "FemaleInfoCollec.jar",
+    "dependency_jars": [
+        "myGroup/test.jar",
+        "myGroup/test1.jar"
+    ],
+    "dependency_files": [
+        "myGroup/test.csv",
+        "myGroup/test1.csv"
+    ]
+}
+
+

Example Response

{ 
+  "is_success": true,
+  "message": "The Flink job is updated successfully.",
+  "job": { 
+     "update_time": 1516952770835 
+  } 
+}
+
+

Status Codes

Table 5 describes status codes.

+ +
+ + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

The custom Flink job is updated successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0233.html b/docs/dli/api-ref/dli_02_0233.html new file mode 100644 index 00000000..aa650de6 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0233.html @@ -0,0 +1,177 @@ + + +

Running Jobs in Batches

+

Function

This API is used to trigger batch job running.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

job_ids

+

Yes

+

Array of Long

+

Batch job ID. You can obtain the job ID by calling the API for creating a job or the API for querying a job.

+

resume_savepoint

+

No

+

Boolean

+

Whether to restore a job from the latest savepoint.

+
  • If resume_savepoint is set to true, the job is restored from the latest savepoint.
  • If resume_savepoint is set to false, the job is started normally, not from a specific savepoint.
+

The default value is false.

+
+
+
+

Response

+
+ + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

Array elements

+

No

+

Array of Objects

+

The response message is returned as an array of objects. For details about the elements, see Table 4.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 4 Array element parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

Message content.

+
+
+
+

Example Request

{
+    "job_ids": [131,130,138,137],
+    "resume_savepoint": true
+}
+
+

Example Response

[
+    {
+        "is_success": "true",
+        "message": "The request for submitting DLI jobs is delivered successfully."
+    },
+    {
+        "is_success": "true",
+        "message": "The request for submitting DLI jobs is delivered successfully."
+    },
+    {
+        "is_success": "true",
+        "message": "The request for submitting DLI jobs is delivered successfully."
+    },
+    {
+        "is_success": "true",
+        "message": "The request for submitting DLI jobs is delivered successfully."
+    }
+]
+
+

Status Codes

Table 5 describes status codes.

+ +
+ + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

Jobs are successfully run in batches.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0234.html b/docs/dli/api-ref/dli_02_0234.html new file mode 100644 index 00000000..149da331 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0234.html @@ -0,0 +1,699 @@ + + +

Querying the Job List

+

Function

This API is used to query the list of the current user's jobs. You can set the job ID as the ID and query jobs whose IDs are greater than or less than the ID. You can also query jobs in a specific status, for example, the running status. By default, all jobs are queried.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

job_list

+

No

+

Object

+

Information about a job list. For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 4 job_list parameters

Parameter

+

Mandatory

+

Type

+

Description

+

total_count

+

No

+

Integer

+

Number of records in the query result.

+

jobs

+

No

+

Array of Objects

+

Information about a job. For details, see Table 5.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5 Jobs parameters

Parameter

+

Mandatory

+

Type

+

Description

+

job_id

+

No

+

Long

+

Job ID.

+

name

+

No

+

String

+

Name of the job. Length range: 0 to 57 characters.

+

desc

+

No

+

String

+

Job description. Length range: 0 to 512 characters.

+

user_name

+

No

+

String

+

Username. This parameter is valid only when show_detail is set to false.

+

job_type

+

No

+

String

+

Job type.

+
  • flink_sql_job: Flink SQL job
  • flink_jar_job: User-defined Flink job
+

status

+

No

+

String

+

Job status.

+

status_desc

+

No

+

String

+

Description of job status.

+

create_time

+

No

+

Long

+

Time when a job is created.

+

start_time

+

No

+

Long

+

Time when a job is started. The value 0 indicates that the process is not started.

+

duration

+

No

+

Long

+

Running duration of a job. Unit: ms. This parameter is valid only when show_detail is set to false.

+

root_id

+

No

+

Long

+

Parent job ID. This parameter is valid only when show_detail is set to false.

+

graph_editor_enabled

+

No

+

Boolean

+

Whether the flow diagram can be edited. Value true indicates that the flow diagram can be edited, and false indicates that the flow diagram cannot be edited.

+

has_savepoint

+

No

+

Boolean

+

Whether a job has a savepoint. Value true indicates that the job has a savepoint, and false indicates that the job does not have a savepoint.

+

user_id

+

No

+

String

+

ID of the user who creates the job. This parameter is valid only when show_detail is set to true.

+

project_id

+

No

+

String

+

ID of the project to which a job belongs. This parameter is valid only when show_detail is set to true.

+

sql_body

+

No

+

String

+

Stream SQL statement. This parameter is valid only when show_detail is set to false.

+

run_mode

+

No

+

String

+

Job running mode. The value can be shared_cluster, exclusive_cluster, or edge_node. This parameter is valid only when show_detail is set to true. The options are as follows:

+
  • shared_cluster: indicates that the job is running on a shared cluster.
  • exclusive_cluster: indicates that the job is running on an exclusive cluster.
  • edge_node: indicates that the job is running on an edge node.
+

job_config

+

No

+

Object

+

Job configuration. This parameter is valid only when show_detail is set to false. For details, see Table 6.

+

main_class

+

No

+

String

+

Main class of a JAR package. This parameter is valid only when show_detail is set to false.

+

entrypoint_args

+

No

+

String

+

Job running parameter of the JAR file. Multiple parameters are separated by spaces. This parameter is valid only when show_detail is set to true.

+

execution_graph

+

No

+

String

+

Job execution plan. This parameter is valid only when show_detail is set to false.

+

update_time

+

No

+

Long

+

Time when a job is updated. This parameter is valid only when show_detail is set to false.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 6 job_config parameters

Parameter

+

Mandatory

+

Type

+

Description

+

checkpoint_enabled

+

No

+

Boolean

+

Whether to enable the automatic job snapshot function.

+
  • true: The automatic job snapshot function is enabled.
  • false: The automatic job snapshot function is disabled.
+

The default value is false.

+

checkpoint_mode

+

No

+

String

+

Snapshot mode. There are two options:

+
  • exactly_once: indicates that data is processed only once.
  • at_least_once: indicates that data is processed at least once.
+

The default value is exactly_once.

+

checkpoint_interval

+

No

+

Integer

+

Snapshot interval. The unit is second. The default value is 10.

+

log_enabled

+

No

+

Boolean

+

Whether to enable the log storage function. The default value is false.

+

obs_bucket

+

No

+

String

+

Name of an OBS bucket.

+

smn_topic

+

No

+

String

+

SMN topic name. If a job fails, the system will send a message to users subscribed to the SMN topic.

+

root_id

+

No

+

Integer

+

Parent job ID.

+

edge_group_ids

+

No

+

Array of Strings

+

List of edge computing group IDs. Use commas (,) to separate multiple IDs.

+

manager_cu_number

+

No

+

Integer

+

Number of CUs of the management unit. The default value is 1.

+

cu_number

+

No

+

Integer

+

Number of CUs selected for a job. This parameter is valid only when show_detail is set to true.

+
  • Minimum value: 2
  • Maximum value: 400
+

The default value is 2.

+

parallel_number

+

No

+

Integer

+

Number of concurrent jobs set by a user. This parameter is valid only when show_detail is set to true.

+
  • Minimum value: 1
  • Maximum value: 2000
+

The default value is 1.

+

restart_when_exception

+

No

+

Boolean

+

Whether to enable the function of restart upon exceptions.

+

idle_state_retention

+

No

+

Integer

+

Expiration time.

+

udf_jar_url

+

No

+

String

+

Name of the package that has been uploaded to the DLI resource management system. The UDF Jar file of the SQL job is uploaded through this parameter.

+

dirty_data_strategy

+

No

+

String

+

Dirty data policy of a job.

+
  • 2:obsDir: Save. obsDir specifies the path for storing dirty data.
  • 1: Trigger a job exception
  • 0: Ignore
+

entrypoint

+

No

+

String

+

Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize the JAR file where the job main class is located.

+

dependency_jars

+

No

+

Array of Strings

+

Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize other dependency packages.

+

dependency_files

+

No

+

Array of Strings

+

Name of the resource package that has been uploaded to the DLI resource management system. This parameter is used to customize dependency files.

+

executor_number

+

No

+

Integer

+

Number of compute nodes in a job.

+

executor_cu_number

+

No

+

Integer

+

Number of CUs in a compute node.

+

resume_checkpoint

+

No

+

Boolean

+

Whether to restore data from the latest checkpoint when the system automatically restarts upon an exception. The default value is false.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": "true",
+    "message": "Querying of the job list succeeds.",
+    "job_list": {
+        "total_count": 26,
+        "jobs": [
+            {
+                "job_id": 146,
+                "name": "aaaaa",
+                "desc": "",
+                "user_name": "",
+                "job_type": "flink_sql_job",
+                "status": "job_init",
+                "status_desc": "",
+                "create_time": 1578892414688,
+                "duration": 0,
+                "root_id": -1,
+                "graph_editor_enabled": false,
+                "has_savepoint": false
+            }
+        ]
+    }
+}
+
+

Status Codes

Table 7 describes the status code.

+ +
+ + + + + + + + + + +
Table 7 Status codes

Status Code

+

Description

+

200

+

Job list query succeeds.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0235.html b/docs/dli/api-ref/dli_02_0235.html new file mode 100644 index 00000000..c5830011 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0235.html @@ -0,0 +1,724 @@ + + +

Querying Job Details

+

Function

This API is used to query details of a job.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

job_detail

+

No

+

Object

+

Job details. For details, see Table 3.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 job_detail parameters

Parameter

+

Mandatory

+

Type

+

Description

+

job_id

+

No

+

Long

+

Job ID.

+

name

+

No

+

String

+

Name of the job. Length range: 0 to 57 characters.

+

desc

+

No

+

String

+

Job description. Length range: 0 to 512 characters.

+

job_type

+

No

+

String

+

Job type.

+
  • flink_sql_job: Flink SQL job
  • flink_jar_job: User-defined Flink job
+

status

+

No

+

String

+

Job status.

+

Available job statuses are as follows:

+
  • job_init: The job is in the draft status.
  • job_submitting: The job is being submitted.
  • job_submit_fail: The job fails to be submitted.
  • job_running: The job is running. (After the job is submitted, a normal result is returned.)
  • job_running_exception (The job stops running due to an exception.)
  • job_downloading: The job is being downloaded.
  • job_idle: The job is idle.
  • job_canceling: The job is being stopped.
  • job_cancel_success: The job has been stopped.
  • job_cancel_fail: The job fails to be stopped.
  • job_savepointing: The savepoint is being created.
  • job_finish: The job is completed.
+

status_desc

+

No

+

String

+

Description of job status.

+

create_time

+

No

+

Long

+

Time when a job is created.

+

start_time

+

No

+

Long

+

Time when a job is started.

+

user_id

+

No

+

String

+

ID of the user who creates the job.

+

queue_name

+

No

+

String

+

Name of a queue. Length range: 1 to 128 characters.

+

project_id

+

No

+

String

+

ID of the project to which a job belongs.

+

sql_body

+

No

+

String

+

Stream SQL statement.

+

savepoint_path

+

No

+

String

+

Path for storing manually generated checkpoints.

+

run_mode

+

No

+

String

+

Job running mode. The options are as follows:

+
  • shared_cluster: indicates that the job is running on a shared cluster.
  • exclusive_cluster: indicates that the job is running on an exclusive cluster.
  • edge_node: indicates that the job is running on an edge node.
+

job_config

+

No

+

Object

+

Job configurations. Refer to Table 4 for details.

+

main_class

+

No

+

String

+

Main class of a JAR package, for example, org.apache.spark.examples.streaming.JavaQueueStream.

+

entrypoint_args

+

No

+

String

+

Running parameter of a JAR package job. Multiple parameters are separated by spaces.

+

execution_graph

+

No

+

String

+

Job execution plan.

+

update_time

+

No

+

Long

+

Time when a job is updated.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4 job_config parameters

Parameter

+

Mandatory

+

Type

+

Description

+

checkpoint_enabled

+

No

+

Boolean

+

Whether to enable the automatic job snapshot function.

+
  • true: The automatic job snapshot function is enabled.
  • false: The automatic job snapshot function is disabled.
+

The default value is false.

+

checkpoint_interval

+

No

+

Integer

+

Snapshot interval. The unit is second. The default value is 10.

+

checkpoint_mode

+

No

+

String

+

Snapshot mode. There are two options:

+
  • exactly_once: indicates that data is processed only once.
  • at_least_once: indicates that data is processed at least once.
+

The default value is exactly_once.

+

log_enabled

+

No

+

Boolean

+

Whether to enable the log storage function. The default value is false.

+

obs_bucket

+

No

+

String

+

Name of an OBS bucket.

+

root_id

+

No

+

Integer

+

Parent job ID.

+

edge_group_ids

+

No

+

Array of Strings

+

List of edge computing group IDs. Use commas (,) to separate multiple IDs.

+

manager_cu_number

+

No

+

Integer

+

Number of CUs of the management unit. The default value is 1.

+

graph_editor_enabled

+

No

+

Boolean

+

Whether to enable flow diagram editing. The default value is false.

+

graph_editor_data

+

No

+

String

+

Data of flow diagram editing. The default value is null.

+

executor_number

+

No

+

Integer

+

Number of compute nodes in a job.

+

executor_cu_number

+

No

+

Integer

+

Number of CUs in a compute node.

+

cu_number

+

No

+

Integer

+

Number of CUs selected for a job. This parameter is valid only when show_detail is set to true.

+
  • Minimum value: 2
  • Maximum value: 400
+

The default value is 2.

+

parallel_number

+

No

+

Integer

+

Number of concurrent jobs set by a user. This parameter is valid only when show_detail is set to true.

+
  • Minimum value: 1
  • Maximum value: 2000
+

The default value is 1.

+

smn_topic

+

No

+

String

+

SMN topic name. If a job fails, the system will send a message to users subscribed to this SMN topic.

+

restart_when_exception

+

No

+

Boolean

+

Whether to enable the function of restart upon exceptions.

+

resume_checkpoint

+

No

+

Boolean

+

Whether to restore data from the latest checkpoint when the system automatically restarts upon an exception. The default value is false.

+

resume_max_num

+

No

+

Integer

+

Maximum retry attempts. -1 indicates there is no upper limit.

+

checkpoint_path

+

No

+

String

+

Path for saving the checkpoint.

+

idle_state_retention

+

No

+

Integer

+

Expiration time.

+

config_url

+

No

+

String

+

OBS path of the config package uploaded by the user.

+

udf_jar_url

+

No

+

String

+

Name of the package that has been uploaded to the DLI resource management system. The UDF Jar file of the SQL job is uploaded through this parameter.

+

dirty_data_strategy

+

No

+

String

+

Dirty data policy of a job.

+
  • 2:obsDir: Save. obsDir specifies the path for storing dirty data.
  • 1: Trigger a job exception
  • 0: Ignore
+

entrypoint

+

No

+

String

+

Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize the JAR file where the job main class is located.

+

dependency_jars

+

No

+

Array of Strings

+

Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize other dependency packages.

+

dependency_files

+

No

+

Array of Strings

+

Name of the resource package that has been uploaded to the DLI resource management system. This parameter is used to customize dependency files.

+

tm_cus

+

No

+

int

+

Number of CUs per TaskManager node.

+

tm_slot_num

+

No

+

int

+

Number of slots per TaskManager node.

+

operator_config

+

No

+

String

+

Operator's parallelism degree. The operator ID and degree of parallelism are displayed in JSON format.

+

static_estimator_config

+

No

+

String

+

Estimation of static flow diagram resources.

+

runtime_config

+

No

+

String

+

Customizes optimization parameters when a Flink job is running.

+
+
+
+

Example Request

None

+
+

Example Response

+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

Querying details of a job succeeds.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0236.html b/docs/dli/api-ref/dli_02_0236.html new file mode 100644 index 00000000..db215ad4 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0236.html @@ -0,0 +1,207 @@ + + +

Querying the Job Execution Plan

+

Function

This API is used to query a job execution plan.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successful.

+

message

+

No

+

String

+

Message content.

+

execute_graph

+

No

+

Object

+

Response parameter for querying a job plan. For details, see Table 3.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 execute_graph parameters

Parameter

+

Mandatory

+

Type

+

Description

+

jid

+

No

+

String

+

ID of a Flink job.

+

name

+

No

+

String

+

Name of a Flink job.

+

isStoppable

+

No

+

Boolean

+

Whether a job can be stopped.

+

state

+

No

+

String

+

Execution status of a job.

+

start-time

+

No

+

Long

+

Time when a job is started.

+

end-time

+

No

+

Long

+

Time when a job is stopped.

+

duration

+

No

+

Long

+

Running duration of a job.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": "true",
+    "message": "Querying the job execution graph succeeds.",
+    "execute_graph": {
+        "jid": "4e966f43f2c90b0e1bf3188ecf55504b",
+        "name": "",
+        "isStoppable": false,
+        "state": "RUNNING",
+        "start-time": 1578904488436,
+        "end-time": -1,
+        "duration": 516274
+    }
+}
+
+

Status Codes

+
+ + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

Querying the job execution plan succeeds.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0241.html b/docs/dli/api-ref/dli_02_0241.html new file mode 100644 index 00000000..3e39ab1e --- /dev/null +++ b/docs/dli/api-ref/dli_02_0241.html @@ -0,0 +1,160 @@ + + +

Stopping Jobs in Batches

+

Function

This API is used to stop running jobs in batches.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

job_ids

+

Yes

+

Array of Long

+

Job ID.

+

trigger_savepoint

+

No

+

Boolean

+

Whether to create a savepoint for a job to store the job status information before stopping it. The data type is Boolean.

+
  • If this parameter is set to true, a savepoint is created.
  • If this parameter is set to false, no savepoint is created. The default value is false.
+
+
+
+

Response

+
+ + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

Array elements

+

No

+

Array of Objects

+

The returned response message. For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 4 Array element parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

Message content

+
+
+
+

Example Request

{
+  "job_ids": [128, 137],
+  "trigger_savepoint": false
+}
+
+

Example Response

[{"is_success":"true",
+"message": "The request for stopping DLI jobs is delivered successfully."}]
+
+

Status Codes

Table 5 describes status codes.

+ +
+ + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

The request of stopping a job is sent successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0242.html b/docs/dli/api-ref/dli_02_0242.html new file mode 100644 index 00000000..c0b3adda --- /dev/null +++ b/docs/dli/api-ref/dli_02_0242.html @@ -0,0 +1,115 @@ + + +

Deleting a Job

+

Function

This API is used to delete a Flink job in any state.

+

The job records will not be deleted.

+
+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": "true",
+    "message": "The job is deleted successfully."
+}
+
+

Status Code

Table 3 describes status codes.

+ +
+ + + + + + + + + + +
Table 3 Status codes

Status Code

+

Description

+

200

+

The job is deleted successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0243.html b/docs/dli/api-ref/dli_02_0243.html new file mode 100644 index 00000000..2ae9e3d0 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0243.html @@ -0,0 +1,128 @@ + + +

Deleting Jobs in Batches

+

Function

This API is used to batch delete jobs in any state.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

job_ids

+

Yes

+

[Long]

+

Job ID.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

{
+  "job_ids":[12,232]
+}
+
+

Example Response

[{
+    "is_success": "true",
+    "message": "The job is deleted successfully."
+}]
+
+

Status Codes

Table 4 describes status codes.

+ +
+ + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The job is deleted successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0244.html b/docs/dli/api-ref/dli_02_0244.html new file mode 100644 index 00000000..a5df3bf7 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0244.html @@ -0,0 +1,18 @@ + + +

APIs Related to Flink Job Templates

+

+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0245.html b/docs/dli/api-ref/dli_02_0245.html new file mode 100644 index 00000000..358cbb9c --- /dev/null +++ b/docs/dli/api-ref/dli_02_0245.html @@ -0,0 +1,273 @@ + + +

Creating a Template

+

Function

This API is used to create a user template for the DLI service. A maximum of 100 user templates can be created.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

name

+

Yes

+

String

+

Template name. The value can contain 1 to 64 characters.

+

desc

+

No

+

String

+

Template description. Length range: 0 to 512 characters.

+

sql_body

+

No

+

String

+

Stream SQL statement, which includes at least the following three parts: source, query, and sink. Length range: 0 to 2,048 characters.

+

tags

+

No

+

Array of Objects

+

Label of a Flink job template. For details, see Table 3.

+

job_type

+

No

+

String

+

Flink job template type. The default value is flink_sql_job. You can set this parameter to flink_sql_job or flink_opensource_sql_job only.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 3 tags parameter

Parameter

+

Mandatory

+

Type

+

Description

+

key

+

Yes

+

String

+

Tag key.

+

value

+

Yes

+

String

+

Tag value.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 4 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successful.

+

message

+

No

+

String

+

Message content.

+

template

+

No

+

Object

+

Information about the created template. For details, see Table 5.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5 template parameters

Parameter

+

Mandatory

+

Type

+

Description

+

template_id

+

No

+

Long

+

Template ID.

+

name

+

No

+

String

+

Template name.

+

desc

+

No

+

String

+

Template description.

+

create_time

+

No

+

Long

+

Time when the template is created.

+

job_type

+

No

+

String

+

Job template type

+
+
+
+

Example Request

{
+    "name": "simple_stream_sql",
+    "desc": "Example of quick start",
+    "sql_body": "select * from source_table"
+}
+
+

Example Response

{
+    "is_success": true,
+    "message": "A template is created successfully.",
+    "template": {
+        "template_id": 0,
+        "name": "IoT_example",
+        "desc": "Example of quick start",
+        "create_time": 1516952710040,
+        "job_type": "flink_sql_job"
+    }
+}
+
+

Status Codes

Table 6 describes status codes.

+ +
+ + + + + + + + + + +
Table 6 Status codes

Status Code

+

Description

+

200

+

A template is created successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0246.html b/docs/dli/api-ref/dli_02_0246.html new file mode 100644 index 00000000..fea57bcf --- /dev/null +++ b/docs/dli/api-ref/dli_02_0246.html @@ -0,0 +1,157 @@ + + +

Updating a Template

+

Function

This API is used to update existing templates in DLI.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

name

+

No

+

String

+

Template name. Length range: 0 to 57 characters.

+

desc

+

No

+

String

+

Template description. Length range: 0 to 512 characters.

+

sql_body

+

No

+

String

+

Stream SQL statement, which includes at least the following three parts: source, query, and sink. Length range: 0 to 1024 x 1024 characters.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

{
+    "name": "simple_stream_sql",
+    "desc": "Example of quick start",
+    "sql_body": "select * from source_table"
+}
+
+

Example Response

{
+    "is_success": "true",
+    "message": "The template is updated successfully."
+}
+
+

Status Codes

Table 4 describes status codes.

+ +
+ + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

A template is updated successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0247.html b/docs/dli/api-ref/dli_02_0247.html new file mode 100644 index 00000000..9e0d4d91 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0247.html @@ -0,0 +1,149 @@ + + +

Deleting a Template

+

Function

This API is used to delete a template. A template used by jobs can also be deleted.

+
+

URI

+
+

Request

None

+
+

Response

+
+

Example Request

None

+
+

Example Response

{
+    "is_success": "true",
+    "message": "The template is deleted successfully.",
+    "template": {
+        "template_id": 2
+    }
+}
+
+

Status Codes

Table 4 describes status codes.

+ +
+ + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

A template is deleted successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0248.html b/docs/dli/api-ref/dli_02_0248.html new file mode 100644 index 00000000..0117d25d --- /dev/null +++ b/docs/dli/api-ref/dli_02_0248.html @@ -0,0 +1,305 @@ + + +

Querying the Template List

+

Function

This API is used to query the job template list. Currently, only custom templates can be queried.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successful.

+

message

+

No

+

String

+

Message content.

+

template_list

+

No

+

Object

+

Information about the template list. For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 4 template_list parameters

Parameter

+

Mandatory

+

Type

+

Description

+

total_count

+

No

+

Integer

+

Total number of templates.

+

templates

+

No

+

Array of Objects

+

Detailed information about a template. For details, see Table 5.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5 templates parameters

Parameter

+

Mandatory

+

Type

+

Description

+

template_id

+

No

+

Integer

+

Template ID.

+

name

+

No

+

String

+

Template name.

+

desc

+

No

+

String

+

Template description.

+

create_time

+

No

+

Long

+

Time when the template is created.

+

update_time

+

No

+

Long

+

Time when the template is updated.

+

sql_body

+

No

+

String

+

Stream SQL statement. Contains at least the source, query, and sink parts.

+

job_type

+

No

+

String

+

Job template type.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": "true",
+    "message": "The template list is obtained successfully.",
+    "template_list": {
+        "total_count": 2,
+        "templates": [
+            {
+                "template_id": 2,
+                "name": "updatetest",
+                "desc": "Example of quick start",
+                "create_time": 1578748092000,
+                "update_time": 1578748092000,
+                "sql_body": "select * from source_table",
+                "job_type": "flink_sql_job"
+            },
+            {
+                "template_id": 1,
+                "name": "we",
+                "desc": "qwe",
+                "create_time": 1577951045000,
+                "update_time": 1577951045000,
+                "sql_body": ""
+            }
+        ]
+    }
+}
+
+

Status Codes

Table 6 describes status codes.

+ +
+ + + + + + + + + + +
Table 6 Status codes

Status Code

+

Description

+

200

+

Template list query succeeds.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0249.html b/docs/dli/api-ref/dli_02_0249.html new file mode 100644 index 00000000..97f94d2f --- /dev/null +++ b/docs/dli/api-ref/dli_02_0249.html @@ -0,0 +1,228 @@ + + +

Restarting, Scaling Out, and Scaling In Queues

+

Function

This API is used to restart, scale out, and scale in queues.

+

Only SQL queues in the Available status can be restarted. (The queue status is Available only after the SQL job is successfully executed.)

+
+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

action

+

Yes

+

String

+

Operations to be performed:

+
  • restart: Restart a service. Only queues for SQL jobs can be restarted.
  • scale_out: Scale out the queue
  • scale_in: Scale in the queue
+
NOTE:

Currently, only restart, scale_out, and scale_in operations are supported.

+
+

force

+

No

+

Boolean

+

Specifies whether to forcibly restart the queue. This parameter is optional when action is set to restart. The default value is false.

+

cu_count

+

No

+

Integer

+

Number of CUs to be scaled in or out. This parameter is optional when action is set to scale_out or scale_in. The value of cu_count must be a multiple of 16.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

job_id

+

No

+

String

+

Specifies the job ID returned when force is set to true.

+

queue_name

+

No

+

String

+

Name of the queue to be scaled in or out.

+

result

+

No

+

Boolean

+

Indicates the scaling result.

+
+
+
+

Example Request

+
+

Example Response

+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The operation is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + + + + +
Table 5 Error codes

Error Code

+

Error Message

+

DLI.0015

+

Token info for token is null, return.

+

DLI.0013

+

X-Auth-Token is not defined in request. It is mandatory. Please define and send the request.

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0250.html b/docs/dli/api-ref/dli_02_0250.html new file mode 100644 index 00000000..c68a464a --- /dev/null +++ b/docs/dli/api-ref/dli_02_0250.html @@ -0,0 +1,345 @@ + + +

Obtaining the Partition List

+

Function

This API is used to obtain the partition list.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

partitions

+

No

+

Object

+

Partition information. For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + + + + + + +
Table 4 partitions parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

total_count

+

Yes

+

Long

+

Total number of partitions.

+

partition_infos

+

Yes

+

Array of Objects

+

List of partitions. For details, see Table 5.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 5 partition_infos parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

partition_name

+

Yes

+

String

+

Partition name.

+

create_time

+

Yes

+

Long

+

Time when a partition is created.

+

last_access_time

+

Yes

+

Long

+

Last update time.

+

locations

+

No

+

Array of Strings

+

Path. This parameter is displayed only for non-DLI tables.

+

last_ddl_time

+

No

+

Long

+

Execution time of the last DDL statement, in seconds.

+

num_rows

+

No

+

Long

+

Total rows in the partition.

+

num_files

+

No

+

Long

+

Number of files in a partition.

+

total_size

+

No

+

Long

+

Total size of data in the partition, in bytes.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": true,
+    "message": "list partitions succeed",
+    "partitions": {
+        "total_count": 5,
+        "partition_infos": [
+            {
+                "partition_name": "name=test",
+                "create_time": 1579520179000,
+                "last_access_time": 1579520179000,
+                "locations": [
+                    "obs://test/partition"
+                ]
+            },
+            {
+                "partition_name": "name=test1",
+                "create_time": 1579521406000,
+                "last_access_time": 1579521406000,
+                "locations": [
+                    "obs://test/partition"
+                ]
+            },
+            {
+                "partition_name": "name=test2",
+                "create_time": 1579521884000,
+                "last_access_time": 1579521884000,
+                "locations": [
+                    "obs://test/partition"
+                ]
+            },
+            {
+                "partition_name": "name=test3",
+                "create_time": 1579522085000,
+                "last_access_time": 1579522085000,
+                "locations": [
+                    "obs://test/partition"
+                ]
+            },
+            {
+                "partition_name": "name=name1/age=age1",
+                "create_time": 1581409182000,
+                "last_access_time": 1581409182000,
+                "locations": [
+                    "obs://test/0117"
+                ],
+                "last_ddl_time": 1581409182,
+                "total_size": 2130,
+                "num_rows": -1,
+                "num_files": 2
+            }
+        ]
+    }
+}
+
+

Status Codes

Table 6 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 6 Status codes

Status Code

+

Description

+

200

+

The operation is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0252.html b/docs/dli/api-ref/dli_02_0252.html new file mode 100644 index 00000000..261abea5 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0252.html @@ -0,0 +1,268 @@ + + +

Viewing the Granted Permissions of a User

+

Function

This API is used to view the permissions granted to a user.

+
+

URI

+
+ + +
+ + + + + + + + + + + +
Table 1 URI parameters

Parameter

+

Mandatory

+

Type

+

Description

+

project_id

+

Yes

+

String

+

Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + +
Table 2 query parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

object

+

Yes

+

String

+

Data object to be assigned, which corresponds to the object in API permission assignment.

+
  • jobs.flink.Flink job ID, data in the specified job will be queried.
  • groups. Package group name, data in the specified package group will be queried.
  • resources.Package name, data in the specified package will be queried.
    NOTE:

    When you view the packages in a group, the object format is resources.package group name/package name.

    +
    +
+

offset

+

No

+

Integer

+

Specifies the offset of the page-based query.

+

limit

+

No

+

Integer

+

Number of records to be displayed of the page-based query.

+
+
+

The following is an example of the URL containing the query parameter:

+

GET /v1.0/{project_id}/authorization/privileges?object={object}

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

Yes

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

Yes

+

String

+

Indicates the system prompt. If execution succeeds, this parameter may be left blank.

+

object_name

+

Yes

+

String

+

Object name.

+

object_type

+

Yes

+

String

+

Object type.

+

privileges

+

No

+

Array of Object

+

Permission information. For details, see Table 4.

+

count

+

No

+

Integer

+

Total number of permissions.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + +
Table 4 privileges parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_admin

+

No

+

Boolean

+

Whether the database user is an administrator.

+

user_name

+

No

+

String

+

Name of the user who has permission on the current database.

+

privileges

+

No

+

Array of Strings

+

Permission of the user on the database.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": true,
+    "message": "",
+    "object_name": "9561",
+    "object_type": "flink",
+    "count": 2,
+    "privileges": [
+        {
+            "user_name": "testuser1",
+            "is_admin": true,
+            "privileges": [
+                "ALL"
+            ]
+        },
+        {
+            "user_name": "user1",
+            "is_admin": false,
+            "privileges": [
+                "GET"
+            ]
+        }
+    ]
+}
+
+

Status Codes

Table 5 describes the status code.

+ +
+ + + + + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

Authorization succeeds.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + +
Table 6 Error codes

Error Code

+

Error Message

+

DLI.0001

+

user input validation failed, object_type sql or saprk is not supported now

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0253.html b/docs/dli/api-ref/dli_02_0253.html new file mode 100644 index 00000000..db937421 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0253.html @@ -0,0 +1,166 @@ + + +

Changing the Owner of a Group or Resource Package

+

Function

This API is used to change the owner of a program package.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

new_owner

+

Yes

+

String

+

New username. The name contains 5 to 32 characters, including only digits, letters, underscores (_), and hyphens (-). It cannot start with a digit.

+

group_name

+

Yes

+

String

+

Group name. The name contains a maximum of 64 characters. Only digits, letters, periods (.), underscores (_), and hyphens (-) are allowed.

+

resource_name

+

No

+

String

+

Package name. The name can contain only digits, letters, underscores (_), exclamation marks (!), hyphens (-), and periods (.), but cannot start with a period. The length (including the file name extension) cannot exceed 128 characters.

+

This parameter is mandatory if you want to change the owner of a resource package in a group.

+
+
+

group_name and resource_name can be used independently or together.

+
  • To change the owner of a group, use group_name.
  • To change the owner of a resource package, use resource_name.
  • To change the owner of a resource package in a group, use group_name and resource_name at the same time.
+
+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

{
+    "new_owner": "scuser1",
+    "group_name": "groupName"
+}
+
+

Example Response

{
+    "is_success": true,
+    "message": ""
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The modification operations are successful.

+

404

+

Request error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + +
Table 5 Error codes

Error Code

+

Error Message

+

DLI.0002

+

No such user. userName:ssssss.

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0254.html b/docs/dli/api-ref/dli_02_0254.html new file mode 100644 index 00000000..f19eb0bc --- /dev/null +++ b/docs/dli/api-ref/dli_02_0254.html @@ -0,0 +1,160 @@ + + +

Exporting a Flink Job

+

Function

This API is used to export Flink job data.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

obs_dir

+

Yes

+

String

+

OBS path for storing exported job files.

+

is_selected

+

Yes

+

Boolean

+

Whether to export a specified job.

+

job_selected

+

No

+

Array of Longs

+

This parameter indicates the ID set of jobs to be exported if is_selected is set to true.

+
NOTE:

This parameter is mandatory when is_selected is set to true.

+
+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

zip_file

+

No

+

Array of Strings

+

Name of the ZIP package containing exported jobs. The ZIP package is stored on OBS.

+
+
+
+

Example Request

{
+    "obs_dir": "obs-test",
+    "is_selected": true,
+    "job_selected": [100]
+}
+
+

Example Response

{
+    "is_success": true,
+    "message": "The job is exported successfully.",
+    "zip_file": ["obs-test/aggregate_1582677879475.zip"]
+}
+
+

Status Codes

Table 4 describes status codes.

+ +
+ + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The job is exported successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0255.html b/docs/dli/api-ref/dli_02_0255.html new file mode 100644 index 00000000..e6f3a5fa --- /dev/null +++ b/docs/dli/api-ref/dli_02_0255.html @@ -0,0 +1,197 @@ + + +

Importing a Flink Job

+

Function

This API is used to import Flink job data.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

zip_file

+

Yes

+

String

+

Path of the job ZIP file imported from OBS. You can enter a folder path to import all ZIP files in the folder.

+
NOTE:

The folder can contain only .zip files.

+
+

is_cover

+

No

+

Boolean

+

Whether to overwrite an existing job if the name of the imported job is the same as that of the existing job in the service.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

job_mapping

+

No

+

Array of Objects

+

Information about the imported job. For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + +
Table 4 job_mapping parameter description

Parameter

+

Mandatory

+

Type

+

Description

+

old_job_id

+

No

+

Long

+

ID of a job before being imported.

+

new_job_id

+

No

+

Long

+

ID of a job after being imported. If is_cover is set to false and a job with the same name exists in the service, the returned value of this parameter is -1.

+

remark

+

No

+

String

+

Results about an imported job.

+
+
+
+

Example Request

{
+    "zip_file": "test/aggregate_1582677879475.zip",
+    "is_cover": true
+}
+
+

Example Response

{
+    "is_success": true,
+    "message": "The job is imported successfully.",
+    "job_mapping": [
+        {
+            "old_job_id": 100,
+            "new_job_id": 200,
+            "remark": "Job successfully created"
+        }
+    ]
+}
+
+

Status Codes

Table 5 describes status codes.

+ +
+ + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

The job is imported successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0256.html b/docs/dli/api-ref/dli_02_0256.html new file mode 100644 index 00000000..177d8bc0 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0256.html @@ -0,0 +1,195 @@ + + +

Querying Authorization of an Enhanced Datasource Connection

+

Function

This API is used to query the authorization about an enhanced datasource connection.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

connection_id

+

No

+

String

+

Enhanced datasource connection ID, which is used to identify the UUID of a datasource connection.

+

privileges

+

No

+

Array of Object

+

Datasource connection information about each authorized project. For details, see Table 3.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + +
Table 3 privileges parameters

Parameter

+

Mandatory

+

Type

+

Description

+

object

+

No

+

String

+

Object information during authorization.

+

applicant_project_id

+

No

+

String

+

ID of an authorized project.

+

privileges

+

No

+

Array of Strings

+

Authorization operation information.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": true,
+    "message": "",
+    "privileges": [
+        {
+            "object": "edsconnections.503fc86a-5e60-4349-92c2-7e399404fa8a",
+            "applicant_project_id": "330e068af1334c9782f4226acc00a2e2",
+            "privileges": ["BIND_QUEUE"]
+        }
+    ],
+    "connection_id": "503fc86a-5e60-4349-92c2-7e399404fa8a"
+}
+
+

Status Codes

Table 4 describes status codes.

+ +
+ + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + +
Table 5 Error codes

Error Code

+

Error Message

+

DLI.0001

+

Connection 503fc86a-5e60-4349-92c2-7e399404fa8a does not exist.

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0257.html b/docs/dli/api-ref/dli_02_0257.html new file mode 100644 index 00000000..97ecb72a --- /dev/null +++ b/docs/dli/api-ref/dli_02_0257.html @@ -0,0 +1,21 @@ + + + +

Global Variable-related APIs

+ +

+
+ +
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0258.html b/docs/dli/api-ref/dli_02_0258.html new file mode 100644 index 00000000..83eaaec1 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0258.html @@ -0,0 +1,168 @@ + + +

Creating a Global Variable

+

Function

This API is used to create a global variable.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

var_name

+

Yes

+

String

+

A global variable name can contain a maximum of 128 characters, including only digits, letters, and underscores (_), but cannot start with an underscore (_) or contain only digits.

+

var_value

+

Yes

+

String

+

Global variable value.

+

is_sensitive

+

No

+

Boolean

+

Whether to set a variable as a sensitive variable. The default value is false.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

Message content.

+
+
+
+

Example Request

{
+    "var_name": "string",
+    "var_value": "string",
+    "is_sensitive": true
+}
+
+

Example Response

{
+    "is_success": true,
+    "message": "string"
+}
+
+

Status Codes

Table 4 describes status codes.

+ +
+ + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

A variable is created successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + + + + +
Table 5 Error codes

Error Code

+

Error Message

+

DLI.0001

+

Parameter check errors occur.

+

DLI.0999

+

The object exists.

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0259.html b/docs/dli/api-ref/dli_02_0259.html new file mode 100644 index 00000000..2cb2db44 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0259.html @@ -0,0 +1,136 @@ + + +

Deleting a Global Variable

+

Function

This API is used to delete a global variable.

+

Only the user who creates a global variable can delete the variable.

+
+
+

URI

+
+

Request

None

+
+

Response

+
+

Example Request

None

+
+

Example Response

{
+    "is_success": true,
+    "message": "string"
+}
+
+

Status Codes

Table 3 describes status codes.

+ +
+ + + + + + + + + + +
Table 3 Status codes

Status Code

+

Description

+

200

+

A variable is deleted successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + + + + +
Table 4 Error codes

Error Code

+

Error Message

+

DLI.0001

+

Parameter check errors occur.

+

DLI.0999

+

Server-side errors occur.

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0260.html b/docs/dli/api-ref/dli_02_0260.html new file mode 100644 index 00000000..cb7d8268 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0260.html @@ -0,0 +1,162 @@ + + +

Modifying a Global Variable

+

Function

This API is used to modify a global variable.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

var_value

+

Yes

+

String

+

Global variable value.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

Message content.

+
+
+
+

Example Request

{
+    "var_value": "string"
+}
+
+

Example Response

{
+    "is_success": true,
+    "message": "string"
+}
+
+

Status Codes

Table 4 describes status codes.

+ +
+ + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

A variable is modified successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + + + + + + + +
Table 5 Error codes

Error Code

+

Error Message

+

DLI.0001

+

Parameter check errors occur.

+

DLI.0999

+

Server-side errors occur.

+

DLI.12004

+

The job does not exist. Check the reason or create a job.

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0261.html b/docs/dli/api-ref/dli_02_0261.html new file mode 100644 index 00000000..85d9d395 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0261.html @@ -0,0 +1,278 @@ + + +

Querying All Global Variables

+

Function

This API is used to query information about all global variables in the current project.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

count

+

No

+

Integer

+

Number of global variables.

+

global_vars

+

No

+

Array of Objects

+

Global variable information. For details, see Table 4.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 4 global_vars parameters

Parameter

+

Mandatory

+

Type

+

Description

+

id

+

No

+

Long

+

Global variable ID.

+

var_name

+

Yes

+

String

+

Global variable name.

+

var_value

+

Yes

+

String

+

Global variable value.

+

project_id

+

No

+

String

+

Project ID.

+

user_id

+

No

+

String

+

User ID.

+

user_name

+

No

+

String

+

Username

+

is_sensitive

+

No

+

Boolean

+

Whether to set a variable as a sensitive variable.

+

create_time

+

No

+

Long

+

Creation time

+

update_time

+

No

+

Long

+

Update time

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": true,
+    "message": "string",
+    "count": 1,
+    "global_vars": [
+        {
+            "id": 0,
+            "var_name": "string",
+            "var_value": "string",
+            "project_id": "string",
+            "user_id": "string"
+        }
+    ]
+}
+
+

Status Codes

+
+ + + + + + + + + + +
Table 5 Status codes

Status Code

+

Description

+

200

+

All variables are queried successfully.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + + + + +
Table 6 Error codes

Error Code

+

Error Message

+

DLI.0001

+

Parameter check errors occur.

+

DLI.0999

+

Server-side errors occur.

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0284.html b/docs/dli/api-ref/dli_02_0284.html new file mode 100644 index 00000000..3a9c2dcc --- /dev/null +++ b/docs/dli/api-ref/dli_02_0284.html @@ -0,0 +1,152 @@ + + +

Creating an Address Connectivity Test Request

+

Function

This API is used to send an address connectivity test request to a specified queue and insert the test address into the table.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

address

+

Yes

+

String

+

Test address, in the format of IP address:port or domain name:port.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

Yes

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

Yes

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

task_id

+

Yes

+

String

+

Request ID

+
+
+
+

Example Request

{
+    "address": "iam.xxx.com:443"
+}
+
+

Example Response

{
+    "is_success": true,
+    "message": "check connectivity to address:iam.xxx.com with port: 443 successfully",
+    "task_id": 9
+}
+
+

Status Codes

Table 4 describes status codes.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The job is created successfully.

+

400

+

Request failure.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0285.html b/docs/dli/api-ref/dli_02_0285.html new file mode 100644 index 00000000..e4b536aa --- /dev/null +++ b/docs/dli/api-ref/dli_02_0285.html @@ -0,0 +1,137 @@ + + +

Querying Connectivity Test Details of a Specified Address

+

Function

This API is used to query the connectivity test result after the test is submitted.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

Yes

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

Yes

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

connectivity

+

Yes

+

String

+

Indicates the connectivity test result.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": true,
+    "message": "Get node connectivity status successfully for addressId:9",
+    "connectivity": "REACHABLE"
+}
+
+

Status Codes

Table 3 describes status codes.

+ +
+ + + + + + + + + + + + + +
Table 3 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request failure.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0291.html b/docs/dli/api-ref/dli_02_0291.html new file mode 100644 index 00000000..4add05b1 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0291.html @@ -0,0 +1,233 @@ + + +

Creating a Scheduled CU Change

+

Function

This API is used to create a scheduled CU change, that is, to create a scheduled CU change for a specified queue.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

plan_name

+

Yes

+

String

+

Name of a CU change. The name can contain only digits, letters, and underscores (_), but cannot contain only digits or start with an underscore (_).

+

target_cu

+

Yes

+

Integer

+

Target value of the CU in the scheduled CU change.

+

start_hour

+

Yes

+

Integer

+

Specifies the start hour of the scheduled CU change.

+

start_minute

+

Yes

+

Integer

+

Specifies the start minute of a scheduled CU change.

+

repeat_day

+

Yes

+

Array of strings

+

Specifies the repetition period of a scheduled CU change. You can select one or more days from Monday to Sunday, or do not select any day. If this parameter is not specified, the scheduled CU change will be executed at the time specified by start_hour: start_minute after the current time. Example:

+
"repeat_day": ["MONDAY", "TUESDAY", "WEDNESDAY","SUNDAY"]
+

valid_date_begin

+

No

+

Long

+

Start time of the validity period (13-digit timestamp)

+

valid_date_end

+

No

+

Long

+

End time of the validity period (13-digit timestamp)

+

activate

+

No

+

Boolean

+

Indicates whether the scheduled CU change is activated. The default value is true, indicating that the change is activated.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

{
+    "plan_name": "plan_A",
+    "target_cu": 64,
+    "start_hour": 20,
+    "start_minute": 30,
+    "repeat_day": [
+        "MONDAY",
+        "TUESDAY",
+        "WEDNESDAY",
+        "SUNDAY"
+    ],
+    "valid_date_begin": 1590949800000,
+    "valid_date_end": 1591727400000,
+    "activate": true
+}
+
+

Example Response

{
+    "is_success": true,
+    "message": ""
+}
+
+

Status Codes

Table 4 describes status codes.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

Scheduled CU change created successfully.

+

400

+

Request failure.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + +
Table 5 Error codes

Error Code

+

Error Message

+

DLI.0999

+

Queue plans create failed. The plan plan_A can not generate a scale plan, please check all time settings for the plan.

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0292.html b/docs/dli/api-ref/dli_02_0292.html new file mode 100644 index 00000000..f56f17db --- /dev/null +++ b/docs/dli/api-ref/dli_02_0292.html @@ -0,0 +1,282 @@ + + +

Viewing a Scheduled CU Change

+

Function

This API is used to query the scheduled CU changes and list the changes of a specified queue.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

plans

+

No

+

Array of Objects

+

Scheduled scaling plan information. For details, see Table 3.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 plans parameters

Parameter

+

Mandatory

+

Type

+

Description

+

id

+

No

+

Long

+

ID of a scheduled CU change.

+

plan_name

+

No

+

String

+

Name of a CU change. The name can contain only digits, letters, and underscores (_), but cannot contain only digits or start with an underscore (_).

+

target_cu

+

No

+

Integer

+

Target value of the CU in the scheduled CU change.

+

start_hour

+

No

+

Integer

+

Start hour of a queue scaling plan, in the 24-hour format.

+

start_minute

+

No

+

Integer

+

Specifies the start minute of a scheduled CU change.

+

repeat_day

+

Yes

+

Array of strings

+

Specifies the repetition period of a scheduled CU change. You can select one or more days from Monday to Sunday, or do not select any day. If this parameter is not specified, the scheduled CU change will be executed at the time specified by start_hour: start_minute after the current time. Example:

+
"repeat_day": ["MONDAY", "TUESDAY", "WEDNESDAY","SUNDAY"]
+

valid_date_begin

+

No

+

Long

+

Start time of the validity period (13-digit timestamp)

+

valid_date_end

+

No

+

Long

+

End time of the validity period (13-digit timestamp)

+

activate

+

No

+

Boolean

+

Indicates whether the scheduled CU change is activated. The default value is true, indicating that the change is activated.

+

last_execute_time

+

No

+

Long

+

Time when the scaling plan was last executed.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": true,
+    "message": "",
+    "plans": [
+        {
+            "id": 1,
+            "plan_name": "plan_Aa",
+            "target_cu": 32,
+            "start_hour": 11,
+            "start_minute": 15,
+            "repeat_day": [
+                "MONDAY",
+                "TUESDAY",
+                "WEDNESDAY",
+                "SUNDAY"
+            ],
+            "activate": true,
+            "last_execute_time": 1593573428857
+        },
+        {
+            "id": 6,
+            "plan_name": "plan_Ab",
+            "target_cu": 16,
+            "start_hour": 14,
+            "start_minute": 25,
+            "repeat_day": [
+                "MONDAY",
+                "TUESDAY",
+                "WEDNESDAY",
+                "SUNDAY",
+                "THURSDAY",
+                "FRIDAY",
+                "SATURDAY"
+            ],
+            "activate": true,
+            "last_execute_time": 1593584829260
+        }
+    ]
+}
+
+

Status Codes

Table 4 describes status codes.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request failure.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + +
Table 5 Error codes

Error Code

+

Error Message

+

DLI.0008

+

There is no queue named queue1.

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0293.html b/docs/dli/api-ref/dli_02_0293.html new file mode 100644 index 00000000..a378f3d5 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0293.html @@ -0,0 +1,157 @@ + + +

Deleting Scheduled CU Changes in Batches

+

Function

This API is used to delete scheduled CU changes in batches.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

plan_ids

+

Yes

+

Array of Long

+

IDs of the scheduled CU changes (scaling plans) to delete. To obtain the IDs, see Viewing a Scheduled CU Change. Example: "plan_ids": [8,10]

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

{
+   "plan_ids": [3,4]
+}
+
+

Example Response

{
+    "is_success": true,
+    "message": ""
+}
+
+

Status Codes

Table 4 describes status codes.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

Deletion succeeded.

+

400

+

Request failure.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + +
Table 5 Error codes

Error Code

+

Error Message

+

DLI.0002

+

The plans with id 8, 9 do not exist.

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0294.html b/docs/dli/api-ref/dli_02_0294.html new file mode 100644 index 00000000..260fab09 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0294.html @@ -0,0 +1,142 @@ + + +

Deleting a Scheduled CU Change

+

Function

This API is used to delete a scheduled CU change for a queue with a specified ID.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": true,
+    "message": ""
+}
+
+

Status Codes

Table 3 describes status codes.

+ +
+ + + + + + + + + + + + + +
Table 3 Status codes

Status Code

+

Description

+

200

+

The directory is successfully deleted.

+

400

+

Request failure.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + +
Table 4 Error codes

Error Code

+

Error Message

+

DLI.0002

+

The plan with id 8 does not exist.

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0295.html b/docs/dli/api-ref/dli_02_0295.html new file mode 100644 index 00000000..e9c76800 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0295.html @@ -0,0 +1,256 @@ + + +

Modifying a Scheduled CU Change

+

Function

This API is used to modify a scheduled CU change for a queue with a specified ID.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

plan_name

+

Yes

+

String

+

Name of a CU change. The name can contain only digits, letters, and underscores (_), but cannot contain only digits or start with an underscore (_).

+

target_cu

+

Yes

+

Integer

+

Target value of the CU in the scheduled CU change.

+

start_hour

+

Yes

+

Integer

+

Specifies the start hour of the scheduled CU change.

+

start_minute

+

Yes

+

Integer

+

Specifies the start minute of a scheduled CU change.

+

repeat_day

+

Yes

+

Array of strings

+

Specifies the repetition period of a scheduled CU change. You can select one or more days from Monday to Sunday, or do not select any day. If this parameter is not specified, the scheduled CU change will be executed at the time specified by start_hour: start_minute after the current time. Example:

+
"repeat_day": ["MONDAY", "TUESDAY", "WEDNESDAY","SUNDAY"]
+

valid_date_begin

+

No

+

Long

+

Start time of the validity period (13-digit timestamp)

+

valid_date_end

+

No

+

Long

+

End time of the validity period (13-digit timestamp)

+

activate

+

No

+

Boolean

+

Indicates whether the scheduled CU change is activated. The default value is true, indicating that the change is activated.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

queue_name

+

No

+

String

+

Name of the queue for which the scheduled CU change is to be modified. The name contains 1 to 128 characters. Use commas (,) to separate multiple queue names.

+

plan_id

+

No

+

String

+

ID of scheduled CU change to be modified. Use commas (,) to separate multiple IDs.

+
+
+
+

Example Request

{
+    "plan_name": "plan_Ad",
+    "target_cu": 64,
+    "start_hour": 19,
+    "start_minute": 30,
+    "repeat_day": ["THURSDAY","friday"],
+    "activate": false
+}
+
+

Example Response

{
+    "is_success": true,
+    "message": "",
+    "queue_name": "queue1",
+    "plan_id": 3
+}
+
+

Status Codes

Table 4 describes status codes.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The modification operations are successful.

+

400

+

Request failure.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + +
Table 5 Error codes

Error Code

+

Error Message

+

DLI.0999

+

Queue plans create failed. The plan plan_A can not generate a scale plan, please check all time

+

settings for the plan.

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0296.html b/docs/dli/api-ref/dli_02_0296.html new file mode 100644 index 00000000..6d55a562 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0296.html @@ -0,0 +1,395 @@ + + +

Querying the Job Execution Progress

+

Function

This API is used to obtain the job execution progress. If a job is being executed, information about its subjobs can be obtained. If a job has just started or has ended, information about its subjobs cannot be obtained.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

Yes

+

Boolean

+

Indicates whether the request is successfully sent. Value true indicates that the request is successfully sent.

+

message

+

Yes

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

job_id

+

No

+

String

+

ID of a job returned after a job is generated and submitted by using SQL statements. The job ID can be used to query the job status and results.

+

status

+

Yes

+

String

+

Job status. The status can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELLED.

+

sub_job_id

+

No

+

Integer

+

ID of a subjob that is running. If the subjob is not running or it is already finished, the subjob ID may be empty.

+

progress

+

No

+

Double

+

Progress of a running subjob or the entire job. The value can only be a rough estimate of the subjob progress and does not indicate the detailed job progress.

+
  • If the job is just started or being submitted, the progress is displayed as 0. If the job execution is complete, the progress is displayed as 1. In this case, progress indicates the running progress of the entire job. Because no subjob is running, sub_job_id is not displayed.
  • If a subjob is running, the running progress of the subjob is displayed. The calculation method of progress is as follows: Number of completed tasks of the subjob/Total number of tasks of the subjob. In this case, progress indicates the running progress of the subjob, and sub_job_id indicates the subjob ID.
+

sub_jobs

+

No

+

Array of Object

+

Details about a subjob of a running job. A job may contain multiple subjobs. For details, see Table 3.

+
+
+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Parameters in the sub_jobs field

Parameter

+

Mandatory

+

Type

+

Description

+

id

+

No

+

Integer

+

Subjob ID, corresponding to jobId of the open-source spark JobData.

+

name

+

No

+

String

+

Subjob name, corresponding to the name of the open-source spark JobData.

+

description

+

No

+

String

+

Description of a subjob, corresponding to the description of the open-source spark JobData.

+

submission_time

+

No

+

String

+

Submission time of a subjob, corresponding to the submissionTime of open-source Spark JobData.

+

completion_time

+

No

+

String

+

Completion time of a subjob, corresponding to the completionTime of the open-source Spark JobData.

+

stage_ids

+

No

+

Array of Integer

+

Stage ID of the subjob, corresponding to the stageIds of the open-source spark JobData.

+

job_group

+

No

+

String

+

ID of a DLI job, corresponding to the jobGroup of open-source Spark JobData.

+

status

+

No

+

String

+

Subjob status, corresponding to the status of open-source spark JobData.

+

num_tasks

+

No

+

Integer

+

Number of subjobs, corresponding to numTasks of the open-source Spark JobData.

+

num_active_tasks

+

No

+

Integer

+

Number of running tasks in a subjob, corresponding to numActiveTasks of the open-source Spark JobData.

+

num_completed_tasks

+

No

+

Integer

+

Number of tasks that have been completed in a subjob, corresponding to numCompletedTasks of open-source Spark JobData.

+

num_skipped_tasks

+

No

+

Integer

+

Number of tasks skipped in a subjob, corresponding to numSkippedTasks of open-source Spark JobData.

+

num_failed_tasks

+

No

+

Integer

+

Number of tasks that failed in a subjob, corresponding to numFailedTasks of open-source Spark JobData.

+

num_killed_tasks

+

No

+

Integer

+

Number of tasks killed in the subjob, corresponding to numKilledTasks of the open-source Spark JobData.

+

num_completed_indices

+

No

+

Integer

+

Subjob completion index, corresponding to the numCompletedIndices of the open-source Spark JobData.

+

num_active_stages

+

No

+

Integer

+

Number of stages that are running in the subjob, corresponding to numActiveStages of the open-source Spark JobData.

+

num_completed_stages

+

No

+

Integer

+

Number of stages that have been completed in the subjob, corresponding to numCompletedStages of the open-source Spark JobData.

+

num_skipped_stages

+

No

+

Integer

+

Number of stages skipped in the subjob, corresponding to numSkippedStages of the open-source Spark JobData.

+

num_failed_stages

+

No

+

Integer

+

Number of failed stages in a subjob, corresponding to numFailedStages of the open-source Spark JobData.

+

killed_tasks_summary

+

No

+

Map<string,integer>

+

Summary of the killed tasks in the subjob, corresponding to killedTasksSummary of open-source spark JobData.

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": true,
+    "message": "",
+    "job_id": "85798b38-ae44-48eb-bb90-7cf0dcdafe7b",
+    "status": "RUNNING",
+    "sub_job_id": 0,
+    "progress": 0,
+    "sub_jobs": [
+        {
+            "id": 0,
+            "name": "runJob at FileFormatWriter.scala:266",
+            "submission_time": "Mon Jul 27 17:24:03 CST 2020",
+            "stage_ids": [
+                0
+            ],
+            "job_group": "85798b38-ae44-48eb-bb90-7cf0dcdafe7b",
+            "status": "RUNNING",
+            "num_tasks": 1,
+            "num_active_tasks": 1,
+            "num_completed_tasks": 0,
+            "num_skipped_tasks": 0,
+            "num_failed_tasks": 0,
+            "num_killed_tasks": 0,
+            "num_completed_indices": 0,
+            "num_active_stages": 1,
+            "num_completed_stages": 0,
+            "num_skipped_stages": 0,
+            "num_failed_stages": 0
+        }
+    ]
+}
+
+

Status Codes

Table 4 describes the status code.

+ +
+ + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + +
Table 5 Error codes

Error Code

+

Error Message

+

DLI.0999

+

The queue backend version is too old or the queue is busy.

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0297.html b/docs/dli/api-ref/dli_02_0297.html new file mode 100644 index 00000000..201a5aa5 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0297.html @@ -0,0 +1,17 @@ + + + +

Agency-related APIs

+ +

+
+ +
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0298.html b/docs/dli/api-ref/dli_02_0298.html new file mode 100644 index 00000000..42748d80 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0298.html @@ -0,0 +1,167 @@ + + +

Obtaining DLI Agency Information

+

Function

This API is used to obtain the agency information of a DLI user.

+
+

URI

+
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+

version

+

No

+

String

+

Agency version information.

+

current_roles

+

No

+

Array of Strings

+

Role. The supported values are as follows:

+

obs_adm: Administrator permissions for accessing and using the Object Storage Service.

+

dis_adm: Administrator permissions for using Data Ingestion Service data as the data source

+

ctable_adm: Administrator permissions for accessing and using the CloudTable service

+

vpc_netadm: Administrator permissions for using the Virtual Private Cloud service

+

smn_adm: Administrator permissions for using the Simple Message Notification service

+

te_admin: Tenant Administrator permissions

+
+
+
+

Example Request

None

+
+

Example Response

{
+    "is_success": true,
+    "message": "",
+    "version": "v2",
+    "current_roles": [
+        "ctable_adm",
+        "vpc_netadm",
+        "ief_adm",
+        "dis_adm",
+        "smn_adm",
+        "obs_adm"
+    ]
+}
+
+

Status Codes

Table 3 describes status codes.

+ +
+ + + + + + + + + + + + + + + + +
Table 3 Status codes

Status Code

+

Description

+

200

+

The agency information is obtained.

+

400

+

Request failure.

+

404

+

Not found.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+ +
+ + + + + + + + + + +
Table 4 Error codes

Error Code

+

Error Message

+

DLI.0002

+

The object does not exist.

+

DLI.0999

+

An internal error occurred.

+
+
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0299.html b/docs/dli/api-ref/dli_02_0299.html new file mode 100644 index 00000000..9ab4816f --- /dev/null +++ b/docs/dli/api-ref/dli_02_0299.html @@ -0,0 +1,140 @@ + + +

Creating a DLI Agency

+

Function

This API is used to create an agency for a DLI user.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

roles

+

Yes

+

Array of Strings

+

Role. Currently, only obs_adm, dis_adm, ctable_adm, vpc_netadm, smn_adm, and te_admin are supported.

+

obs_adm: Administrator permissions for accessing and using the Object Storage Service.

+

dis_adm: Administrator permissions for using Data Ingestion Service data as the data source

+

ctable_adm: Administrator permissions for accessing and using the CloudTable service

+

vpc_netadm: Administrator permissions for using the Virtual Private Cloud service

+

smn_adm: Administrator permissions for using the Simple Message Notification service

+

te_admin: Tenant Administrator permissions

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If execution succeeds, the parameter setting may be left blank.

+
+
+
+

Example Request

{
+    "roles": [
+        "ctable_adm",
+        "vpc_netadm",
+        "dis_adm",
+        "smn_adm",
+        "obs_adm"
+    ]
+}
+
+

Example Response

{
+    "is_success": true,
+    "message": ""
+}
+
+

Status Codes

Table 4 describes status codes.

+ +
+ + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The job is created successfully.

+

400

+

Request failure.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0306.html b/docs/dli/api-ref/dli_02_0306.html new file mode 100644 index 00000000..b6fa38b7 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0306.html @@ -0,0 +1,23 @@ + + + +

Getting Started

+ +

+
+ +
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0307.html b/docs/dli/api-ref/dli_02_0307.html new file mode 100644 index 00000000..88766a44 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0307.html @@ -0,0 +1,51 @@ + + +

Creating a Queue

+

Scenario Description

This section describes how to create and query a queue using APIs.

+
+

Constraints

+
+

Involved APIs

+
+

Procedure

  1. Create a queue.
    • API

      URI format: POST /v1.0/{project_id}/queues

      + +
    • Request example
      • Description: Create an SQL queue named queue1 in the project whose ID is 48cc2c48765f481480c7db940d6409d1.
      • Example URL: POST https://{endpoint}/v1.0/48cc2c48765f481480c7db940d6409d1/queues
      • Body:
        {
        +    "queue_name": "queue1",
        +    "description": "test",
        +    "cu_count": 16,
        +    "resource_mode": 1,
        +    "queue_type": "sql"
        +}
        +
      +
    • Example response
      {
      +  "is_success": true,
      +  "message": "",
      +  "queue_name": "queue1"
      +}
      +
    +
  2. Verify that the queue is created successfully.
    • API

      URI format: GET /v1.0/{project_id}/queues/{queue_name}

      + +
    • Request example
      • Description: Query details about queue1 in the project whose ID is 48cc2c48765f481480c7db940d6409d1.
      • Example URL: GET https://{endpoint}/v1.0/48cc2c48765f481480c7db940d6409d1/queues/queue1
      • Body:
        {}
        +
      +
    • Example response
      {
      +    "is_success": true,
      +    "message": "",
      +    "owner": "testuser",
      +    "description": "",
      +    "queue_name": "queue1",
      +    "create_time": 1587613028851,
      +    "queue_type": "sql",
      +    "cu_count": 16,
      +    "resource_id": "03d51b88-db63-4611-b779-9a72ba0cf58b",
      +    "resource_mode": 0
      +}
      +
    +
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0308.html b/docs/dli/api-ref/dli_02_0308.html new file mode 100644 index 00000000..48f6916c --- /dev/null +++ b/docs/dli/api-ref/dli_02_0308.html @@ -0,0 +1,81 @@ + + +

Creating and Submitting a SQL Job

+

Scenario Description

This section describes how to create and query SQL jobs using APIs.

+
+

Constraints

+
+

Involved APIs

+
+

Procedure

  1. Create a SQL queue. For details, see Creating a Queue.
  2. Create a database.
    • API

      URI format: POST /v1.0/{project_id}/databases

      + +
    • Request example
      • Description: Creates a database named db1 in the project whose ID is 48cc2c48765f481480c7db940d6409d1.
      • Example URL: POST https://{endpoint}/v1.0/48cc2c48765f481480c7db940d6409d1/databases
      • Body:
        {
        +     "database_name": "db1",
        +     "description": "this is for test"
        +}
        +
      +
    • Example response
      {
      +  "is_success": true,
      +  "message": ""
      +}
      +
    +
  3. Create a table.
    • API

      URI format: POST /v1.0/{project_id}/databases/{database_name}/tables

      + +
    • Request example
      • Description: In the project whose ID is 48cc2c48765f481480c7db940d6409d1, create a table named tb1 in the db1 database.
      • Example URL: POST https://{endpoint}/v1.0/48cc2c48765f481480c7db940d6409d1/databases/db1/tables
      • Body:
        {
        +  "table_name": "tb1",
        +  "data_location": "OBS",
        +  "description": "",
        +  "data_type": "csv",
        +  "data_path": "obs://obs/path1/test.csv",
        +  "columns": [
        +  {
        +     "column_name": "column1",
        +     "type": "string",
        +     "description": "",
        +     "is_partition_column": true
        +  },
        +  {
        +     "column_name": "column2",
        +     "type": "string",
        +     "description": "",
        +     "is_partition_column": false
        +  }
        +  ],
        +  "with_column_header": true,
        +  "delimiter": ",",
        +  "quote_char": "\"",
        +  "escape_char": "\\",
        +  "date_format": "yyyy-MM-dd",
        +  "timestamp_format": "yyyy-MM-dd HH:mm:ss"
        +}
        +
      +
    • Example response
      {
      +  "is_success": true,
      +  "message": ""
      +}
      +
    +
  4. (Optional) If the table to be created does not contain data, use the Importing Data API to import data to the table.
  5. (Optional) After data is imported, you can use the Querying Job Details API to check whether the imported data is correct.
  6. Submit a query job.
    • API

      URI format: POST /v1.0/{project_id}/jobs/submit-job

      + +
    • Request example
      • Description: Submit a SQL job in the project whose ID is 48cc2c48765f481480c7db940d6409d1 and query data in the tb1 table in the database db1.
      • Example URL: POST https://{endpoint}/v1.0/48cc2c48765f481480c7db940d6409d1/jobs/submit-job
      • Body:
        {
        +    "currentdb": "db1",
        +    "sql": "select * from tb1 limit 10",
        +    "queue_name": "queue1"
        +}
        +
      +
    • Example response
      {
      +  "is_success": true,
      +  "message": "",
      +  "job_id": "95fcc908-9f1b-446c-8643-5653891d9fd9",
      +  "job_type": "QUERY",
      +  "job_mode": "async"
      +}
      +
    +
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0309.html b/docs/dli/api-ref/dli_02_0309.html new file mode 100644 index 00000000..d88246b3 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0309.html @@ -0,0 +1,141 @@ + + +

Creating and Submitting a Spark Job

+

Scenario Description

This section describes how to create and submit Spark jobs using APIs.

+
+

Constraints

+
+

Involved APIs

+
+

Procedure

  1. Create a common queue. For details, see Creating a Queue.
  2. Upload a package group.
    • API

      URI format: POST /v2.0/{project_id}/resources

      + +
    • Request example
      • Description: Upload resources in the GATK group to the project whose ID is 48cc2c48765f481480c7db940d6409d1.
      • Example URL: POST https://{endpoint}/v2.0/48cc2c48765f481480c7db940d6409d1/resources
      • Body:
        {
        +    "paths": [
        +        "https://test.obs.xxx.com/txr_test/jars/spark-sdv-app.jar"
        +    ],
        +    "kind": "jar",
        +    "group": "gatk",
        +    "is_async":"true"
        +}
        +
      +
    • Example response
      {
      +    "group_name": "gatk",
      +    "status": "READY",
      +    "resources": [
      +        "spark-sdv-app.jar",
      +        "wordcount",
      +        "wordcount.py"
      +    ],
      +    "details": [
      +        {
      +            "create_time": 0,
      +            "update_time": 0,
      +            "resource_type": "jar",
      +            "resource_name": "spark-sdv-app.jar",
      +            "status": "READY",
      +            "underlying_name": "987e208d-d46e-4475-a8c0-a62f0275750b_spark-sdv-app.jar"
      +        },
      +        {
      +            "create_time": 0,
      +            "update_time": 0,
      +            "resource_type": "jar",
      +            "resource_name": "wordcount",
      +            "status": "READY",
      +            "underlying_name": "987e208d-d46e-4475-a8c0-a62f0275750b_wordcount"
      +        },
      +        {
      +            "create_time": 0,
      +            "update_time": 0,
      +            "resource_type": "jar",
      +            "resource_name": "wordcount.py",
      +            "status": "READY",
      +            "underlying_name": "987e208d-d46e-4475-a8c0-a62f0275750b_wordcount.py"
      +        }
      +    ],
      +    "create_time": 1551334579654,
      +    "update_time": 1551345369070
      +}
      +
    +
  3. View resource packages in a group.
    • API

      URI format: GET /v2.0/{project_id}/resources/{resource_name}

      + +
    • Request example
      • Description: Query the resource package named luxor-router-1.1.1.jar in the GATK group under the project whose ID is 48cc2c48765f481480c7db940d6409d1.
      • Example URL: GET https://{endpoint}/v2.0/48cc2c48765f481480c7db940d6409d1/resources/luxor-router-1.1.1.jar?group=gatk
      • Body:
        {}
        +
      +
    • Example response
      {
      +    "create_time": 1522055409139,
      +    "update_time": 1522228350501,
      +    "resource_type": "jar",
      +    "resource_name": "luxor-router-1.1.1.jar",
      +    "status": "uploading",
      +    "underlying_name": "7885d26e-c532-40f3-a755-c82c442f19b8_luxor-router-1.1.1.jar",
      +    "owner": "****"
      +}
      +
    +
  4. Create and submit a Spark batch processing job.
    • API

      URI format: POST /v2.0/{project_id}/batches

      + +
    • Request example
      • Description: In the 48cc2c48765f481480c7db940d6409d1 project, create a batch processing job named TestDemo4 in queue1.
      • Example URL: POST https://{endpoint}/v2.0/48cc2c48765f481480c7db940d6409d1/batches
      • Body:
        {
        +  "sc_type": "A",
        +  "jars": [
        +   
        +"spark-examples_2.11-2.1.0.luxor.jar"
        +  ],
        +  "driverMemory": "1G",
        +  "driverCores": 1,
        +  "executorMemory": "1G",
        +  "executorCores": 1,
        +  "numExecutors": 1,
        +  "queue": "cce_general",
        +  "file":
        +"spark-examples_2.11-2.1.0.luxor.jar",
        +  "className":
        +"org.apache.spark.examples.SparkPi",
        +  "minRecoveryDelayTime": 10000,
        +  "maxRetryTimes": 20
        +}
        +
      +
    • Example response
      {
      +  "id": "07a3e4e6-9a28-4e92-8d3f-9c538621a166",
      +  "appId": "",
      +  "name": "",
      +  "owner": "test1",
      +  "proxyUser": "",
      +  "state": "starting",
      +  "kind": "",
      +  "log": [],
      +  "sc_type": "CUSTOMIZED",
      +  "cluster_name": "aaa",
      +  "queue": "aaa",
      +  "create_time": 1607589874156,
      +  "update_time": 1607589874156
      +}
      +
    +
  5. Query a batch job status.
    • API

      URI format: GET /v2.0/{project_id}/batches/{batch_id}/state

      + +
    • Request example
      • Description: Query the status of the batch processing job whose ID is 0a324461-d9d9-45da-a52a-3b3c7a3d809e in the project whose ID is 48cc2c48765f481480c7db940d6409d1.
      • Example URL: GET https://{endpoint}/v2.0/48cc2c48765f481480c7db940d6409d1/batches/0a324461-d9d9-45da-a52a-3b3c7a3d809e/state
      • Body:
        {}
        +
      +
    • Example response
      {
      +   "id":"0a324461-d9d9-45da-a52a-3b3c7a3d809e",
      +   "state":"Success"
      +}
      +
    +
  6. Query batch job logs.
    • API

      URI format: GET /v2.0/{project_id}/batches/{batch_id}/log

      + +
    • Request example
      • Description: Query the background logs of the batch processing job 0a324461-d9d9-45da-a52a-3b3c7a3d809e in the 48cc2c48765f481480c7db940d6409d1 project.
      • Example URL: GET https://{endpoint}/v2.0/48cc2c48765f481480c7db940d6409d1/batches/0a324461-d9d9-45da-a52a-3b3c7a3d809e/log
      • Body:
        {}
        +
      +
    • Example response
      {
      +    "id": "0a324461-d9d9-45da-a52a-3b3c7a3d809e",
      +    "from": 0,
      +    "total": 3,
      +    "log": [
      +           "Detailed information about job logs"
      +    ]
      +}
      +
    +
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0310.html b/docs/dli/api-ref/dli_02_0310.html new file mode 100644 index 00000000..6b8ac4c1 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0310.html @@ -0,0 +1,82 @@ + + +

Creating and Submitting a Flink Job

+

Scenario Description

This section describes how to create and run a user-defined Flink job using APIs.

+
+

Constraints

+
+

Involved APIs

+
+

Procedure

  1. Create a queue for general use. For details, see Creating a Queue. In the request, set resource_mode to 1 to create a dedicated queue.
  2. Upload the resource package of the user-defined Flink job. For details, see 2.
  3. Query resource packages in a group. For details, see 3.
  4. Create a custom Flink job.
    • API

      URI format: POST /v1.0/{project_id}/streaming/flink-jobs

      + +
    • Request example
      • Description: Create a user-defined Flink job in the project whose ID is 48cc2c48765f481480c7db940d6409d1.
      • Example URL: POST https://{endpoint}/v1.0/48cc2c48765f481480c7db940d6409d1/streaming/flink-jobs
      • Body:
        {
        +    "name": "test",
        +    "desc": "job for test",
        +    "queue_name": "testQueue",
        +    "manager_cu_number": 1,
        +    "cu_number": 2,
        +    "parallel_number": 1,
        +    "tm_cus": 1,
        +    "tm_slot_num": 1,
        +    "log_enabled": true,
        +    "obs_bucket": "bucketName",
        +    "smn_topic": "topic",
        +    "main_class": "org.apache.flink.examples.streaming.JavaQueueStream",
        +    "restart_when_exception": false,
        +    "entrypoint": "javaQueueStream.jar",
        +    "entrypoint_args":"-windowSize 2000 -rate 3",
        +    "dependency_jars": [
        +        "myGroup/test.jar",
        +        "myGroup/test1.jar"
        +    ],
        +    "dependency_files": [
        +        "myGroup/test.csv",
        +        "myGroup/test1.csv"
        +    ]
        +}
        +
      +
    • Example response
      {
      +  "is_success": true,
      +  "message": "A Flink job is created successfully.",
      +  "job": {
      +    "job_id": 138,
      +    "status_name": "job_init",
      +    "status_desc": ""
      +  }
      +}
      +
    +
  5. Run jobs in batches.
    • API

      URI format: POST /v1.0/{project_id}/streaming/jobs/run

      + +
    • Request example
      • Description: Run the jobs whose job_id is 298765 and 298766 in the project whose ID is 48cc2c48765f481480c7db940d6409d1.
      • Example URL: POST https://{endpoint}/v1.0/48cc2c48765f481480c7db940d6409d1/streaming/jobs/run
      • Body:
        {
        +    "job_ids": [131,130,138,137],
        +    "resume_savepoint": true
        +}
        +
      +
    • Example response
      [
      +    {
      +        "is_success": "true",
      +        "message": "The request for submitting DLI jobs is delivered successfully."
      +    },
      +    {
      +        "is_success": "true",
      +        "message": "The request for submitting DLI jobs is delivered successfully."
      +    },
      +    {
      +        "is_success": "true",
      +        "message": "The request for submitting DLI jobs is delivered successfully."
      +    },
      +    {
      +        "is_success": "true",
      +        "message": "The request for submitting DLI jobs is delivered successfully."
      +    }
      +]
      +
    +
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0311.html b/docs/dli/api-ref/dli_02_0311.html new file mode 100644 index 00000000..5e391a8c --- /dev/null +++ b/docs/dli/api-ref/dli_02_0311.html @@ -0,0 +1,78 @@ + + +

Creating and Using a Datasource Connection

+

Scenario Description

This section describes how to create an enhanced datasource connection using an API.

+
+

Constraints

+
+

Involved APIs

+
+

Procedure

  1. Create a queue. For details, see Creating a Queue. In the request, set resource_mode to 1 to create a dedicated queue.
  2. Create an enhanced datasource connection.
    • API

      URI format: POST /v2.0/{project_id}/datasource/enhanced-connections

      + +
    • Request example
      • Description: Create an enhanced datasource connection named test1 in project 48cc2c48765f481480c7db940d6409d1.
      • Example URL: POST https://{endpoint}/v2.0/48cc2c48765f481480c7db940d6409d1/datasource/enhanced-connections
      • Body:
        {
        +  "name": "test1",
        +  "dest_vpc_id": "22094d8f-c310-4621-913d-4c4d655d8495",
        +  "dest_network_id": "78f2562a-36e4-4b39-95b9-f5aab22e1281",
        +  "queues": ["q1","q2"],
        +  "hosts": [
        +    {
        +      "ip":"192.168.0.1",
        +      "name":"ecs-97f8-0001"
        +    },
        +    {
        +      "ip":"192.168.0.2", 
        +      "name":"ecs-97f8-0002"
        +    }
        +  ]
        +}
        +
      +
    • Example response
      {
      +  "is_success": true,
      +  "message": "",
      +  "connection_id": "2a620c33-5609-40c9-affd-2b6453071b0f"
      +}
      +
    +
  3. (Optional) If no queue is bound when you create an enhanced datasource connection, you can use the Binding a Queue API to bind a queue.
  4. Verify that the enhanced datasource connection is created successfully.
    • API

      URI format: GET /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}

      + +
    • Request example
      • Description: Query an enhanced datasource connection whose ID is 2a620c33-5609-40c9-affd-2b6453071b0f in project 48cc2c48765f481480c7db940d6409d1.
      • Example URL: GET https://{endpoint}/v2.0/48cc2c48765f481480c7db940d6409d1/datasource/enhanced-connections/2a620c33-5609-40c9-affd-2b6453071b0f
      • Body:
        {}
        +
      +
    • Example response
      {
      +  "is_success": true,
      +  "message": "",
      +  "name": "test1",
      +  "id": "2a620c33-5609-40c9-affd-2b6453071b0f",
      +  "available_queue_info": [
      +    {
      +      "status": "ACTIVE",
      +      "name": "queue1",
      +      "peer_id": "2a620c33-5609-40c9-affd-2b6453071b0f",
      +      "err_msg": "",
      +      "update_time": 1566889577861
      +    }
      +  ],
      +  "dest_vpc_id": "22094d8f-c310-4621-913d-4c4d655d8495",
      +  "dest_network_id": "78f2562a-36e4-4b39-95b9-f5aab22e1281",
      +  "isPrivis": true,
      +  "create_time": 1566888011125,
      +  "status": "ACTIVE",
      +  "hosts": [
      +    {
      +      "ip":"192.168.0.1",
      +      "name":"ecs-97f8-0001"
      +    },
      +    {
      +      "ip":"192.168.0.2", 
      +      "name":"ecs-97f8-0002"
      +    }
      +  ]
      +}
      +
    +
+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0312.html b/docs/dli/api-ref/dli_02_0312.html new file mode 100644 index 00000000..e81f86ff --- /dev/null +++ b/docs/dli/api-ref/dli_02_0312.html @@ -0,0 +1,228 @@ + + +

Previewing SQL Job Query Results

+

Function

This API is used to view the job execution result after a job is executed using SQL query statements. Currently, you can only query execution results of jobs of the QUERY type.

+

This API can be used to view only the first 1000 result records and does not support pagination query. To view all query results, you need to export the query results first. For details, see Exporting Query Results.

+
+

URI

+ +
+

Request

None

+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

No

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

No

+

String

+

System prompt. If the execution succeeds, this parameter may be left blank.

+

job_id

+

No

+

String

+

Job ID. You can get the value by calling Submitting a SQL Job (Recommended).

+

job_type

+

No

+

String

+

Job type, including DDL, DCL, IMPORT, EXPORT, QUERY, INSERT, DATA_MIGRATION, UPDATE, DELETE, RESTART_QUEUE, and SCALE_QUEUE.

+

Currently, you can only query execution results of jobs of the QUERY type.

+

row_count

+

No

+

Integer

+

Total number of job results.

+

input_size

+

No

+

Long

+

Amount of data scanned during job execution.

+

schema

+

No

+

Array of Objects

+

Name and type of the job result column.

+

rows

+

No

+

Array of Strings

+

Job result set.

+
+
+
+

Example Request

None

+
+

Example Response

{
+  "is_success": true,
+  "message": "",
+  "job_id": "ead0b276-8ed4-4eb5-b520-58f1511e7033",
+  "job_type": "QUERY",
+  "row_count": 1,
+  "input_size": 74,
+  "schema": [
+    {
+      "c1": "int"
+    },
+    {
+      "c2": "string"
+    }
+  ],
+  "rows": [
+    [
+      23,
+      "sda"
+    ]
+  ]
+}
+
+

Status Codes

Table 4 describes the status codes.

+ +
+ + + + + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The query is successful.

+

400

+

Request error.

+

500

+

Internal service error.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0316.html b/docs/dli/api-ref/dli_02_0316.html new file mode 100644 index 00000000..6296955b --- /dev/null +++ b/docs/dli/api-ref/dli_02_0316.html @@ -0,0 +1,249 @@ + + +

Generating a Static Stream Graph for a Flink SQL Job

+

Function

This API is used to generate a static stream graph for a Flink SQL job.

+
+

URI

+
+

Request

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 2 Request parameters

Parameter

+

Mandatory

+

Type

+

Description

+

sql_body

+

Yes

+

String

+

SQL

+

cu_number

+

No

+

Integer

+

Total number of CUs.

+

manager_cu_number

+

No

+

Integer

+

Number of CUs of the management unit.

+

parallel_number

+

No

+

Integer

+

Maximum degree of parallelism.

+

tm_cus

+

No

+

Integer

+

Number of CUs in a taskManager.

+

tm_slot_num

+

No

+

Integer

+

Number of slots in a taskManager.

+

operator_config

+

No

+

String

+

Operator configurations.

+

static_estimator

+

No

+

Boolean

+

Whether to estimate static resources.

+

job_type

+

No

+

String

+

Job type. Only the flink_opensource_sql_job type is supported.

+

graph_type

+

No

+

String

+

Stream graph type. Currently, the following two types of stream graphs are supported:

+
  • simple_graph: Simplified stream graph
  • job_graph: Static stream graph
+

static_estimator_config

+

No

+

String

+

Traffic or hit ratio of each operator, which is a character string in JSON format.

+
+
+
+

Response

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + +
Table 3 Response parameters

Parameter

+

Mandatory

+

Type

+

Description

+

is_success

+

Yes

+

Boolean

+

Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed.

+

message

+

Yes

+

String

+

System prompt. If execution succeeds, the message may be left blank.

+

error_code

+

Yes

+

String

+

Error codes.

+

stream_graph

+

Yes

+

String

+

Description of a static stream graph.

+
+
+
+

Example Request

{
+   "cu_number": 4,
+   "manager_cu_number": 1,
+   "parallel_number": 4,
+   "tm_cus": 1,
+   "tm_slot_num": 1,
+   "sql_body": "",
+   "operator_config": "",
+   "static_estimator": true,
+   "job_type": "flink_opensource_sql_job",
+   "graph_type": "job_graph"
+ }
+
+

Example Response

{
+    "is_success": true,
+    "message": "",
+    "error_code": "",
+    "stream_graph": "{\n  \"nodes\" : [ {\n    \"id\" : 1,\n    \"operator_id\" : \"bc764cd8ddf7a0cff126f51c16239658\",\n    \"type\" : \"Source\",\n    
+ \"contents\" : \"kafkaSource\",\n    \"parallelism\" : 1\n  }, {\n    \"id\" : 2,\n    \"operator_id\" : \"0a448493b4782967b150582570326227\",\n    \"type\" : \"select\",\n    \"contents\" : \"car_id, car_owner, car_brand, car_speed\",\n    \"parallelism\" : 1,\n    \"predecessors\" : [ {\n      \"id\" : 1\n    } ]\n  }, {\n    \"id\" : 4,\n    \"operator_id\" : \"6d2677a0ecc3fd8df0b72ec675edf8f4\",\n    \"type\" : \"Sink\",\n    \"contents\" : \"kafkaSink\",\n    \"parallelism\" : 1,\n    \"predecessors\" : [ {\n      \"id\" : 2\n    } ]\n  } ]\n}"
+}
+
+

Status Codes

Table 4 describes status codes.

+ +
+ + + + + + + + + + +
Table 4 Status codes

Status Code

+

Description

+

200

+

The operation is successful.

+

400

+

The input parameter is invalid.

+
+
+
+

Error Codes

If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.

+
+
+
+ +
+ diff --git a/docs/dli/api-ref/dli_02_0500.html b/docs/dli/api-ref/dli_02_0500.html new file mode 100644 index 00000000..82c37349 --- /dev/null +++ b/docs/dli/api-ref/dli_02_0500.html @@ -0,0 +1,8 @@ + + +

Calling APIs

+

Public cloud APIs comply with the RESTful API design principles. REST-based Web services are organized into resources. Each resource is identified by one or more Uniform Resource Identifiers (URIs). An application accesses a resource based on the resource's Uniform Resource Locator (URL). A URL is usually in the following format: https://Endpoint/uri. In the URL, uri indicates the resource path, that is, the API access path.

+

Public cloud APIs use HTTPS as the transmission protocol. Requests/Responses are transmitted by using JSON messages, with the media type represented by application/json.

+

For details about how to use APIs, see API Usage Guidelines.

+
+ diff --git a/docs/dli/api-ref/public_sys-resources/caution_3.0-en-us.png b/docs/dli/api-ref/public_sys-resources/caution_3.0-en-us.png new file mode 100644 index 00000000..60f60762 Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/caution_3.0-en-us.png differ diff --git a/docs/dli/api-ref/public_sys-resources/danger_3.0-en-us.png b/docs/dli/api-ref/public_sys-resources/danger_3.0-en-us.png new file mode 100644 index 00000000..47a9c723 Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/danger_3.0-en-us.png differ diff --git a/docs/dli/api-ref/public_sys-resources/delta.gif b/docs/dli/api-ref/public_sys-resources/delta.gif new file mode 100644 index 00000000..0d1b1f67 Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/delta.gif differ diff --git a/docs/dli/api-ref/public_sys-resources/deltaend.gif b/docs/dli/api-ref/public_sys-resources/deltaend.gif new file mode 100644 index 00000000..cc7da0fc Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/deltaend.gif differ diff --git a/docs/dli/api-ref/public_sys-resources/icon-arrowdn.gif b/docs/dli/api-ref/public_sys-resources/icon-arrowdn.gif new file mode 100644 index 00000000..37942803 Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/icon-arrowdn.gif differ diff --git a/docs/dli/api-ref/public_sys-resources/icon-arrowrt.gif b/docs/dli/api-ref/public_sys-resources/icon-arrowrt.gif new file mode 100644 index 00000000..6aaaa11c Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/icon-arrowrt.gif differ diff --git a/docs/dli/api-ref/public_sys-resources/icon-caution.gif b/docs/dli/api-ref/public_sys-resources/icon-caution.gif new file mode 100644 index 00000000..079c79b2 Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/icon-caution.gif differ diff --git a/docs/dli/api-ref/public_sys-resources/icon-danger.gif b/docs/dli/api-ref/public_sys-resources/icon-danger.gif new file mode 100644 index 00000000..079c79b2 
Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/icon-danger.gif differ diff --git a/docs/dli/api-ref/public_sys-resources/icon-huawei.gif b/docs/dli/api-ref/public_sys-resources/icon-huawei.gif new file mode 100644 index 00000000..a31d60f8 Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/icon-huawei.gif differ diff --git a/docs/dli/api-ref/public_sys-resources/icon-note.gif b/docs/dli/api-ref/public_sys-resources/icon-note.gif new file mode 100644 index 00000000..31be2b03 Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/icon-note.gif differ diff --git a/docs/dli/api-ref/public_sys-resources/icon-notice.gif b/docs/dli/api-ref/public_sys-resources/icon-notice.gif new file mode 100644 index 00000000..40907065 Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/icon-notice.gif differ diff --git a/docs/dli/api-ref/public_sys-resources/icon-tip.gif b/docs/dli/api-ref/public_sys-resources/icon-tip.gif new file mode 100644 index 00000000..c47bae05 Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/icon-tip.gif differ diff --git a/docs/dli/api-ref/public_sys-resources/icon-warning.gif b/docs/dli/api-ref/public_sys-resources/icon-warning.gif new file mode 100644 index 00000000..079c79b2 Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/icon-warning.gif differ diff --git a/docs/dli/api-ref/public_sys-resources/note_3.0-en-us.png b/docs/dli/api-ref/public_sys-resources/note_3.0-en-us.png new file mode 100644 index 00000000..57a0e1f5 Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/note_3.0-en-us.png differ diff --git a/docs/dli/api-ref/public_sys-resources/notice_3.0-en-us.png b/docs/dli/api-ref/public_sys-resources/notice_3.0-en-us.png new file mode 100644 index 00000000..fa4b6499 Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/notice_3.0-en-us.png differ diff --git a/docs/dli/api-ref/public_sys-resources/warning_3.0-en-us.png 
b/docs/dli/api-ref/public_sys-resources/warning_3.0-en-us.png new file mode 100644 index 00000000..def5c356 Binary files /dev/null and b/docs/dli/api-ref/public_sys-resources/warning_3.0-en-us.png differ