diff --git a/docs/dli/api-ref/ALL_META.TXT.json b/docs/dli/api-ref/ALL_META.TXT.json new file mode 100644 index 00000000..59cb6e9c --- /dev/null +++ b/docs/dli/api-ref/ALL_META.TXT.json @@ -0,0 +1,1162 @@ +[ + { + "uri":"dli_02_0500.html", + "product_code":"dli", + "code":"1", + "des":"Public cloud APIs comply with the RESTful API design principles. REST-based Web services are organized into resources. Each resource is identified by one or more Uniform ", + "doc_type":"api", + "kw":"Calling APIs,API Reference", + "title":"Calling APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0181.html", + "product_code":"dli", + "code":"2", + "des":"This section describes the APIs provided by DLI.", + "doc_type":"api", + "kw":"Overview,API Reference", + "title":"Overview", + "githuburl":"" + }, + { + "uri":"dli_02_0306.html", + "product_code":"dli", + "code":"3", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Getting Started", + "title":"Getting Started", + "githuburl":"" + }, + { + "uri":"dli_02_0307.html", + "product_code":"dli", + "code":"4", + "des":"This section describes how to create and query a queue using APIs.Queues created using this API will be bound to specified compute resources.It takes 6 to 10 minutes to s", + "doc_type":"api", + "kw":"Creating a Queue,Getting Started,API Reference", + "title":"Creating a Queue", + "githuburl":"" + }, + { + "uri":"dli_02_0308.html", + "product_code":"dli", + "code":"5", + "des":"This section describes how to create and query SQL jobs using APIs.It takes 6 to 10 minutes to start a job using a new queue for the first time.Creating a Queue: Create a", + "doc_type":"api", + "kw":"Creating and Submitting a SQL Job,Getting Started,API Reference", + "title":"Creating and Submitting a SQL Job", + "githuburl":"" + }, + { + "uri":"dli_02_0309.html", + "product_code":"dli", + "code":"6", + "des":"This section describes how to create and submit Spark jobs using APIs.It takes 6 to 10 minutes to start a job using a new queue for the first time.Creating a Queue: Creat", + "doc_type":"api", + "kw":"Creating and Submitting a Spark Job,Getting Started,API Reference", + "title":"Creating and Submitting a Spark Job", + "githuburl":"" + }, + { + "uri":"dli_02_0310.html", + "product_code":"dli", + "code":"7", + "des":"This section describes how to create and run a user-defined Flink job using APIs.It takes 6 to 10 minutes to start a job using a new queue for the first time.Creating a Q", + "doc_type":"api", + "kw":"Creating and Submitting a Flink Job,Getting Started,API Reference", + "title":"Creating and Submitting a Flink Job", + "githuburl":"" + }, + { + "uri":"dli_02_0311.html", + "product_code":"dli", + "code":"8", + "des":"This section describes how to create an enhanced datasource connection using an API.It takes 6 to 10 minutes to start a job using a new queue for the first time.Before cr", + "doc_type":"api", + "kw":"Creating and Using a Datasource Connection,Getting Started,API Reference", + "title":"Creating and Using a Datasource Connection", + "githuburl":"" + }, + { + "uri":"dli_02_0036.html", + "product_code":"dli", + "code":"9", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Permission-related APIs", + "title":"Permission-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0037.html", + "product_code":"dli", + "code":"10", + "des":"This API is used to share a specific queue with other users. You can grant users with the permission to use the specified queue or revoke the permission.URI formatPUT /v1", + "doc_type":"api", + "kw":"Granting Users with the Queue Usage Permission,Permission-related APIs,API Reference", + "title":"Granting Users with the Queue Usage Permission", + "githuburl":"" + }, + { + "uri":"dli_02_0038.html", + "product_code":"dli", + "code":"11", + "des":"This API is used to query names of all users who can use a specified queue.URI formatGET /v1.0/{project_id}/queues/{queue_name}/usersGET /v1.0/{project_id}/queues/{queue_", + "doc_type":"api", + "kw":"Querying Queue Users,Permission-related APIs,API Reference", + "title":"Querying Queue Users", + "githuburl":"" + }, + { + "uri":"dli_02_0039.html", + "product_code":"dli", + "code":"12", + "des":"This API is used to grant database or table data usage permission to specified users.URI formatPUT /v1.0/{project_id}/user-authorizationPUT /v1.0/{project_id}/user-author", + "doc_type":"api", + "kw":"Granting Data Permission to Users,Permission-related APIs,API Reference", + "title":"Granting Data Permission to Users", + "githuburl":"" + }, + { + "uri":"dli_02_0040.html", + "product_code":"dli", + "code":"13", + "des":"This API is used to query names of all users who have permission to use or access the database.URI formatGET /v1.0/{project_id}/databases/{database_name}/usersGET /v1.0/{pro", + "doc_type":"api", + "kw":"Querying Database Users,Permission-related APIs,API Reference", + "title":"Querying Database Users", + "githuburl":"" + }, + { + "uri":"dli_02_0041.html", + "product_code":"dli", + "code":"14", + "des":"This API is used to query users who have permission to access the specified table or column in the table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables", + "doc_type":"api", + "kw":"Querying Table Users,Permission-related APIs,API Reference", + "title":"Querying Table Users", + "githuburl":"" + }, + { + "uri":"dli_02_0042.html", + "product_code":"dli", + "code":"15", + "des":"This API is used to query the permission of a specified user on a table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/users/{user_name}GE", + "doc_type":"api", + "kw":"Querying a User's Table Permissions,Permission-related APIs,API Reference", + "title":"Querying a User's Table Permissions", + "githuburl":"" + }, + { + "uri":"dli_02_0252.html", + "product_code":"dli", + "code":"16", + "des":"This API is used to view the permissions granted to a user.URI formatGET /v1.0/{project_id}/authorization/privilegesGET /v1.0/{project_id}/authorization/privilegesParamet", + "doc_type":"api", + "kw":"Viewing the Granted Permissions of a User,Permission-related APIs,API Reference", + "title":"Viewing the Granted Permissions of a User", + "githuburl":"" + }, + { + "uri":"dli_02_0297.html", + "product_code":"dli", + "code":"17", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Agency-related APIs", + "title":"Agency-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0298.html", + "product_code":"dli", + "code":"18", + "des":"This API is used to obtain the agency information of a DLI user.URI formatGET /v2/{project_id}/agencyGET /v2/{project_id}/agencyParameter descriptionURI parametersParam", + "doc_type":"api", + "kw":"Obtaining DLI Agency Information,Agency-related APIs,API Reference", + "title":"Obtaining DLI Agency Information", + "githuburl":"" + }, + { + "uri":"dli_02_0299.html", + "product_code":"dli", + "code":"19", + "des":"This API is used to create an agency for a DLI user.URI formatPOST /v2/{project_id}/agencyPOST /v2/{project_id}/agencyParameter descriptionURI parametersParameterMandat", + "doc_type":"api", + "kw":"Creating a DLI Agency,Agency-related APIs,API Reference", + "title":"Creating a DLI Agency", + "githuburl":"" + }, + { + "uri":"dli_02_0193.html", + "product_code":"dli", + "code":"20", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Queue-related APIs (Recommended)", + "title":"Queue-related APIs (Recommended)", + "githuburl":"" + }, + { + "uri":"dli_02_0194.html", + "product_code":"dli", + "code":"21", + "des":"This API is used to create a queue. The queue will be bound to specified compute resources.It takes 5 to 15 minutes to start a job using a new queue for the first time.UR", + "doc_type":"api", + "kw":"Creating a Queue,Queue-related APIs (Recommended),API Reference", + "title":"Creating a Queue", + "githuburl":"" + }, + { + "uri":"dli_02_0195.html", + "product_code":"dli", + "code":"22", + "des":"This API is used to delete a specified queue.If a task is being executed in a specified queue, the queue cannot be deleted.URI formatDELETE /v1.0/{project_id}/queues/{que", + "doc_type":"api", + "kw":"Deleting a Queue,Queue-related APIs (Recommended),API Reference", + "title":"Deleting a Queue", + "githuburl":"" + }, + { + "uri":"dli_02_0196.html", + "product_code":"dli", + "code":"23", + "des":"This API is used to list all queues under the project.URI formatGET/v1.0/{project_id}/queuesGET/v1.0/{project_id}/queuesParameter descriptionURI parameterParameterMandato", + "doc_type":"api", + "kw":"Querying All Queues,Queue-related APIs (Recommended),API Reference", + "title":"Querying All Queues", + "githuburl":"" + }, + { + "uri":"dli_02_0016.html", + "product_code":"dli", + "code":"24", + "des":"This API is used to list details of a specific queue in a project.URI formatGET /v1.0/{project_id}/queues/{queue_name}GET /v1.0/{project_id}/queues/{queue_name}Parameter ", + "doc_type":"api", + "kw":"Viewing Details of a Queue,Queue-related APIs (Recommended),API Reference", + "title":"Viewing Details of a Queue", + "githuburl":"" + }, + { + "uri":"dli_02_0249.html", + "product_code":"dli", + "code":"25", + "des":"This API is used to restart, scale out, and scale in queues.Only SQL queues in the Available status can be restarted. 
(The queue status is Available only after the SQL jo", + "doc_type":"api", + "kw":"Restarting, Scaling Out, and Scaling In Queues,Queue-related APIs (Recommended),API Reference", + "title":"Restarting, Scaling Out, and Scaling In Queues", + "githuburl":"" + }, + { + "uri":"dli_02_0284.html", + "product_code":"dli", + "code":"26", + "des":"This API is used to send an address connectivity test request to a specified queue and insert the test address into the table.URI formatPOST /v1.0/{project_id}/queues/{q", + "doc_type":"api", + "kw":"Creating an Address Connectivity Test Request,Queue-related APIs (Recommended),API Reference", + "title":"Creating an Address Connectivity Test Request", + "githuburl":"" + }, + { + "uri":"dli_02_0285.html", + "product_code":"dli", + "code":"27", + "des":"This API is used to query the connectivity test result after the test is submitted.URI formatGET /v1.0/{project_id}/queues/{queue_name}/connection-test/{task_id}GET /v1", + "doc_type":"api", + "kw":"Querying Connectivity Test Details of a Specified Address,Queue-related APIs (Recommended),API Refer", + "title":"Querying Connectivity Test Details of a Specified Address", + "githuburl":"" + }, + { + "uri":"dli_02_0291.html", + "product_code":"dli", + "code":"28", + "des":"This API is used to create a scheduled CU change, that is, to create a scheduled CU change for a specified queue.URI formatPOST /v1/{project_id}/queues/{queue_name}/plan", + "doc_type":"api", + "kw":"Creating a Scheduled CU Change,Queue-related APIs (Recommended),API Reference", + "title":"Creating a Scheduled CU Change", + "githuburl":"" + }, + { + "uri":"dli_02_0292.html", + "product_code":"dli", + "code":"29", + "des":"This API is used to query the scheduled CU changes and list the changes of a specified queue.URI formatGET /v1/{project_id}/queues/{queue_name}/plansGET /v1/{project_id}/", + "doc_type":"api", + "kw":"Viewing a Scheduled CU Change,Queue-related APIs (Recommended),API Reference", + "title":"Viewing a Scheduled CU Change", + "githuburl":"" + }, + { + "uri":"dli_02_0293.html", + "product_code":"dli", + "code":"30", + "des":"This API is used to delete scheduled CU changes in batches.URI formatPOST /v1/{project_id}/queues/{queue_name}/plans/batch-deletePOST /v1/{project_id}/queues/{queue_name}", + "doc_type":"api", + "kw":"Deleting Scheduled CU Changes in Batches,Queue-related APIs (Recommended),API Reference", + "title":"Deleting Scheduled CU Changes in Batches", + "githuburl":"" + }, + { + "uri":"dli_02_0294.html", + "product_code":"dli", + "code":"31", + "des":"This API is used to delete a scheduled CU change for a queue with a specified ID.URI formatDELETE /v1/{project_id}/queues/{queue_name}/plans/{plan_id}DELETE /v1/{projec", + "doc_type":"api", + "kw":"Deleting a Scheduled CU Change,Queue-related APIs (Recommended),API Reference", + "title":"Deleting a Scheduled CU Change", + "githuburl":"" + }, + { + "uri":"dli_02_0295.html", + "product_code":"dli", + "code":"32", + "des":"This API is used to modify a scheduled CU change for a queue with a specified ID.URI formatPUT /v1/{project_id}/queues/{queue_name}/plans/{plan_id}PUT /v1/{project_id}/", + "doc_type":"api", + "kw":"Modifying a Scheduled CU Change,Queue-related APIs (Recommended),API Reference", + "title":"Modifying a Scheduled CU Change", + "githuburl":"" + }, + { + "uri":"dli_02_0158.html", + "product_code":"dli", + "code":"33", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"APIs Related to SQL Jobs", + "title":"APIs Related to SQL Jobs", + "githuburl":"" + }, + { + "uri":"dli_02_0027.html", + "product_code":"dli", + "code":"34", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Database-related APIs", + "title":"Database-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0028.html", + "product_code":"dli", + "code":"35", + "des":"This API is used to add a database.URI formatPOST /v1.0/{project_id}/databasesPOST /v1.0/{project_id}/databasesParameter descriptionURI parameterParameterMandatoryTypeDes", + "doc_type":"api", + "kw":"Creating a Database,Database-related APIs,API Reference", + "title":"Creating a Database", + "githuburl":"" + }, + { + "uri":"dli_02_0030.html", + "product_code":"dli", + "code":"36", + "des":"This API is used to delete an empty database. If there are tables in the database to be deleted, delete all tables first. For details about the API used to delete tables,", + "doc_type":"api", + "kw":"Deleting a Database,Database-related APIs,API Reference", + "title":"Deleting a Database", + "githuburl":"" + }, + { + "uri":"dli_02_0029.html", + "product_code":"dli", + "code":"37", + "des":"This API is used to query the information about all the databases.URI formatGET /v1.0/{project_id}/databasesGET /v1.0/{project_id}/databasesParameter descriptionURI param", + "doc_type":"api", + "kw":"Querying All Databases,Database-related APIs,API Reference", + "title":"Querying All Databases", + "githuburl":"" + }, + { + "uri":"dli_02_0164.html", + "product_code":"dli", + "code":"38", + "des":"This API is used to modify the owner of a database.URI formatPUT /v1.0/{project_id}/databases/{database_name}/ownerPUT /v1.0/{project_id}/databases/{database_name}/ownerP", + "doc_type":"api", + "kw":"Modifying a Database Owner,Database-related APIs,API Reference", + "title":"Modifying a Database Owner", + "githuburl":"" + }, + { + "uri":"dli_02_0031.html", + "product_code":"dli", + "code":"39", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Table-related APIs", + "title":"Table-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0034.html", + "product_code":"dli", + "code":"40", + "des":"This API is used to create a table.This API is a synchronous API.URI formatPOST /v1.0/{project_id}/databases/{database_name}/tablesPOST /v1.0/{project_id}/databases/{data", + "doc_type":"api", + "kw":"Creating a Table,Table-related APIs,API Reference", + "title":"Creating a Table", + "githuburl":"" + }, + { + "uri":"dli_02_0035.html", + "product_code":"dli", + "code":"41", + "des":"This API is used to delete a specified table.URI formatDELETE /v1.0/{project_id}/databases/{database_name}/tables/{table_name}DELETE /v1.0/{project_id}/databases/{databas", + "doc_type":"api", + "kw":"Deleting a Table,Table-related APIs,API Reference", + "title":"Deleting a Table", + "githuburl":"" + }, + { + "uri":"dli_02_0105.html", + "product_code":"dli", + "code":"42", + "des":"This API is used to query information about tables that meet the filtering criteria or all the tables in the specified database.URI formatGET /v1.0/{project_id}/databases", + "doc_type":"api", + "kw":"Querying All Tables (Recommended),Table-related APIs,API Reference", + "title":"Querying All Tables (Recommended)", + "githuburl":"" + }, + { + "uri":"dli_02_0033.html", + "product_code":"dli", + "code":"43", + "des":"This API is used to describe metadata information in the specified table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}GET /v1.0/{project_", + "doc_type":"api", + "kw":"Describing the Table Information,Table-related APIs,API Reference", + "title":"Describing the Table Information", + "githuburl":"" + }, + { + "uri":"dli_02_0108.html", + "product_code":"dli", + "code":"44", + "des":"This API is used to preview the first ten rows of a table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/previewGET /v1.0/{project_id}/dat", + "doc_type":"api", + "kw":"Previewing Table Content,Table-related APIs,API Reference", + "title":"Previewing Table Content", + "githuburl":"" + }, + { + "uri":"dli_02_0250.html", + "product_code":"dli", + "code":"45", + "des":"This API is used to obtain the partition list.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/partitionsGET /v1.0/{project_id}/databases/{d", + "doc_type":"api", + "kw":"Obtaining the Partition List,Table-related APIs,API Reference", + "title":"Obtaining the Partition List", + "githuburl":"" + }, + { + "uri":"dli_02_0017.html", + "product_code":"dli", + "code":"46", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Job-related APIs", + "title":"Job-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0019.html", + "product_code":"dli", + "code":"47", + "des":"This API is used to import data from a file to a DLI or OBS table. 
Currently, only OBS data can be imported to a DLI or OBS table.This API is asynchronous.When importing ", + "doc_type":"api", + "kw":"Importing Data,Job-related APIs,API Reference", + "title":"Importing Data", + "githuburl":"" + }, + { + "uri":"dli_02_0020.html", + "product_code":"dli", + "code":"48", + "des":"This API is used to export data from a DLI table to a file.This API is asynchronous.Currently, data can be exported only from a DLI table to OBS, and the OBS path must be", + "doc_type":"api", + "kw":"Exporting Data,Job-related APIs,API Reference", + "title":"Exporting Data", + "githuburl":"" + }, + { + "uri":"dli_02_0102.html", + "product_code":"dli", + "code":"49", + "des":"This API is used to submit jobs to a queue using SQL statements.The job types support DDL, DCL, IMPORT, QUERY, and INSERT. The IMPORT function is the same as that describ", + "doc_type":"api", + "kw":"Submitting a SQL Job (Recommended),Job-related APIs,API Reference", + "title":"Submitting a SQL Job (Recommended)", + "githuburl":"" + }, + { + "uri":"dli_02_0104.html", + "product_code":"dli", + "code":"50", + "des":"This API is used to cancel a submitted job. If execution of a job completes or fails, this job cannot be canceled.URI formatDELETE /v1.0/{project_id}/jobs/{job_id}DELETE ", + "doc_type":"api", + "kw":"Canceling a Job (Recommended),Job-related APIs,API Reference", + "title":"Canceling a Job (Recommended)", + "githuburl":"" + }, + { + "uri":"dli_02_0025.html", + "product_code":"dli", + "code":"51", + "des":"This API is used to query information about all jobs in the current project.URI formatGET /v1.0/{project_id}/jobsGET /v1.0/{project_id}/jobsParameter descriptionURI param", + "doc_type":"api", + "kw":"Querying All Jobs,Job-related APIs,API Reference", + "title":"Querying All Jobs", + "githuburl":"" + }, + { + "uri":"dli_02_0312.html", + "product_code":"dli", + "code":"52", + "des":"This API is used to view the job execution result after a job is executed using SQL query statements. 
Currently, you can only query execution results of jobs of the QUERY", + "doc_type":"api", + "kw":"Previewing SQL Job Query Results,Job-related APIs,API Reference", + "title":"Previewing SQL Job Query Results", + "githuburl":"" + }, + { + "uri":"dli_02_0021.html", + "product_code":"dli", + "code":"53", + "des":"This API is used to query the status of a submitted job.URI formatGET /v1.0/{project_id}/jobs/{job_id}/statusGET /v1.0/{project_id}/jobs/{job_id}/statusParameter descript", + "doc_type":"api", + "kw":"Querying Job Status,Job-related APIs,API Reference", + "title":"Querying Job Status", + "githuburl":"" + }, + { + "uri":"dli_02_0022.html", + "product_code":"dli", + "code":"54", + "des":"This API is used to query details about jobs, including databasename, tablename, file size, and export mode.URI formatGET/v1.0/{project_id}/jobs/{job_id}/detailGET/v1.0/{", + "doc_type":"api", + "kw":"Querying Job Details,Job-related APIs,API Reference", + "title":"Querying Job Details", + "githuburl":"" + }, + { + "uri":"dli_02_0107.html", + "product_code":"dli", + "code":"55", + "des":"This API is used to check the SQL syntax.URI formatPOST /v1.0/{project_id}/jobs/check-sqlPOST /v1.0/{project_id}/jobs/check-sqlParameter descriptionURI parametersParamete", + "doc_type":"api", + "kw":"Checking SQL Syntax,Job-related APIs,API Reference", + "title":"Checking SQL Syntax", + "githuburl":"" + }, + { + "uri":"dli_02_0024.html", + "product_code":"dli", + "code":"56", + "des":"This API is used to export results returned from the query using SQL statements to OBS. Only the query result of QUERY jobs can be exported.This API is asynchronous.Curre", + "doc_type":"api", + "kw":"Exporting Query Results,Job-related APIs,API Reference", + "title":"Exporting Query Results", + "githuburl":"" + }, + { + "uri":"dli_02_0296.html", + "product_code":"dli", + "code":"57", + "des":"This API is used to obtain the job execution progress. If a job is being executed, information about its subjobs can be obtained. If a job has just started or has ended, ", + "doc_type":"api", + "kw":"Querying the Job Execution Progress,Job-related APIs,API Reference", + "title":"Querying the Job Execution Progress", + "githuburl":"" + }, + { + "uri":"dli_02_0166.html", + "product_code":"dli", + "code":"58", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Package Group-related APIs", + "title":"Package Group-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0130.html", + "product_code":"dli", + "code":"59", + "des":"This API is used to upload a package group to a project. 
The function is similar to creating a package on the management console.URI formatPOST /v2.0/{project_id}/resourc", + "doc_type":"api", + "kw":"Uploading a Package Group,Package Group-related APIs,API Reference", + "title":"Uploading a Package Group", + "githuburl":"" + }, + { + "uri":"dli_02_0168.html", + "product_code":"dli", + "code":"60", + "des":"This API is used to query all resources in a project, including groups.URI formatGET /v2.0/{project_id}/resourcesGET /v2.0/{project_id}/resourcesParameter descriptionURI ", + "doc_type":"api", + "kw":"Querying Package Group List,Package Group-related APIs,API Reference", + "title":"Querying Package Group List", + "githuburl":"" + }, + { + "uri":"dli_02_0169.html", + "product_code":"dli", + "code":"61", + "des":"This API is used to upload a group of JAR packages to a project.When a resource group with the same name is uploaded, the new group overwrites the old group.URI formatPOS", + "doc_type":"api", + "kw":"Uploading a JAR Package Group,Package Group-related APIs,API Reference", + "title":"Uploading a JAR Package Group", + "githuburl":"" + }, + { + "uri":"dli_02_0170.html", + "product_code":"dli", + "code":"62", + "des":"This API is used to upload a group of PyFile packages to a project.When a group with the same name as the PyFile package is uploaded, the new group overwrites the old gro", + "doc_type":"api", + "kw":"Uploading a PyFile Package Group,Package Group-related APIs,API Reference", + "title":"Uploading a PyFile Package Group", + "githuburl":"" + }, + { + "uri":"dli_02_0171.html", + "product_code":"dli", + "code":"63", + "des":"This API is used to upload a group of File packages to a project.When the File package group with the same name is uploaded, the new group overwrites the old group.URI fo", + "doc_type":"api", + "kw":"Uploading a File Package Group,Package Group-related APIs,API Reference", + "title":"Uploading a File Package Group", + "githuburl":"" + }, + { + "uri":"dli_02_0172.html", + "product_code":"dli", + "code":"64", + "des":"This API is used to query resource information of a package group in a Project.URI formatGET /v2.0/{project_id}/resources/{resource_name}GET /v2.0/{project_id}/resources/", + "doc_type":"api", + "kw":"Querying Resource Packages in a Group,Package Group-related APIs,API Reference", + "title":"Querying Resource Packages in a Group", + "githuburl":"" + }, + { + "uri":"dli_02_0173.html", + "product_code":"dli", + "code":"65", + "des":"This API is used to delete resource packages in a group in a Project.URI formatDELETE /v2.0/{project_id}/resources/{resource_name}DELETE /v2.0/{project_id}/resources/{res", + "doc_type":"api", + "kw":"Deleting a Resource Package from a Group,Package Group-related APIs,API Reference", + "title":"Deleting a Resource Package from a Group", + "githuburl":"" + }, + { + "uri":"dli_02_0253.html", + "product_code":"dli", + "code":"66", + "des":"This API is used to change the owner of a program package.URI formatPUT /v2.0/{project_id}/resources/ownerPUT /v2.0/{project_id}/resources/ownerParameter descriptionURI p", + "doc_type":"api", + "kw":"Changing the Owner of a Group or Resource Package,Package Group-related APIs,API Reference", + "title":"Changing the Owner of a Group or Resource Package", + "githuburl":"" + }, + { + "uri":"dli_02_0223.html", + "product_code":"dli", + "code":"67", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"APIs Related to Flink Jobs", + "title":"APIs Related to Flink Jobs", + "githuburl":"" + }, + { + "uri":"dli_02_0225.html", + "product_code":"dli", + "code":"68", + "des":"This API is used to grant DLI the permission to access OBS buckets for saving job checkpoints and run logs.URI formatPOST /v1.0/{project_id}/dli/obs-authorizePOST /v1.0/{", + "doc_type":"api", + "kw":"Granting OBS Permissions to DLI,APIs Related to Flink Jobs,API Reference", + "title":"Granting OBS Permissions to DLI", + "githuburl":"" + }, + { + "uri":"dli_02_0228.html", + "product_code":"dli", + "code":"69", + "des":"This API is used to create a Flink streaming SQL job.URI formatPOST /v1.0/{project_id}/streaming/sql-jobsPOST /v1.0/{project_id}/streaming/sql-jobsParameter descriptionUR", + "doc_type":"api", + "kw":"Creating a SQL Job,APIs Related to Flink Jobs,API Reference", + "title":"Creating a SQL Job", + "githuburl":"" + }, + { + "uri":"dli_02_0229.html", + "product_code":"dli", + "code":"70", + "des":"This API is used to modify a Flink SQL job.URI formatPUT /v1.0/{project_id}/streaming/sql-jobs/{job_id}PUT /v1.0/{project_id}/streaming/sql-jobs/{job_id}Parameter descrip", + "doc_type":"api", + "kw":"Updating a SQL Job,APIs Related to Flink Jobs,API Reference", + "title":"Updating a SQL Job", + "githuburl":"" + }, + { + "uri":"dli_02_0230.html", + "product_code":"dli", + "code":"71", + "des":"This API is used to create custom jobs, which currently support the JAR format and run in dedicated queues.URI formatPOST /v1.0/{project_id}/streaming/flink-jobsPOST /v1.", + "doc_type":"api", + "kw":"Creating a Flink Jar job,APIs Related to Flink Jobs,API Reference", + "title":"Creating a Flink Jar job", + "githuburl":"" + }, + { + "uri":"dli_02_0231.html", + "product_code":"dli", + "code":"72", + "des":"This API is used to update custom jobs, which currently support the JAR format and run in dedicated queues.URI formatPUT /v1.0/{project_id}/streaming/flink-jobs/{job_id}P", + "doc_type":"api", + "kw":"Updating a Flink Jar Job,APIs Related to Flink Jobs,API Reference", + "title":"Updating a Flink Jar Job", + "githuburl":"" + }, + { + "uri":"dli_02_0233.html", + "product_code":"dli", + "code":"73", + "des":"This API is used to trigger batch job running.URI formatPOST /v1.0/{project_id}/streaming/jobs/runPOST /v1.0/{project_id}/streaming/jobs/runParameter descriptionURI param", + "doc_type":"api", + "kw":"Running Jobs in Batches,APIs Related to Flink Jobs,API Reference", + "title":"Running Jobs in Batches", + "githuburl":"" + }, + { + "uri":"dli_02_0234.html", + "product_code":"dli", + "code":"74", + "des":"This API is used to query the list of the current user's jobs. You can set the job ID as the ID and query jobs whose IDs are greater than or less than the ID. 
You can als", + "doc_type":"api", + "kw":"Querying the Job List,APIs Related to Flink Jobs,API Reference", + "title":"Querying the Job List", + "githuburl":"" + }, + { + "uri":"dli_02_0235.html", + "product_code":"dli", + "code":"75", + "des":"This API is used to query details of a job.URI formatGET /v1.0/{project_id}/streaming/jobs/{job_id}GET /v1.0/{project_id}/streaming/jobs/{job_id}Parameter descriptionURI ", + "doc_type":"api", + "kw":"Querying Job Details,APIs Related to Flink Jobs,API Reference", + "title":"Querying Job Details", + "githuburl":"" + }, + { + "uri":"dli_02_0236.html", + "product_code":"dli", + "code":"76", + "des":"This API is used to query a job execution plan.URI formatGET /v1.0/{project_id}/streaming/jobs/{job_id}/execute-graphGET /v1.0/{project_id}/streaming/jobs/{job_id}/execut", + "doc_type":"api", + "kw":"Querying the Job Execution Plan,APIs Related to Flink Jobs,API Reference", + "title":"Querying the Job Execution Plan", + "githuburl":"" + }, + { + "uri":"dli_02_0241.html", + "product_code":"dli", + "code":"77", + "des":"This API is used to stop running jobs in batches.URI formatPOST /v1.0/{project_id}/streaming/jobs/stopPOST /v1.0/{project_id}/streaming/jobs/stopParameter descriptionURI ", + "doc_type":"api", + "kw":"Stopping Jobs in Batches,APIs Related to Flink Jobs,API Reference", + "title":"Stopping Jobs in Batches", + "githuburl":"" + }, + { + "uri":"dli_02_0242.html", + "product_code":"dli", + "code":"78", + "des":"This API is used to delete a Flink job at any state.The job records will not be deleted.URI formatDELETE /v1.0/{project_id}/streaming/jobs/{job_id}DELETE /v1.0/{project_i", + "doc_type":"api", + "kw":"Deleting a Job,APIs Related to Flink Jobs,API Reference", + "title":"Deleting a Job", + "githuburl":"" + }, + { + "uri":"dli_02_0243.html", + "product_code":"dli", + "code":"79", + "des":"This API is used to batch delete jobs at any state.URI formatPOST /v1.0/{project_id}/streaming/jobs/deletePOST /v1.0/{project_id}/streaming/jobs/deleteParameter descripti", + "doc_type":"api", + "kw":"Deleting Jobs in Batches,APIs Related to Flink Jobs,API Reference", + "title":"Deleting Jobs in Batches", + "githuburl":"" + }, + { + "uri":"dli_02_0254.html", + "product_code":"dli", + "code":"80", + "des":"This API is used to export Flink job data.URI formatPOST /v1.0/{project_id}/streaming/jobs/exportPOST /v1.0/{project_id}/streaming/jobs/exportParameter descriptionURI par", + "doc_type":"api", + "kw":"Exporting a Flink Job,APIs Related to Flink Jobs,API Reference", + "title":"Exporting a Flink Job", + "githuburl":"" + }, + { + "uri":"dli_02_0255.html", + "product_code":"dli", + "code":"81", + "des":"This API is used to import Flink job data.URI formatPOST /v1.0/{project_id}/streaming/jobs/importPOST /v1.0/{project_id}/streaming/jobs/importParameter descriptionURI par", + "doc_type":"api", + "kw":"Importing a Flink Job,APIs Related to Flink Jobs,API Reference", + "title":"Importing a Flink Job", + "githuburl":"" + }, + { + "uri":"dli_02_0316.html", + "product_code":"dli", + "code":"82", + "des":"This API is used to generate a static stream graph for a Flink SQL job.URI formatPOST /v3/{project_id}/streaming/jobs/{job_id}/gen-graphPOST /v3/{project_id}/streaming/jo", + "doc_type":"api", + "kw":"Generating a Static Stream Graph for a Flink SQL Job,APIs Related to Flink Jobs,API Reference", + "title":"Generating a Static Stream Graph for a Flink SQL Job", + "githuburl":"" + }, + { + "uri":"dli_02_0109.html", + "product_code":"dli", + "code":"83", + "des":"HUAWEI 
CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"APIs Related to Spark jobs", + "title":"APIs Related to Spark jobs", + "githuburl":"" + }, + { + "uri":"dli_02_0162.html", + "product_code":"dli", + "code":"84", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Batch Processing-related APIs", + "title":"Batch Processing-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0124.html", + "product_code":"dli", + "code":"85", + "des":"This API is used to create a batch processing job in a queue.URI formatPOST /v2.0/{project_id}/batchesPOST /v2.0/{project_id}/batchesParameter descriptionURI parameterPar", + "doc_type":"api", + "kw":"Creating a Batch Processing Job,Batch Processing-related APIs,API Reference", + "title":"Creating a Batch Processing Job", + "githuburl":"" + }, + { + "uri":"dli_02_0129.html", + "product_code":"dli", + "code":"86", + "des":"This API is used to cancel a batch processing job.Batch processing jobs in the Successful or Failed state cannot be canceled.URI formatDELETE /v2.0/{project_id}/batches/{", + "doc_type":"api", + "kw":"Canceling a Batch Processing Job,Batch Processing-related APIs,API Reference", + "title":"Canceling a Batch Processing Job", + "githuburl":"" + }, + { + "uri":"dli_02_0125.html", + "product_code":"dli", + "code":"87", + "des":"This API is used to obtain the list of batch processing jobs in a queue of a project.URI formatGET /v2.0/{project_id}/batchesGET /v2.0/{project_id}/batchesParameter descr", + "doc_type":"api", + "kw":"Obtaining the List of Batch Processing Jobs,Batch Processing-related APIs,API Reference", + "title":"Obtaining the List of Batch Processing Jobs", + "githuburl":"" + }, + { + "uri":"dli_02_0126.html", + "product_code":"dli", + "code":"88", + "des":"This API is used to query details about a batch processing job based on the job ID.URI formatGET /v2.0/{project_id}/batches/{batch_id}GET /v2.0/{project_id}/batches/{batc", + "doc_type":"api", + "kw":"Querying Batch Job Details,Batch Processing-related APIs,API Reference", + "title":"Querying Batch Job Details", + "githuburl":"" + }, + { + "uri":"dli_02_0127.html", + "product_code":"dli", + "code":"89", + "des":"This API is used to obtain the execution status of a batch processing job.URI formatGET /v2.0/{project_id}/batches/{batch_id}/stateGET /v2.0/{project_id}/batches/{batch_i", + "doc_type":"api", + "kw":"Querying a Batch Job Status,Batch Processing-related APIs,API Reference", + "title":"Querying a Batch Job Status", + "githuburl":"" + }, + { + "uri":"dli_02_0128.html", + "product_code":"dli", + "code":"90", + "des":"This API is used to query the back-end logs of batch processing jobs.URI formatGET /v2.0/{project_id}/batches/{batch_id}/logGET /v2.0/{project_id}/batches/{batch_id}/logP", + "doc_type":"api", + "kw":"Querying Batch Job Logs,Batch Processing-related APIs,API Reference", + "title":"Querying Batch Job Logs", + "githuburl":"" + }, + { + "uri":"dli_02_0244.html", + "product_code":"dli", + "code":"91", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with 
HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"APIs Related to Flink Job Templates", + "title":"APIs Related to Flink Job Templates", + "githuburl":"" + }, + { + "uri":"dli_02_0245.html", + "product_code":"dli", + "code":"92", + "des":"This API is used to create a user template for the DLI service. A maximum of 100 user templates can be created.URI formatPOST /v1.0/{project_id}/streaming/job-templatesPO", + "doc_type":"api", + "kw":"Creating a Template,APIs Related to Flink Job Templates,API Reference", + "title":"Creating a Template", + "githuburl":"" + }, + { + "uri":"dli_02_0246.html", + "product_code":"dli", + "code":"93", + "des":"This API is used to update existing templates in DLI.URI formatPUT /v1.0/{project_id}/streaming/job-templates/{template_id}PUT /v1.0/{project_id}/streaming/job-templates/", + "doc_type":"api", + "kw":"Updating a Template,APIs Related to Flink Job Templates,API Reference", + "title":"Updating a Template", + "githuburl":"" + }, + { + "uri":"dli_02_0247.html", + "product_code":"dli", + "code":"94", + "des":"This API is used to delete a template. A template used by jobs can also be deleted.URI formatDELETE /v1.0/{project_id}/streaming/job-templates/{template_id}DELETE /v1.0/{", + "doc_type":"api", + "kw":"Deleting a Template,APIs Related to Flink Job Templates,API Reference", + "title":"Deleting a Template", + "githuburl":"" + }, + { + "uri":"dli_02_0248.html", + "product_code":"dli", + "code":"95", + "des":"This API is used to query the job template list. Currently, only custom templates can be queried.URI formatGET /v1.0/{project_id}/streaming/job-templatesGET /v1.0/{projec", + "doc_type":"api", + "kw":"Querying the Template List,APIs Related to Flink Job Templates,API Reference", + "title":"Querying the Template List", + "githuburl":"" + }, + { + "uri":"dli_02_0186.html", + "product_code":"dli", + "code":"96", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"APIs Related to Enhanced Datasource Connections", + "title":"APIs Related to Enhanced Datasource Connections", + "githuburl":"" + }, + { + "uri":"dli_02_0187.html", + "product_code":"dli", + "code":"97", + "des":"This API is used to create an enhanced datasource connection with other services.URI formatPOST /v2.0/{project_id}/datasource/enhanced-connectionsPOST /v2.0/{project_id}/", + "doc_type":"api", + "kw":"Creating an Enhanced Datasource Connection,APIs Related to Enhanced Datasource Connections,API Refer", + "title":"Creating an Enhanced Datasource Connection", + "githuburl":"" + }, + { + "uri":"dli_02_0188.html", + "product_code":"dli", + "code":"98", + "des":"This API is used to delete an enhanced datasource connection.The connection that is being created cannot be deleted.URI formatDELETE /v2.0/{project_id}/datasource/enhance", + "doc_type":"api", + "kw":"Deleting an Enhanced Datasource Connection,APIs Related to Enhanced Datasource Connections,API Refer", + "title":"Deleting an Enhanced Datasource Connection", + "githuburl":"" + }, + { + "uri":"dli_02_0190.html", + "product_code":"dli", + "code":"99", + "des":"This API is used to query the list of created enhanced datasource connections.URI formatGET /v2.0/{project_id}/datasource/enhanced-connectionsGET /v2.0/{project_id}/datas", + "doc_type":"api", + "kw":"Querying an Enhanced Datasource Connection List,APIs Related to Enhanced Datasource Connections,API ", + "title":"Querying an Enhanced Datasource Connection List", + "githuburl":"" + }, + { + "uri":"dli_02_0189.html", + "product_code":"dli", + "code":"100", + "des":"This API is used to query the created enhanced datasource connections.URI formatGET /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}GET /v2.0/{project_i", + "doc_type":"api", + "kw":"Querying an Enhanced Datasource Connection,APIs Related to Enhanced Datasource Connections,API Refer", + "title":"Querying an Enhanced Datasource Connection", + "githuburl":"" + }, + { + "uri":"dli_02_0191.html", + "product_code":"dli", + "code":"101", + "des":"This API is used to bind a queue to a created enhanced datasource connection.URI formatPOST /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/associate-q", + "doc_type":"api", + "kw":"Binding a Queue,APIs Related to Enhanced Datasource Connections,API Reference", + "title":"Binding a Queue", + "githuburl":"" + }, + { + "uri":"dli_02_0192.html", + "product_code":"dli", + "code":"102", + "des":"This API is used to unbind a queue from an enhanced datasource connection.URI formatPOST /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/disassociate-q", + "doc_type":"api", + "kw":"Unbinding a Queue,APIs Related to Enhanced Datasource Connections,API Reference", + "title":"Unbinding a Queue", + "githuburl":"" + }, + { + "uri":"dli_02_0200.html", + "product_code":"dli", + "code":"103", + "des":"This API is used to modify the host information of a connected datasource. 
Only full overwriting is supported.URI formatPUT /v2.0/{project_id}/datasource/enhanced-connect", + "doc_type":"api", + "kw":"Modifying the Host Information,APIs Related to Enhanced Datasource Connections,API Reference", + "title":"Modifying the Host Information", + "githuburl":"" + }, + { + "uri":"dli_02_0256.html", + "product_code":"dli", + "code":"104", + "des":"This API is used to query the authorization about an enhanced datasource connection.URI formatGET /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/privi", + "doc_type":"api", + "kw":"Querying Authorization of an Enhanced Datasource Connection,APIs Related to Enhanced Datasource Conn", + "title":"Querying Authorization of an Enhanced Datasource Connection", + "githuburl":"" + }, + { + "uri":"dli_02_0257.html", + "product_code":"dli", + "code":"105", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Global Variable-related APIs", + "title":"Global Variable-related APIs", + "githuburl":"" + }, + { + "uri":"dli_02_0258.html", + "product_code":"dli", + "code":"106", + "des":"This API is used to create a global variable.URI formatPOST /v1.0/{project_id}/variablesPOST /v1.0/{project_id}/variablesParameter descriptionURI parametersParameterManda", + "doc_type":"api", + "kw":"Creating a Global Variable,Global Variable-related APIs,API Reference", + "title":"Creating a Global Variable", + "githuburl":"" + }, + { + "uri":"dli_02_0259.html", + "product_code":"dli", + "code":"107", + "des":"This API is used to delete a global variable.Only the user who creates a global variable can delete the variable.URI formatDELETE /v1.0/{project_id}/variables/{var_name}D", + "doc_type":"api", + "kw":"Deleting a Global Variable,Global Variable-related APIs,API Reference", + "title":"Deleting a Global Variable", + "githuburl":"" + }, + { + "uri":"dli_02_0260.html", + "product_code":"dli", + "code":"108", + "des":"This API is used to modify a global variable.URI formatPUT /v1.0/{project_id}/variables/{var_name}PUT /v1.0/{project_id}/variables/{var_name}Parameter descriptionURI para", + "doc_type":"api", + "kw":"Modifying a Global Variable,Global Variable-related APIs,API Reference", + "title":"Modifying a Global Variable", + "githuburl":"" + }, + { + "uri":"dli_02_0261.html", + "product_code":"dli", + "code":"109", + "des":"This API is used to query information about all global variables in the current project.URI formatGET /v1.0/{project_id}/variablesGET /v1.0/{project_id}/variablesParamete", + "doc_type":"api", + "kw":"Querying All Global Variables,Global Variable-related APIs,API Reference", + "title":"Querying All Global Variables", + "githuburl":"" + }, + { + "uri":"dli_02_0201.html", + "product_code":"dli", + "code":"110", + "des":"This section describes fine-grained permissions management for your DLI. If your account does not need individual IAM users, then you may skip this section.By default, ne", + "doc_type":"api", + "kw":"Permissions Policies and Supported Actions,API Reference", + "title":"Permissions Policies and Supported Actions", + "githuburl":"" + }, + { + "uri":"dli_02_0011.html", + "product_code":"dli", + "code":"111", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Public Parameters", + "title":"Public Parameters", + "githuburl":"" + }, + { + "uri":"dli_02_0012.html", + "product_code":"dli", + "code":"112", + "des":"Table 1 describes status codes.", + "doc_type":"api", + "kw":"Status Codes,Public Parameters,API Reference", + "title":"Status Codes", + "githuburl":"" + }, + { + "uri":"dli_02_0056.html", + "product_code":"dli", + "code":"113", + "des":"If an error occurs in API calling, no result is returned. Identify the cause of error based on the error codes of each API. If an error occurs in API calling, HTTP status", + "doc_type":"api", + "kw":"Error Code,Public Parameters,API Reference", + "title":"Error Code", + "githuburl":"" + }, + { + "uri":"dli_02_0183.html", + "product_code":"dli", + "code":"114", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Obtaining a Project ID,Public Parameters,API Reference", + "title":"Obtaining a Project ID", + "githuburl":"" + }, + { + "uri":"dli_02_0013.html", + "product_code":"dli", + "code":"115", + "des":"An account ID (domain-id) is required for some URLs when an API is called. To obtain an account ID, perform the following operations:Log in to the management console.Hove", + "doc_type":"api", + "kw":"Obtaining an Account ID,Public Parameters,API Reference", + "title":"Obtaining an Account ID", + "githuburl":"" + }, + { + "uri":"dli_02_00003.html", + "product_code":"dli", + "code":"116", + "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "doc_type":"api", + "kw":"Change History,API Reference", + "title":"Change History", + "githuburl":"" + } +] \ No newline at end of file diff --git a/docs/dli/api-ref/CLASS.TXT.json b/docs/dli/api-ref/CLASS.TXT.json new file mode 100644 index 00000000..b8d22363 --- /dev/null +++ b/docs/dli/api-ref/CLASS.TXT.json @@ -0,0 +1,1046 @@ +[ + { + "desc":"Public cloud APIs comply with the RESTful API design principles. REST-based Web services are organized into resources. Each resource is identified by one or more Uniform ", + "product_code":"dli", + "title":"Calling APIs", + "uri":"dli_02_0500.html", + "doc_type":"api", + "p_code":"", + "code":"1" + }, + { + "desc":"This section describes the APIs provided by DLI.", + "product_code":"dli", + "title":"Overview", + "uri":"dli_02_0181.html", + "doc_type":"api", + "p_code":"", + "code":"2" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Getting Started", + "uri":"dli_02_0306.html", + "doc_type":"api", + "p_code":"", + "code":"3" + }, + { + "desc":"This section describes how to create and query a queue using APIs.Queues created using this API will be bound to specified compute resources.It takes 6 to 10 minutes to s", + "product_code":"dli", + "title":"Creating a Queue", + "uri":"dli_02_0307.html", + "doc_type":"api", + "p_code":"3", + "code":"4" + }, + { + "desc":"This section describes how to create and query SQL jobs using APIs.It takes 6 to 10 minutes to start a job using a new queue for the first time.Creating a Queue: Create a", + "product_code":"dli", + "title":"Creating and Submitting a SQL Job", + "uri":"dli_02_0308.html", + "doc_type":"api", + "p_code":"3", + "code":"5" + }, + { + "desc":"This section describes how to create and submit Spark jobs using APIs.It takes 6 to 10 minutes to start a job using a new queue for the first time.Creating a Queue: Creat", + "product_code":"dli", + "title":"Creating and Submitting a Spark Job", + "uri":"dli_02_0309.html", + "doc_type":"api", + "p_code":"3", + "code":"6" + }, + { + "desc":"This section describes how to create and run a user-defined Flink job using APIs.It takes 6 to 10 minutes to start a job using a new queue for the first time.Creating a Q", + "product_code":"dli", + "title":"Creating and Submitting a Flink Job", + "uri":"dli_02_0310.html", + "doc_type":"api", + "p_code":"3", + "code":"7" + }, + { + "desc":"This section describes how to create an enhanced datasource connection using an API.It takes 6 to 10 minutes to start a job using a new queue for the first time.Before cr", + "product_code":"dli", + "title":"Creating and Using a Datasource Connection", + "uri":"dli_02_0311.html", + "doc_type":"api", + "p_code":"3", + "code":"8" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Permission-related APIs", + "uri":"dli_02_0036.html", + "doc_type":"api", + "p_code":"", + "code":"9" + }, + { + "desc":"This API is used to share a specific queue with other users. 
You can grant users with the permission to use the specified queue or revoke the permission.URI formatPUT /v1", + "product_code":"dli", + "title":"Granting Users with the Queue Usage Permission", + "uri":"dli_02_0037.html", + "doc_type":"api", + "p_code":"9", + "code":"10" + }, + { + "desc":"This API is used to query names of all users who can use a specified queue.URI formatGET /v1.0/{project_id}/queues/{queue_name}/usersGET /v1.0/{project_id}/queues/{queue_", + "product_code":"dli", + "title":"Querying Queue Users", + "uri":"dli_02_0038.html", + "doc_type":"api", + "p_code":"9", + "code":"11" + }, + { + "desc":"This API is used to grant database or table data usage permission to specified users.URI formatPUT /v1.0/{project_id}/user-authorizationPUT /v1.0/{project_id}/user-author", + "product_code":"dli", + "title":"Granting Data Permission to Users", + "uri":"dli_02_0039.html", + "doc_type":"api", + "p_code":"9", + "code":"12" + }, + { + "desc":"This API is used to query names of all users who have permission to use or access the database.URI formatGET /v1.0/{project_id}/databases/{database_name}/usersGET /v1.0/{pro", + "product_code":"dli", + "title":"Querying Database Users", + "uri":"dli_02_0040.html", + "doc_type":"api", + "p_code":"9", + "code":"13" + }, + { + "desc":"This API is used to query users who have permission to access the specified table or column in the table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables", + "product_code":"dli", + "title":"Querying Table Users", + "uri":"dli_02_0041.html", + "doc_type":"api", + "p_code":"9", + "code":"14" + }, + { + "desc":"This API is used to query the permission of a specified user on a table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/users/{user_name}GE", + "product_code":"dli", + "title":"Querying a User's Table Permissions", + "uri":"dli_02_0042.html", + "doc_type":"api", + "p_code":"9", + "code":"15" + }, + { + "desc":"This API is used to view the permissions granted to a user.URI formatGET /v1.0/{project_id}/authorization/privilegesGET /v1.0/{project_id}/authorization/privilegesParamet", + "product_code":"dli", + "title":"Viewing the Granted Permissions of a User", + "uri":"dli_02_0252.html", + "doc_type":"api", + "p_code":"9", + "code":"16" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Agency-related APIs", + "uri":"dli_02_0297.html", + "doc_type":"api", + "p_code":"", + "code":"17" + }, + { + "desc":"This API is used to obtain the agency information of a DLI user.URI formatGET /v2/{project_id}/agencyGET /v2/{project_id}/agencyParameter descriptionURI parametersParam", + "product_code":"dli", + "title":"Obtaining DLI Agency Information", + "uri":"dli_02_0298.html", + "doc_type":"api", + "p_code":"17", + "code":"18" + }, + { + "desc":"This API is used to create an agency for a DLI user.URI formatPOST /v2/{project_id}/agencyPOST /v2/{project_id}/agencyParameter descriptionURI parametersParameterMandat", + "product_code":"dli", + "title":"Creating a DLI Agency", + "uri":"dli_02_0299.html", + "doc_type":"api", + "p_code":"17", + "code":"19" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Queue-related APIs (Recommended)", + "uri":"dli_02_0193.html", + "doc_type":"api", + "p_code":"", + "code":"20" + }, + { + "desc":"This API is used to create a queue. The queue will be bound to specified compute resources.It takes 5 to 15 minutes to start a job using a new queue for the first time.UR", + "product_code":"dli", + "title":"Creating a Queue", + "uri":"dli_02_0194.html", + "doc_type":"api", + "p_code":"20", + "code":"21" + }, + { + "desc":"This API is used to delete a specified queue.If a task is being executed in a specified queue, the queue cannot be deleted.URI formatDELETE /v1.0/{project_id}/queues/{que", + "product_code":"dli", + "title":"Deleting a Queue", + "uri":"dli_02_0195.html", + "doc_type":"api", + "p_code":"20", + "code":"22" + }, + { + "desc":"This API is used to list all queues under the project.URI formatGET/v1.0/{project_id}/queuesGET/v1.0/{project_id}/queuesParameter descriptionURI parameterParameterMandato", + "product_code":"dli", + "title":"Querying All Queues", + "uri":"dli_02_0196.html", + "doc_type":"api", + "p_code":"20", + "code":"23" + }, + { + "desc":"This API is used to list details of a specific queue in a project.URI formatGET /v1.0/{project_id}/queues/{queue_name}GET /v1.0/{project_id}/queues/{queue_name}Parameter ", + "product_code":"dli", + "title":"Viewing Details of a Queue", + "uri":"dli_02_0016.html", + "doc_type":"api", + "p_code":"20", + "code":"24" + }, + { + "desc":"This API is used to restart, scale out, and scale in queues.Only SQL queues in the Available status can be restarted. (The queue status is Available only after the SQL jo", + "product_code":"dli", + "title":"Restarting, Scaling Out, and Scaling In Queues", + "uri":"dli_02_0249.html", + "doc_type":"api", + "p_code":"20", + "code":"25" + }, + { + "desc":"This API is used to send an address connectivity test request to a specified queue and insert the test address into the table.URI formatPOST /v1.0/{project_id}/queues/{q", + "product_code":"dli", + "title":"Creating an Address Connectivity Test Request", + "uri":"dli_02_0284.html", + "doc_type":"api", + "p_code":"20", + "code":"26" + }, + { + "desc":"This API is used to query the connectivity test result after the test is submitted.URI formatGET /v1.0/{project_id}/queues/{queue_name}/connection-test/{task_id}GET /v1", + "product_code":"dli", + "title":"Querying Connectivity Test Details of a Specified Address", + "uri":"dli_02_0285.html", + "doc_type":"api", + "p_code":"20", + "code":"27" + }, + { + "desc":"This API is used to create a scheduled CU change, that is, to create a scheduled CU change for a specified queue.URI formatPOST /v1/{project_id}/queues/{queue_name}/plan", + "product_code":"dli", + "title":"Creating a Scheduled CU Change", + "uri":"dli_02_0291.html", + "doc_type":"api", + "p_code":"20", + "code":"28" + }, + { + "desc":"This API is used to query the scheduled CU changes and list the changes of a specified queue.URI formatGET /v1/{project_id}/queues/{queue_name}/plansGET /v1/{project_id}/", + "product_code":"dli", + "title":"Viewing a Scheduled CU Change", + "uri":"dli_02_0292.html", + "doc_type":"api", + "p_code":"20", + "code":"29" + }, + { + "desc":"This API is used to delete scheduled CU changes in batches.URI formatPOST /v1/{project_id}/queues/{queue_name}/plans/batch-deletePOST /v1/{project_id}/queues/{queue_name}", + 
"product_code":"dli", + "title":"Deleting Scheduled CU Changes in Batches", + "uri":"dli_02_0293.html", + "doc_type":"api", + "p_code":"20", + "code":"30" + }, + { + "desc":"This API is used to delete a scheduled CU change for a queue with a specified ID.URI formatDELETE /v1/{project_id}/queues/{queue_name}/plans/{plan_id}DELETE /v1/{projec", + "product_code":"dli", + "title":"Deleting a Scheduled CU Change", + "uri":"dli_02_0294.html", + "doc_type":"api", + "p_code":"20", + "code":"31" + }, + { + "desc":"This API is used to modify a scheduled CU change for a queue with a specified ID.URI formatPUT /v1/{project_id}/queues/{queue_name}/plans/{plan_id}PUT /v1/{project_id}/", + "product_code":"dli", + "title":"Modifying a Scheduled CU Change", + "uri":"dli_02_0295.html", + "doc_type":"api", + "p_code":"20", + "code":"32" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"APIs Related to SQL Jobs", + "uri":"dli_02_0158.html", + "doc_type":"api", + "p_code":"", + "code":"33" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Database-related APIs", + "uri":"dli_02_0027.html", + "doc_type":"api", + "p_code":"33", + "code":"34" + }, + { + "desc":"This API is used to add a database.URI formatPOST /v1.0/{project_id}/databasesPOST /v1.0/{project_id}/databasesParameter descriptionURI parameterParameterMandatoryTypeDes", + "product_code":"dli", + "title":"Creating a Database", + "uri":"dli_02_0028.html", + "doc_type":"api", + "p_code":"34", + "code":"35" + }, + { + "desc":"This API is used to delete an empty database. If there are tables in the database to be deleted, delete all tables first. For details about the API used to delete tables,", + "product_code":"dli", + "title":"Deleting a Database", + "uri":"dli_02_0030.html", + "doc_type":"api", + "p_code":"34", + "code":"36" + }, + { + "desc":"This API is used to query the information about all the databases.URI formatGET /v1.0/{project_id}/databasesGET /v1.0/{project_id}/databasesParameter descriptionURI param", + "product_code":"dli", + "title":"Querying All Databases", + "uri":"dli_02_0029.html", + "doc_type":"api", + "p_code":"34", + "code":"37" + }, + { + "desc":"This API is used to modify the owner of a database.URI formatPUT /v1.0/{project_id}/databases/{database_name}/ownerPUT /v1.0/{project_id}/databases/{database_name}/ownerP", + "product_code":"dli", + "title":"Modifying a Database Owner", + "uri":"dli_02_0164.html", + "doc_type":"api", + "p_code":"34", + "code":"38" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Table-related APIs", + "uri":"dli_02_0031.html", + "doc_type":"api", + "p_code":"33", + "code":"39" + }, + { + "desc":"This API is used to create a table.This API is a synchronous API.URI formatPOST /v1.0/{project_id}/databases/{database_name}/tablesPOST /v1.0/{project_id}/databases/{data", + "product_code":"dli", + "title":"Creating a Table", + "uri":"dli_02_0034.html", + "doc_type":"api", + "p_code":"39", + "code":"40" + }, + { + "desc":"This API is used to delete a specified table.URI formatDELETE /v1.0/{project_id}/databases/{database_name}/tables/{table_name}DELETE /v1.0/{project_id}/databases/{databas", + "product_code":"dli", + "title":"Deleting a Table", + "uri":"dli_02_0035.html", + "doc_type":"api", + "p_code":"39", + "code":"41" + }, + { + "desc":"This API is used to query information about tables that meet the filtering criteria or all the tables in the specified database.URI formatGET /v1.0/{project_id}/databases", + "product_code":"dli", + "title":"Querying All Tables (Recommended)", + "uri":"dli_02_0105.html", + "doc_type":"api", + "p_code":"39", + "code":"42" + }, + { + "desc":"This API is used to describe metadata information in the specified table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}GET /v1.0/{project_", + "product_code":"dli", + "title":"Describing the Table Information", + "uri":"dli_02_0033.html", + "doc_type":"api", + "p_code":"39", + "code":"43" + }, + { + "desc":"This API is used to preview the first ten rows of a table.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/previewGET /v1.0/{project_id}/dat", + "product_code":"dli", + "title":"Previewing Table Content", + "uri":"dli_02_0108.html", + "doc_type":"api", + "p_code":"39", + "code":"44" + }, + { + "desc":"This API is used to obtain the partition list.URI formatGET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/partitionsGET /v1.0/{project_id}/databases/{d", + "product_code":"dli", + "title":"Obtaining the Partition List", + "uri":"dli_02_0250.html", + "doc_type":"api", + "p_code":"39", + "code":"45" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Job-related APIs", + "uri":"dli_02_0017.html", + "doc_type":"api", + "p_code":"33", + "code":"46" + }, + { + "desc":"This API is used to import data from a file to a DLI or OBS table. Currently, only OBS data can be imported to a DLI or OBS table.This API is asynchronous.When importing ", + "product_code":"dli", + "title":"Importing Data", + "uri":"dli_02_0019.html", + "doc_type":"api", + "p_code":"46", + "code":"47" + }, + { + "desc":"This API is used to export data from a DLI table to a file.This API is asynchronous.Currently, data can be exported only from a DLI table to OBS, and the OBS path must be", + "product_code":"dli", + "title":"Exporting Data", + "uri":"dli_02_0020.html", + "doc_type":"api", + "p_code":"46", + "code":"48" + }, + { + "desc":"This API is used to submit jobs to a queue using SQL statements.The job types support DDL, DCL, IMPORT, QUERY, and INSERT. 
The IMPORT function is the same as that describ", + "product_code":"dli", + "title":"Submitting a SQL Job (Recommended)", + "uri":"dli_02_0102.html", + "doc_type":"api", + "p_code":"46", + "code":"49" + }, + { + "desc":"This API is used to cancel a submitted job. If execution of a job completes or fails, this job cannot be canceled.URI formatDELETE /v1.0/{project_id}/jobs/{job_id}DELETE ", + "product_code":"dli", + "title":"Canceling a Job (Recommended)", + "uri":"dli_02_0104.html", + "doc_type":"api", + "p_code":"46", + "code":"50" + }, + { + "desc":"This API is used to query information about all jobs in the current project.URI formatGET /v1.0/{project_id}/jobsGET /v1.0/{project_id}/jobsParameter descriptionURI param", + "product_code":"dli", + "title":"Querying All Jobs", + "uri":"dli_02_0025.html", + "doc_type":"api", + "p_code":"46", + "code":"51" + }, + { + "desc":"This API is used to view the job execution result after a job is executed using SQL query statements. Currently, you can only query execution results of jobs of the QUERY", + "product_code":"dli", + "title":"Previewing SQL Job Query Results", + "uri":"dli_02_0312.html", + "doc_type":"api", + "p_code":"46", + "code":"52" + }, + { + "desc":"This API is used to query the status of a submitted job.URI formatGET /v1.0/{project_id}/jobs/{job_id}/statusGET /v1.0/{project_id}/jobs/{job_id}/statusParameter descript", + "product_code":"dli", + "title":"Querying Job Status", + "uri":"dli_02_0021.html", + "doc_type":"api", + "p_code":"46", + "code":"53" + }, + { + "desc":"This API is used to query details about jobs, including databasename, tablename, file size, and export mode.URI formatGET/v1.0/{project_id}/jobs/{job_id}/detailGET/v1.0/{", + "product_code":"dli", + "title":"Querying Job Details", + "uri":"dli_02_0022.html", + "doc_type":"api", + "p_code":"46", + "code":"54" + }, + { + "desc":"This API is used to check the SQL syntax.URI formatPOST /v1.0/{project_id}/jobs/check-sqlPOST /v1.0/{project_id}/jobs/check-sqlParameter descriptionURI parametersParamete", + "product_code":"dli", + "title":"Checking SQL Syntax", + "uri":"dli_02_0107.html", + "doc_type":"api", + "p_code":"46", + "code":"55" + }, + { + "desc":"This API is used to export results returned from the query using SQL statements to OBS. Only the query result of QUERY jobs can be exported.This API is asynchronous.Curre", + "product_code":"dli", + "title":"Exporting Query Results", + "uri":"dli_02_0024.html", + "doc_type":"api", + "p_code":"46", + "code":"56" + }, + { + "desc":"This API is used to obtain the job execution progress. If a job is being executed, information about its subjobs can be obtained. If a job has just started or has ended, ", + "product_code":"dli", + "title":"Querying the Job Execution Progress", + "uri":"dli_02_0296.html", + "doc_type":"api", + "p_code":"46", + "code":"57" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Package Group-related APIs", + "uri":"dli_02_0166.html", + "doc_type":"api", + "p_code":"", + "code":"58" + }, + { + "desc":"This API is used to upload a package group to a project. 
The function is similar to creating a package on the management console.URI formatPOST /v2.0/{project_id}/resourc", + "product_code":"dli", + "title":"Uploading a Package Group", + "uri":"dli_02_0130.html", + "doc_type":"api", + "p_code":"58", + "code":"59" + }, + { + "desc":"This API is used to query all resources in a project, including groups.URI formatGET /v2.0/{project_id}/resourcesGET /v2.0/{project_id}/resourcesParameter descriptionURI ", + "product_code":"dli", + "title":"Querying Package Group List", + "uri":"dli_02_0168.html", + "doc_type":"api", + "p_code":"58", + "code":"60" + }, + { + "desc":"This API is used to upload a group of JAR packages to a project.When a resource group with the same name is uploaded, the new group overwrites the old group.URI formatPOS", + "product_code":"dli", + "title":"Uploading a JAR Package Group", + "uri":"dli_02_0169.html", + "doc_type":"api", + "p_code":"58", + "code":"61" + }, + { + "desc":"This API is used to upload a group of PyFile packages to a project.When a group with the same name as the PyFile package is uploaded, the new group overwrites the old gro", + "product_code":"dli", + "title":"Uploading a PyFile Package Group", + "uri":"dli_02_0170.html", + "doc_type":"api", + "p_code":"58", + "code":"62" + }, + { + "desc":"This API is used to upload a group of File packages to a project.When the File package group with the same name is uploaded, the new group overwrites the old group.URI fo", + "product_code":"dli", + "title":"Uploading a File Package Group", + "uri":"dli_02_0171.html", + "doc_type":"api", + "p_code":"58", + "code":"63" + }, + { + "desc":"This API is used to query resource information of a package group in a Project.URI formatGET /v2.0/{project_id}/resources/{resource_name}GET /v2.0/{project_id}/resources/", + "product_code":"dli", + "title":"Querying Resource Packages in a Group", + "uri":"dli_02_0172.html", + "doc_type":"api", + "p_code":"58", + "code":"64" + }, + { + "desc":"This API is used to delete resource packages in a group in a Project.URI formatDELETE /v2.0/{project_id}/resources/{resource_name}DELETE /v2.0/{project_id}/resources/{res", + "product_code":"dli", + "title":"Deleting a Resource Package from a Group", + "uri":"dli_02_0173.html", + "doc_type":"api", + "p_code":"58", + "code":"65" + }, + { + "desc":"This API is used to change the owner of a program package.URI formatPUT /v2.0/{project_id}/resources/ownerPUT /v2.0/{project_id}/resources/ownerParameter descriptionURI p", + "product_code":"dli", + "title":"Changing the Owner of a Group or Resource Package", + "uri":"dli_02_0253.html", + "doc_type":"api", + "p_code":"58", + "code":"66" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"APIs Related to Flink Jobs", + "uri":"dli_02_0223.html", + "doc_type":"api", + "p_code":"", + "code":"67" + }, + { + "desc":"This API is used to grant DLI the permission to access OBS buckets for saving job checkpoints and run logs.URI formatPOST /v1.0/{project_id}/dli/obs-authorizePOST /v1.0/{", + "product_code":"dli", + "title":"Granting OBS Permissions to DLI", + "uri":"dli_02_0225.html", + "doc_type":"api", + "p_code":"67", + "code":"68" + }, + { + "desc":"This API is used to create a Flink streaming SQL job.URI formatPOST /v1.0/{project_id}/streaming/sql-jobsPOST /v1.0/{project_id}/streaming/sql-jobsParameter descriptionUR", + "product_code":"dli", + "title":"Creating a SQL Job", + "uri":"dli_02_0228.html", + "doc_type":"api", + "p_code":"67", + "code":"69" + }, + { + "desc":"This API is used to modify a Flink SQL job.URI formatPUT /v1.0/{project_id}/streaming/sql-jobs/{job_id}PUT /v1.0/{project_id}/streaming/sql-jobs/{job_id}Parameter descrip", + "product_code":"dli", + "title":"Updating a SQL Job", + "uri":"dli_02_0229.html", + "doc_type":"api", + "p_code":"67", + "code":"70" + }, + { + "desc":"This API is used to create custom jobs, which currently support the JAR format and run in dedicated queues.URI formatPOST /v1.0/{project_id}/streaming/flink-jobsPOST /v1.", + "product_code":"dli", + "title":"Creating a Flink Jar job", + "uri":"dli_02_0230.html", + "doc_type":"api", + "p_code":"67", + "code":"71" + }, + { + "desc":"This API is used to update custom jobs, which currently support the JAR format and run in dedicated queues.URI formatPUT /v1.0/{project_id}/streaming/flink-jobs/{job_id}P", + "product_code":"dli", + "title":"Updating a Flink Jar Job", + "uri":"dli_02_0231.html", + "doc_type":"api", + "p_code":"67", + "code":"72" + }, + { + "desc":"This API is used to trigger batch job running.URI formatPOST /v1.0/{project_id}/streaming/jobs/runPOST /v1.0/{project_id}/streaming/jobs/runParameter descriptionURI param", + "product_code":"dli", + "title":"Running Jobs in Batches", + "uri":"dli_02_0233.html", + "doc_type":"api", + "p_code":"67", + "code":"73" + }, + { + "desc":"This API is used to query the list of the current user's jobs. You can set the job ID as the ID and query jobs whose IDs are greater than or less than the ID. 
You can als", + "product_code":"dli", + "title":"Querying the Job List", + "uri":"dli_02_0234.html", + "doc_type":"api", + "p_code":"67", + "code":"74" + }, + { + "desc":"This API is used to query details of a job.URI formatGET /v1.0/{project_id}/streaming/jobs/{job_id}GET /v1.0/{project_id}/streaming/jobs/{job_id}Parameter descriptionURI ", + "product_code":"dli", + "title":"Querying Job Details", + "uri":"dli_02_0235.html", + "doc_type":"api", + "p_code":"67", + "code":"75" + }, + { + "desc":"This API is used to query a job execution plan.URI formatGET /v1.0/{project_id}/streaming/jobs/{job_id}/execute-graphGET /v1.0/{project_id}/streaming/jobs/{job_id}/execut", + "product_code":"dli", + "title":"Querying the Job Execution Plan", + "uri":"dli_02_0236.html", + "doc_type":"api", + "p_code":"67", + "code":"76" + }, + { + "desc":"This API is used to stop running jobs in batches.URI formatPOST /v1.0/{project_id}/streaming/jobs/stopPOST /v1.0/{project_id}/streaming/jobs/stopParameter descriptionURI ", + "product_code":"dli", + "title":"Stopping Jobs in Batches", + "uri":"dli_02_0241.html", + "doc_type":"api", + "p_code":"67", + "code":"77" + }, + { + "desc":"This API is used to delete a Flink job at any state.The job records will not be deleted.URI formatDELETE /v1.0/{project_id}/streaming/jobs/{job_id}DELETE /v1.0/{project_i", + "product_code":"dli", + "title":"Deleting a Job", + "uri":"dli_02_0242.html", + "doc_type":"api", + "p_code":"67", + "code":"78" + }, + { + "desc":"This API is used to batch delete jobs at any state.URI formatPOST /v1.0/{project_id}/streaming/jobs/deletePOST /v1.0/{project_id}/streaming/jobs/deleteParameter descripti", + "product_code":"dli", + "title":"Deleting Jobs in Batches", + "uri":"dli_02_0243.html", + "doc_type":"api", + "p_code":"67", + "code":"79" + }, + { + "desc":"This API is used to export Flink job data.URI formatPOST /v1.0/{project_id}/streaming/jobs/exportPOST /v1.0/{project_id}/streaming/jobs/exportParameter descriptionURI par", + "product_code":"dli", + "title":"Exporting a Flink Job", + "uri":"dli_02_0254.html", + "doc_type":"api", + "p_code":"67", + "code":"80" + }, + { + "desc":"This API is used to import Flink job data.URI formatPOST /v1.0/{project_id}/streaming/jobs/importPOST /v1.0/{project_id}/streaming/jobs/importParameter descriptionURI par", + "product_code":"dli", + "title":"Importing a Flink Job", + "uri":"dli_02_0255.html", + "doc_type":"api", + "p_code":"67", + "code":"81" + }, + { + "desc":"This API is used to generate a static stream graph for a Flink SQL job.URI formatPOST /v3/{project_id}/streaming/jobs/{job_id}/gen-graphPOST /v3/{project_id}/streaming/jo", + "product_code":"dli", + "title":"Generating a Static Stream Graph for a Flink SQL Job", + "uri":"dli_02_0316.html", + "doc_type":"api", + "p_code":"67", + "code":"82" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"APIs Related to Spark jobs", + "uri":"dli_02_0109.html", + "doc_type":"api", + "p_code":"", + "code":"83" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. 
The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Batch Processing-related APIs", + "uri":"dli_02_0162.html", + "doc_type":"api", + "p_code":"83", + "code":"84" + }, + { + "desc":"This API is used to create a batch processing job in a queue.URI formatPOST /v2.0/{project_id}/batchesPOST /v2.0/{project_id}/batchesParameter descriptionURI parameterPar", + "product_code":"dli", + "title":"Creating a Batch Processing Job", + "uri":"dli_02_0124.html", + "doc_type":"api", + "p_code":"84", + "code":"85" + }, + { + "desc":"This API is used to cancel a batch processing job.Batch processing jobs in the Successful or Failed state cannot be canceled.URI formatDELETE /v2.0/{project_id}/batches/{", + "product_code":"dli", + "title":"Canceling a Batch Processing Job", + "uri":"dli_02_0129.html", + "doc_type":"api", + "p_code":"84", + "code":"86" + }, + { + "desc":"This API is used to obtain the list of batch processing jobs in a queue of a project.URI formatGET /v2.0/{project_id}/batchesGET /v2.0/{project_id}/batchesParameter descr", + "product_code":"dli", + "title":"Obtaining the List of Batch Processing Jobs", + "uri":"dli_02_0125.html", + "doc_type":"api", + "p_code":"84", + "code":"87" + }, + { + "desc":"This API is used to query details about a batch processing job based on the job ID.URI formatGET /v2.0/{project_id}/batches/{batch_id}GET /v2.0/{project_id}/batches/{batc", + "product_code":"dli", + "title":"Querying Batch Job Details", + "uri":"dli_02_0126.html", + "doc_type":"api", + "p_code":"84", + "code":"88" + }, + { + "desc":"This API is used to obtain the execution status of a batch processing job.URI formatGET /v2.0/{project_id}/batches/{batch_id}/stateGET /v2.0/{project_id}/batches/{batch_i", + "product_code":"dli", + "title":"Querying a Batch Job Status", + "uri":"dli_02_0127.html", + "doc_type":"api", + "p_code":"84", + "code":"89" + }, + { + "desc":"This API is used to query the back-end logs of batch processing jobs.URI formatGET /v2.0/{project_id}/batches/{batch_id}/logGET /v2.0/{project_id}/batches/{batch_id}/logP", + "product_code":"dli", + "title":"Querying Batch Job Logs", + "uri":"dli_02_0128.html", + "doc_type":"api", + "p_code":"84", + "code":"90" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"APIs Related to Flink Job Templates", + "uri":"dli_02_0244.html", + "doc_type":"api", + "p_code":"", + "code":"91" + }, + { + "desc":"This API is used to create a user template for the DLI service. A maximum of 100 user templates can be created.URI formatPOST /v1.0/{project_id}/streaming/job-templatesPO", + "product_code":"dli", + "title":"Creating a Template", + "uri":"dli_02_0245.html", + "doc_type":"api", + "p_code":"91", + "code":"92" + }, + { + "desc":"This API is used to update existing templates in DLI.URI formatPUT /v1.0/{project_id}/streaming/job-templates/{template_id}PUT /v1.0/{project_id}/streaming/job-templates/", + "product_code":"dli", + "title":"Updating a Template", + "uri":"dli_02_0246.html", + "doc_type":"api", + "p_code":"91", + "code":"93" + }, + { + "desc":"This API is used to delete a template. 
A template used by jobs can also be deleted.URI formatDELETE /v1.0/{project_id}/streaming/job-templates/{template_id}DELETE /v1.0/{", + "product_code":"dli", + "title":"Deleting a Template", + "uri":"dli_02_0247.html", + "doc_type":"api", + "p_code":"91", + "code":"94" + }, + { + "desc":"This API is used to query the job template list. Currently, only custom templates can be queried.URI formatGET /v1.0/{project_id}/streaming/job-templatesGET /v1.0/{projec", + "product_code":"dli", + "title":"Querying the Template List", + "uri":"dli_02_0248.html", + "doc_type":"api", + "p_code":"91", + "code":"95" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"APIs Related to Enhanced Datasource Connections", + "uri":"dli_02_0186.html", + "doc_type":"api", + "p_code":"", + "code":"96" + }, + { + "desc":"This API is used to create an enhanced datasource connection with other services.URI formatPOST /v2.0/{project_id}/datasource/enhanced-connectionsPOST /v2.0/{project_id}/", + "product_code":"dli", + "title":"Creating an Enhanced Datasource Connection", + "uri":"dli_02_0187.html", + "doc_type":"api", + "p_code":"96", + "code":"97" + }, + { + "desc":"This API is used to delete an enhanced datasource connection.The connection that is being created cannot be deleted.URI formatDELETE /v2.0/{project_id}/datasource/enhance", + "product_code":"dli", + "title":"Deleting an Enhanced Datasource Connection", + "uri":"dli_02_0188.html", + "doc_type":"api", + "p_code":"96", + "code":"98" + }, + { + "desc":"This API is used to query the list of created enhanced datasource connections.URI formatGET /v2.0/{project_id}/datasource/enhanced-connectionsGET /v2.0/{project_id}/datas", + "product_code":"dli", + "title":"Querying an Enhanced Datasource Connection List", + "uri":"dli_02_0190.html", + "doc_type":"api", + "p_code":"96", + "code":"99" + }, + { + "desc":"This API is used to query the created enhanced datasource connections.URI formatGET /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}GET /v2.0/{project_i", + "product_code":"dli", + "title":"Querying an Enhanced Datasource Connection", + "uri":"dli_02_0189.html", + "doc_type":"api", + "p_code":"96", + "code":"100" + }, + { + "desc":"This API is used to bind a queue to a created enhanced datasource connection.URI formatPOST /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/associate-q", + "product_code":"dli", + "title":"Binding a Queue", + "uri":"dli_02_0191.html", + "doc_type":"api", + "p_code":"96", + "code":"101" + }, + { + "desc":"This API is used to unbind a queue from an enhanced datasource connection.URI formatPOST /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/disassociate-q", + "product_code":"dli", + "title":"Unbinding a Queue", + "uri":"dli_02_0192.html", + "doc_type":"api", + "p_code":"96", + "code":"102" + }, + { + "desc":"This API is used to modify the host information of a connected datasource. 
Only full overwriting is supported.URI formatPUT /v2.0/{project_id}/datasource/enhanced-connect", + "product_code":"dli", + "title":"Modifying the Host Information", + "uri":"dli_02_0200.html", + "doc_type":"api", + "p_code":"96", + "code":"103" + }, + { + "desc":"This API is used to query the authorization about an enhanced datasource connection.URI formatGET /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/privi", + "product_code":"dli", + "title":"Querying Authorization of an Enhanced Datasource Connection", + "uri":"dli_02_0256.html", + "doc_type":"api", + "p_code":"96", + "code":"104" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Global Variable-related APIs", + "uri":"dli_02_0257.html", + "doc_type":"api", + "p_code":"", + "code":"105" + }, + { + "desc":"This API is used to create a global variable.URI formatPOST /v1.0/{project_id}/variablesPOST /v1.0/{project_id}/variablesParameter descriptionURI parametersParameterManda", + "product_code":"dli", + "title":"Creating a Global Variable", + "uri":"dli_02_0258.html", + "doc_type":"api", + "p_code":"105", + "code":"106" + }, + { + "desc":"This API is used to delete a global variable.Only the user who creates a global variable can delete the variable.URI formatDELETE /v1.0/{project_id}/variables/{var_name}D", + "product_code":"dli", + "title":"Deleting a Global Variable", + "uri":"dli_02_0259.html", + "doc_type":"api", + "p_code":"105", + "code":"107" + }, + { + "desc":"This API is used to modify a global variable.URI formatPUT /v1.0/{project_id}/variables/{var_name}PUT /v1.0/{project_id}/variables/{var_name}Parameter descriptionURI para", + "product_code":"dli", + "title":"Modifying a Global Variable", + "uri":"dli_02_0260.html", + "doc_type":"api", + "p_code":"105", + "code":"108" + }, + { + "desc":"This API is used to query information about all global variables in the current project.URI formatGET /v1.0/{project_id}/variablesGET /v1.0/{project_id}/variablesParamete", + "product_code":"dli", + "title":"Querying All Global Variables", + "uri":"dli_02_0261.html", + "doc_type":"api", + "p_code":"105", + "code":"109" + }, + { + "desc":"This section describes fine-grained permissions management for your DLI. If your account does not need individual IAM users, then you may skip this section.By default, ne", + "product_code":"dli", + "title":"Permissions Policies and Supported Actions", + "uri":"dli_02_0201.html", + "doc_type":"api", + "p_code":"", + "code":"110" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Public Parameters", + "uri":"dli_02_0011.html", + "doc_type":"api", + "p_code":"", + "code":"111" + }, + { + "desc":"Table 1 describes status codes.", + "product_code":"dli", + "title":"Status Codes", + "uri":"dli_02_0012.html", + "doc_type":"api", + "p_code":"111", + "code":"112" + }, + { + "desc":"If an error occurs in API calling, no result is returned. Identify the cause of error based on the error codes of each API. 
If an error occurs in API calling, HTTP status", + "product_code":"dli", + "title":"Error Code", + "uri":"dli_02_0056.html", + "doc_type":"api", + "p_code":"111", + "code":"113" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Obtaining a Project ID", + "uri":"dli_02_0183.html", + "doc_type":"api", + "p_code":"111", + "code":"114" + }, + { + "desc":"An account ID (domain-id) is required for some URLs when an API is called. To obtain an account ID, perform the following operations:Log in to the management console.Hove", + "product_code":"dli", + "title":"Obtaining an Account ID", + "uri":"dli_02_0013.html", + "doc_type":"api", + "p_code":"111", + "code":"115" + }, + { + "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.", + "product_code":"dli", + "title":"Change History", + "uri":"dli_02_00003.html", + "doc_type":"api", + "p_code":"", + "code":"116" + } +] \ No newline at end of file diff --git a/docs/dli/api-ref/PARAMETERS.txt b/docs/dli/api-ref/PARAMETERS.txt new file mode 100644 index 00000000..6da8d5f0 --- /dev/null +++ b/docs/dli/api-ref/PARAMETERS.txt @@ -0,0 +1,3 @@ +version="" +language="en-us" +type="" \ No newline at end of file diff --git a/docs/dli/api-ref/dli_02_00003.html b/docs/dli/api-ref/dli_02_00003.html new file mode 100644 index 00000000..69640514 --- /dev/null +++ b/docs/dli/api-ref/dli_02_00003.html @@ -0,0 +1,20 @@ + + +
Released On + |
+Description + |
+
---|---|
2023-01-30 + |
+This issue is the first official release. + |
+
Table 1 describes status codes.
+ +Status Code + |
+Message + |
+Description + |
+
---|---|---|
100 + |
+Continue + |
+The client should continue with its request. +This interim response is used to inform the client that part of the request has been received and has not yet been rejected by the server. + |
+
101 + |
+Switching Protocols + |
+The protocol should be switched. The protocol can only be switched to a newer protocol. +For example, the current HTTP protocol is switched to a later version of HTTP. + |
+
200 + |
+Success + |
+The request has been fulfilled. This indicates that the server has returned the requested resource.
 + |
+
201 + |
+Created + |
+The request is successful and the server has created a new resource. + |
+
202 + |
+Accepted + |
+The request has been accepted, but the processing has not been completed. + |
+
203 + |
+Non-Authoritative Information + |
+Non-authoritative information. The request was successful, but the returned metadata may come from a local or third-party copy rather than the origin server.
 + |
+
204 + |
+NoContent + |
+The server has successfully processed the request, but does not return any content. +The status code is returned in response to an HTTP OPTIONS request. + |
+
205 + |
+Reset Content + |
+The server has successfully processed the request, but does not return any content. + |
+
206 + |
+Partial Content + |
+The server has successfully processed the partial GET request. + |
+
300 + |
+Multiple Choices + |
+There are multiple options for the location of the requested resource. The response contains a list of resource characteristics and addresses from which a user terminal (such as a browser) can choose the most appropriate one. + |
+
301 + |
+Moved Permanently + |
+The requested resource has been assigned a new permanent URI, and the new URI is contained in the response. + |
+
302 + |
+Found + |
+The requested resource resides temporarily under a different URI. + |
+
303 + |
+See Other + |
+The response to the request can be found under a different URI,
+and should be retrieved using a GET method.
 + |
+
304 + |
+Not Modified + |
+The requested resource has not been modified. In such a case, there is no need to retransmit the resource since the client still has a previously-downloaded copy. + |
+
305 + |
+Use Proxy + |
+The requested resource is available only through a proxy. + |
+
306 + |
+Unused + |
+The HTTP status code is no longer used. + |
+
400 + |
+BadRequest + |
+Invalid request. +The client should not repeat the request without modifications. + |
+
401 + |
+Unauthorized + |
+This status code is returned after the client provides the authentication information, indicating that the authentication information is incorrect or invalid. + |
+
402 + |
+Payment Required + |
+This status code is reserved for future use. + |
+
403 + |
+Forbidden + |
+The server has received the request and understood it, but the server is refusing to respond to it. +The client should modify the request instead of re-initiating it. + |
+
404 + |
+NotFound + |
+The requested resource cannot be found. +The client should not repeat the request without modifications. + |
+
405 + |
+MethodNotAllowed + |
+A request method is not supported for the requested resource. +The client should not repeat the request without modifications. + |
+
406 + |
+Not Acceptable + |
+The server could not fulfill the request according to the content characteristics of the request. + |
+
407 + |
+Proxy Authentication Required + |
+This code is similar to 401, but indicates that the client must first authenticate itself with the proxy. + |
+
408 + |
+Request Time-out + |
+The server has timed out waiting for the request. +The client may repeat the request without modifications at any time later. + |
+
409 + |
+Conflict + |
+The request could not be processed due to a conflict in the request. +This status code indicates that the resource that the client is attempting to create already exists, or that the request has failed to be processed because of the update of the conflict request. + |
+
410 + |
+Gone + |
+The requested resource cannot be found. +The status code indicates that the requested resource has been deleted permanently. + |
+
411 + |
+Length Required + |
+The server is refusing to process the request without a defined Content-Length. + |
+
412 + |
+Precondition Failed + |
+The server does not meet one of the preconditions that the requester puts on the request. + |
+
413 + |
+Request Entity Too Large + |
+The server is refusing to process a request because the request entity is too large for the server to process. The server may disable the connection to prevent the client from sending requests consecutively. If the server is only temporarily unable to process the request, the response will contain a Retry-After header field. + |
+
414 + |
+Request-URI Too Large + |
+The Request-URI is too long for the server to process. + |
+
415 + |
+Unsupported Media Type + |
+The server does not support the media type in the request. + |
+
416 + |
+Requested range not satisfiable + |
+The requested range is invalid. + |
+
417 + |
+Expectation Failed + |
+The server has failed to meet the requirements of the Expect request-header field. + |
+
422 + |
+UnprocessableEntity + |
+The request was well-formed but was unable to be followed due to semantic errors. + |
+
429 + |
+TooManyRequests + |
+The client sends excessive requests to the server within a given time (exceeding the limit on the access frequency of the client), or the server receives excessive requests within a given time (beyond its processing capability). In this case, the client should resend the request after the time specified in the Retry-After header of the response has elapsed. + |
+
500 + |
+InternalServerError + |
+The server encountered an internal error that prevented it from fulfilling the request.
 + |
+
501 + |
+Not Implemented + |
+The server does not support the requested function. + |
+
502 + |
+Bad Gateway + |
+The server was acting as a gateway or proxy and received an invalid response from the upstream server.
 + |
+
503 + |
+ServiceUnavailable + |
+The server is currently unable to handle the request, for example, because it is overloaded or down for maintenance.
+The client may repeat the request after a delay.
 + |
+
504 + |
+ServerTimeout + |
+Acting as a gateway or proxy, the server did not receive a timely response from the upstream server. This status code is returned to the client only when the Timeout parameter is specified in the request.
 + |
+
505 + |
+HTTP Version not supported + |
+The server does not support the HTTP protocol version used in the request. + |
+
An account ID (domain-id) is required for some URLs when an API is called. To obtain an account ID, perform the following operations:
+This API is used to list details of a specific queue in a project.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
queue_name + |
+Yes + |
+String + |
+Specifies the name of a queue to be queried. + NOTE:
+The queue name is case-insensitive. Uppercase letters are automatically converted to lowercase letters.
 + |
+
None
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
is_success + |
+No + |
+Boolean + |
+Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. + |
+
message + |
+No + |
+String + |
+System prompt. If execution succeeds, the parameter setting may be left blank. + |
+
queueName + |
+No + |
+String + |
+Name of a queue. + NOTE:
+The queue name is case-insensitive. Uppercase letters are automatically converted to lowercase letters.
 + |
+
description + |
+No + |
+String + |
+Queue description. + |
+
owner + |
+No + |
+String + |
+User who creates a queue. + |
+
create_time + |
+No + |
+Long + |
+Time when the queue is created. The timestamp is expressed in milliseconds. + |
+
queueType + |
+No + |
+String + |
+Indicates the queue type. The options are sql and general.
+
If this parameter is not specified, the default value sql is used. + |
+
cuCount + |
+No + |
+Integer + |
+Number of compute units (CUs) bound to a queue, that is, the number of CUs in the current queue. + |
+
resource_id + |
+No + |
+String + |
+Resource ID of a queue. + |
+
resource_mode + |
+No + |
+Integer + |
+Resource mode.
+0: shared resource mode
+1: exclusive resource mode
|
+
enterprise_project_id + |
+No + |
+String + |
+Enterprise project ID. +0 indicates the default enterprise project. + NOTE:
+Users who have enabled Enterprise Management can set this parameter to bind a specified project. + |
+
cu_spec + |
+No + |
+Integer + |
+Specifications of a queue. For a queue whose billing mode is yearly/monthly, this parameter indicates the CU value of the yearly/monthly part. For a pay-per-use queue, this parameter indicates the initial value when a user purchases a queue. + |
+
cu_scale_out_limit + |
+No + |
+Integer + |
+Upper limit of the CU value for elastic scaling of the current queue. + |
+
cu_scale_in_limit + |
+No + |
+Integer + |
+Lower limit of the CU value for elastic scaling of the current queue. + |
+
None
+{ + "is_success": true, + "message": "", + "owner": "testuser", + "description": "", + "queueName": "test", + "create_time": 1587613028851, + "queueType": "general", + "cuCount": 16, + "resource_id": "03d51b88-db63-4611-b779-9a72ba0cf58b", + "resource_mode": 0 +, + "resource_type": "vm", + "cu_spec": 16 +}+
Table 3 describes the status code.
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error information. For details, see Error Code.
 +
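For reference, the following is a minimal Python sketch of calling this API with the requests library. The endpoint, project ID, queue name, and IAM token are placeholder assumptions; substitute the values for your own region and account.

# Minimal sketch: query queue details via GET /v1.0/{project_id}/queues/{queue_name}.
# endpoint, project_id, queue_name, and the token below are placeholder assumptions.
import requests

endpoint = "https://dli.example-region.myhuaweicloud.com"  # assumed regional DLI endpoint
project_id = "<your-project-id>"
queue_name = "test"
headers = {"X-Auth-Token": "<your-IAM-token>"}  # token-based authentication assumed

resp = requests.get(f"{endpoint}/v1.0/{project_id}/queues/{queue_name}", headers=headers)
resp.raise_for_status()
queue = resp.json()
print(queue.get("queueType"), queue.get("cuCount"))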
+This API is used to import data from a file to a DLI or OBS table. Currently, only OBS data can be imported to a DLI or OBS table.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
data_path + |
+Yes + |
+String + |
+Path to the data to be imported. Currently, only OBS data can be imported. + |
+
data_type + |
+Yes + |
+String + |
+Type of the data to be imported. Currently, data types of CSV, Parquet, ORC, JSON, and Avro are supported. + NOTE:
+Data in Avro format generated by Hive tables cannot be imported. + |
+
database_name + |
+Yes + |
+String + |
+Name of the database where the table to which data is imported resides. + |
+
table_name + |
+Yes + |
+String + |
+Name of the table to which data is imported. + |
+
with_column_header + |
+No + |
+Boolean + |
+Whether the first line of the imported data contains column names, that is, headers. The default value is false, indicating that column names are not contained. This parameter can be specified when CSV data is imported. + |
+
delimiter + |
+No + |
+String + |
+User-defined data delimiter. The default value is a comma (,). This parameter can be specified when CSV data is imported. + |
+
quote_char + |
+No + |
+String + |
+User-defined quotation character. The default value is double quotation marks ("). This parameter can be specified when CSV data is imported. + |
+
escape_char + |
+No + |
+String + |
+User-defined escape character. The default value is a backslash (\). This parameter can be specified when CSV data is imported. + |
+
date_format + |
+No + |
+String + |
+Specified date format. The default value is yyyy-MM-dd. For details about the characters involved in the date format, see Table 3. This parameter can be specified when data in the CSV or JSON format is imported. + |
+
bad_records_path + |
+No + |
+String + |
+Directory for storing bad records generated during job execution. After this parameter is configured, bad records are not imported into the target table.
 + |
+
timestamp_format + |
+No + |
+String + + |
+Specified time format. The default value is yyyy-MM-dd HH:mm:ss. For definitions about characters in the time format, see Table 3. This parameter can be specified when data in the CSV or JSON format is imported. + |
+
queue_name + |
+No + |
+String + |
+Name of the queue that is specified to execute a task. If no queue is specified, the default queue is used. + |
+
overwrite + |
+No + |
+Boolean + |
+Whether to overwrite data. The default value is false, indicating appending write. If the value is true, it indicates overwriting. + |
+
partition_spec + |
+No + |
+Object + |
+Partition to which data is to be imported.
+
|
+
conf + |
+No + |
+Array of Strings + |
+User-defined parameters that apply to the job. Currently, only dli.sql.dynamicPartitionOverwrite.enabled is supported, and its default value is false. If it is set to true, only data in the specified partitions is overwritten. If it is set to false, data in the entire DataSource table is overwritten.
 NOTE:
+For dynamic overwrite of Hive partition tables, only the involved partition data can be overwritten. The entire table data cannot be overwritten. + |
+
Character + |
+Date or Time Element + |
+Example + |
+
---|---|---|
G + |
+Era designator
 + |
+AD + |
+
y + |
+Year + |
+1996; 96 + |
+
M + |
+Month + |
+July; Jul; 07 + |
+
w + |
+Week of the year
 + |
+27 (Week 27 in the year) + |
+
W + |
+Week of the month
 + |
+2 (Second week in the month) + |
+
D + |
+Day of the year
 + |
+189 (Day 189 in the year) + |
+
d + |
+Day of the month
 + |
+10 (Day 10 in the month) + |
+
u + |
+Day of the week
 + |
+1 (Monday), ..., 7 (Sunday) + |
+
a + |
+am/pm flag + |
+pm (Afternoon) + |
+
H + |
+Hour of the day (0-23)
 + |
+2 + |
+
h + |
+Hour in am/pm (1-12)
 + |
+12 + |
+
m + |
+Minute
 + |
+30 + |
+
s + |
+Second
 + |
+55 + |
+
S + |
+Millisecond
 + |
+978 + |
+
z + |
+Time zone + |
+Pacific Standard Time; PST; GMT-08:00 + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
is_success + |
+No + |
+Boolean + |
+Indicates whether the request is successfully sent. Value true indicates that the request is successfully sent. + |
+
message + |
+No + |
+String + |
+System prompt. If execution succeeds, the parameter setting may be left blank. + |
+
job_id + |
+No + |
+String + |
+ID of a job returned after a job is generated and submitted by using SQL statements. The job ID can be used to query the job status and results. + |
+
job_mode + |
+No + |
+String + |
+Job execution mode. The options are as follows:
+async: asynchronous mode
+sync: synchronous mode
|
+
{ + "data_path": "obs://home/data1/DLI/t1.csv", + "data_type": "csv", + "database_name": "db2", + "table_name": "t2", + "with_column_header": false, + "delimiter": ",", + "quote_char": ",", + "escape_char": ",", + "date_format": "yyyy-MM-dd", + "timestamp_format": "yyyy-MM-dd'T'HH:mm:ss.SSSZZ", + "queue_name": "queue2", + "overwrite": false, + "partition_spec":{ + "column1": "2020-01-01", + "column2": "columnPartValue" + } +}+
{ + "is_success": true, + "message": "import data to table t2 started", + "job_id": "6b29eb77-4c16-4e74-838a-2cf7959e9202", + "job_mode":"async" +}+
Table 5 describes the status code.
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error information. For details, see Error Code.
 +
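As an illustration only, the sketch below submits the preceding example request with Python and reads back the returned job ID. The endpoint and token are placeholders, and the exact URI path is an assumption; verify it against the URI format of this API before use.

# Minimal sketch: submit the example import job and capture the returned job ID.
# endpoint, the token, and the exact URI path are assumptions.
import requests

endpoint = "https://dli.example-region.myhuaweicloud.com"  # assumed regional DLI endpoint
project_id = "<your-project-id>"
headers = {"X-Auth-Token": "<your-IAM-token>", "Content-Type": "application/json"}

body = {
    "data_path": "obs://home/data1/DLI/t1.csv",
    "data_type": "csv",
    "database_name": "db2",
    "table_name": "t2",
    "with_column_header": False,
    "queue_name": "queue2",
}
url = f"{endpoint}/v1.0/{project_id}/jobs/import-table"  # assumed path for this API
result = requests.post(url, headers=headers, json=body).json()
print(result.get("is_success"), result.get("job_id"), result.get("job_mode"))

Because the job runs asynchronously, use the returned job_id with the job status API described later in this document to track completion.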
+This API is used to export data from a DLI table to a file.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
data_path + |
+Yes + |
+String + |
+Path for storing the exported data. Currently, data can be stored only on OBS. If export_mode is set to errorifexists, the OBS path cannot contain the specified folder, for example, the test folder in the example request. + |
+
data_type + |
+Yes + |
+String + |
+Type of data to be exported. Currently, only CSV and JSON are supported. + |
+
database_name + |
+Yes + |
+String + |
+Name of the database where the table from which data is exported resides. + |
+
table_name + |
+Yes + |
+String + |
+Name of the table from which data is exported. + |
+
compress + |
+Yes + |
+String + |
+Compression mode for exported data. Currently, the compression modes gzip, bzip2, and deflate are supported. If you do not want to compress data, enter none. + |
+
queue_name + |
+No + |
+String + |
+Name of the queue that is specified to execute a task. If no queue is specified, the default queue is used. + |
+
export_mode + |
+No + |
+String + |
+Export mode. The parameter value can be ErrorIfExists or Overwrite. If export_mode is not specified, this parameter is set to ErrorIfExists by default.
+ErrorIfExists: An error is reported and the export fails if the target path already exists.
+Overwrite: Existing data in the target path is overwritten.
|
+
with_column_header + |
+No + |
+Boolean + |
+Whether to export column names when exporting CSV and JSON data.
+If this parameter is set to true, column names are exported.
+If this parameter is set to false or left blank, column names are not exported.
|
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
is_success + |
+No + |
+Boolean + |
+Whether the request is successfully sent. Value true indicates that the request is successfully sent. + |
+
message + |
+No + |
+String + |
+System prompt. If execution succeeds, the parameter setting may be left blank. + |
+
job_id + |
+No + |
+String + |
+ID of a job returned after a job is generated and submitted by using SQL statements. The job ID can be used to query the job status and results. + |
+
job_mode + |
+No + |
+String + |
+Job execution mode. The options are as follows:
+async: asynchronous mode
+sync: synchronous mode
|
+
{ + "data_path": "obs://home/data1/DLI/test", + "data_type": "json", + "database_name": "db2", + "table_name": "t2", + "compress": "gzip", + "with_column_header": "true", + "queue_name": "queue2" +}+
{ + "is_success": true, + "message": "export all data from table db2.t2 to path obs://home/data1/DLI/test started", + "job_id": "828d4044-3d39-449b-b32c-957f7cfadfc9", + "job_mode":"async" +}+
Table 4 describes the status code.
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error information. For details, see Error Code.
 +
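A minimal Python sketch of submitting the preceding export request follows. As with the import example, the endpoint, token, and exact URI path are assumptions to be checked against the URI format of this API.

# Minimal sketch: submit the example export job. endpoint, the token, and the
# exact URI path are assumptions.
import requests

endpoint = "https://dli.example-region.myhuaweicloud.com"  # assumed regional DLI endpoint
project_id = "<your-project-id>"
headers = {"X-Auth-Token": "<your-IAM-token>", "Content-Type": "application/json"}

body = {
    "data_path": "obs://home/data1/DLI/test",
    "data_type": "json",
    "database_name": "db2",
    "table_name": "t2",
    "compress": "gzip",
    "with_column_header": "true",
    "queue_name": "queue2",
}
url = f"{endpoint}/v1.0/{project_id}/jobs/export-table"  # assumed path for this API
job_id = requests.post(url, headers=headers, json=body).json().get("job_id")
print(job_id)  # the job runs asynchronously; poll its status with the job ID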
+This API is used to query the status of a submitted job.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
job_id + |
+Yes + |
+String + |
+Job ID. + |
+
None
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
is_success + |
+Yes + |
+Boolean + |
+Whether the request is successfully executed. Value true indicates that the request is successfully executed. + |
+
message + |
+Yes + |
+String + |
+System prompt. If execution succeeds, this parameter is left blank. + |
+
job_id + |
+Yes + |
+String + |
+Job ID. You can get the value by calling Submitting a SQL Job (Recommended). + |
+
job_type + |
+Yes + |
+String + |
+Job type. The options include DDL, DCL, IMPORT, EXPORT, QUERY, INSERT, DATA_MIGRATION, UPDATE, DELETE, RESTART_QUEUE, and SCALE_QUEUE.
 + |
+
job_mode + |
+Yes + |
+String + |
+Job execution mode. The options are as follows:
+async: asynchronous mode
+sync: synchronous mode
|
+
queue_name + |
+Yes + |
+String + |
+Name of the queue where the job is submitted. + |
+
owner + |
+Yes + |
+String + |
+User who submits a job. + |
+
start_time + |
+Yes + |
+Long + |
+Time when a job is started. The timestamp is in milliseconds. + |
+
duration + |
+No + |
+Long + |
+Job running duration (unit: millisecond). + |
+
status + |
+Yes + |
+String + |
+Status of a job, including RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, and CANCELLED. + |
+
input_row_count + |
+No + |
+Long + |
+Number of records scanned during the Insert job execution. + |
+
bad_row_count + |
+No + |
+Long + |
+Number of error records scanned during the Insert job execution. + |
+
input_size + |
+Yes + |
+Long + |
+Size of scanned files during job execution (unit: byte). + |
+
result_count + |
+Yes + |
+Integer + |
+Total number of records returned by the current job or total number of records inserted by the Insert job. + |
+
database_name + |
+No + |
+String + |
+Name of the database where the target table resides. database_name is valid only for jobs of the IMPORT, EXPORT, and QUERY types.
 + |
+
table_name + |
+No + |
+String + |
+Name of the target table. table_name is valid only for jobs of the IMPORT, EXPORT, and QUERY types.
 + |
+
detail + |
+Yes + |
+String + |
+JSON character string for information about related columns. + |
+
statement + |
+Yes + |
+String + |
+SQL statements of a job. + |
+
tags + |
+No + |
+Array of objects + |
+Job tags. For details, see Table 3. + |
+
None
+{ + "is_success": true, + "message": "", + "job_id": "208b08d4-0dc2-4dd7-8879-ddd4c020d7aa", + "job_type": "QUERY", + "job_mode":"async", + "queue_name": "default", + "owner": "test", + "start_time": 1509335108918, + "duration": 2523, + "status": "FINISHED", + "input_size": 22, + "result_count": 4, + "database_name":"dbtest", + "table_name":"tbtest", + "detail": "{\"type\":\"struct\",\"fields\":[{\"name\":\"id\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}},{\"name\":\"name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}}]}", + "statement": "select * from t1" +}+
Table 4 describes the status code.
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error information. For details, see Error Code.
 +
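Because import, export, and query jobs run asynchronously, a common pattern is to poll this API until the job reaches a terminal state. A minimal Python sketch, with the endpoint, token, and job ID as placeholder assumptions:

# Minimal sketch: poll GET /v1.0/{project_id}/jobs/{job_id}/status until the job
# reaches a terminal state. endpoint, the token, and job_id are placeholders.
import time
import requests

endpoint = "https://dli.example-region.myhuaweicloud.com"  # assumed regional DLI endpoint
project_id = "<your-project-id>"
job_id = "208b08d4-0dc2-4dd7-8879-ddd4c020d7aa"
headers = {"X-Auth-Token": "<your-IAM-token>"}

url = f"{endpoint}/v1.0/{project_id}/jobs/{job_id}/status"
while True:
    status = requests.get(url, headers=headers).json().get("status")
    if status in ("FINISHED", "FAILED", "CANCELLED"):  # terminal states per this section
        break
    time.sleep(5)  # RUNNING, SCALING, or LAUNCHING: wait and poll again
print(status)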
+This API is used to query details about jobs, including databasename, tablename, file size, and export mode.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
job_id + |
+Yes + |
+String + |
+Job ID. You can get the value by calling Submitting a SQL Job (Recommended). + |
+
None
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
is_success + |
+Yes + |
+Boolean + |
+Whether the request is successfully executed. Value true indicates that the request is successfully executed. + |
+
message + |
+Yes + |
+String + |
+System prompt. If execution succeeds, the parameter setting may be left blank. + |
+
job_id + |
+Yes + |
+String + |
+Job ID. + |
+
owner + |
+Yes + |
+String + |
+User who submits a job. + |
+
start_time + |
+Yes + |
+Long + |
+Time when a job is started. The timestamp is in milliseconds. + |
+
duration + |
+Yes + |
+Long + |
+Duration for executing the job (unit: millisecond). + |
+
export_mode + |
+No + |
+String + |
+Specified export mode during data export and query result saving.
+Available values are ErrorIfExists and Overwrite.
+ErrorIfExists: An error is reported and the export fails if the target path already exists.
+Overwrite: Existing data in the target path is overwritten.
|
+
data_path + |
+Yes + |
+String + |
+Path to imported or exported files. + |
+
data_type + |
+Yes + |
+String + |
+Type of data to be imported or exported. Currently, only CSV and JSON are supported. + |
+
database_name + |
+Yes + |
+String + |
+Name of the database where the table, where data is imported or exported, resides. + |
+
table_name + |
+Yes + |
+String + |
+Name of the table where data is imported or exported. + |
+
with_column_header + |
+No + |
+Boolean + |
+Whether the imported data contains the column name during the execution of an import job. + |
+
delimiter + |
+No + |
+String + |
+User-defined data delimiter set when the import job is executed. + |
+
quote_char + |
+No + |
+String + |
+User-defined quotation character set when the import job is executed. + |
+
escape_char + |
+No + |
+String + |
+User-defined escape character set when the import job is executed. + |
+
date_format + |
+No + |
+String + |
+Table date format specified when the import job is executed. + |
+
timestamp_format + |
+No + |
+String + |
+Table time format specified when the import job is executed. + |
+
compress + |
+No + |
+String + |
+Compression mode specified when the export job is executed. + |
+
tags + |
+No + |
+Array of objects + |
+Job tags. For details, see Table 3. + |
+
None
+{ + "is_success": true, + "message": "", + "data_path": "obs://DLI/computeCharging/test.csv", + "data_type": "json", + "database_name": "iam_exist", + "date_format": "yyyy-MM-dd", + "delimiter": ",", + "duration": 1623, + "escape_char": "\\", + "job_id": "a85d7298-ecef-47f9-bb31-499d2099d112", + "owner": "iam_exist", + "quote_char": "\"", + "start_time": 1517385246111, + "table_name": "DLI_table20", + "timestamp_format": "yyyy-MM-dd HH:mm:ss", + "with_column_header": false +}+
{ + "is_success": true, + "message": "", + "compress": "none", + "data_path": "obs://xxx/dli/path6", + "data_type": "json", + "database_name": "submitjob", + "duration": 4142, + "export_mode": "Overwrite", + "job_id": "b89fccb2-de6a-4c6c-b9b2-21f08a2eb85e", + "owner": "test", + "start_time": 1524107798024, + "table_name": "autotest" +}+
Table 4 describes the status code.
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error information. For details, see Error Code.
 +
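For reference, a minimal Python sketch of fetching job details follows; the endpoint, token, and job ID are placeholder assumptions.

# Minimal sketch: fetch details of an import or export job via
# GET /v1.0/{project_id}/jobs/{job_id}/detail. Placeholders throughout.
import requests

endpoint = "https://dli.example-region.myhuaweicloud.com"  # assumed regional DLI endpoint
project_id = "<your-project-id>"
job_id = "a85d7298-ecef-47f9-bb31-499d2099d112"
headers = {"X-Auth-Token": "<your-IAM-token>"}

detail = requests.get(
    f"{endpoint}/v1.0/{project_id}/jobs/{job_id}/detail", headers=headers
).json()
# For an import job, fields such as data_path, data_type, and delimiter are present;
# for an export job, expect export_mode and compress instead.
print(detail.get("data_path"), detail.get("data_type"))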
This API is used to export results returned from SQL query statements to OBS. Only the query results of QUERY jobs can be exported.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| job_id | Yes | String | Job ID. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| data_path | Yes | String | Path for storing the exported data. Currently, data can be stored only on OBS. The OBS path cannot contain folders, for example, the path folder in the sample request. |
| compress | No | String | Compression format of the exported data. Currently, gzip, bzip2, and deflate are supported. The default value is none, indicating that data is not compressed. |
| data_type | Yes | String | Storage format of the exported data. Currently, only CSV and JSON are supported. |
| queue_name | No | String | Name of the queue that is specified to execute a task. If no queue is specified, the default queue is used. |
| export_mode | No | String | Export mode. The parameter value can be ErrorIfExists or Overwrite. If export_mode is not specified, this parameter is set to ErrorIfExists by default. |
| with_column_header | No | Boolean | Whether to export column names when exporting CSV and JSON data. |
| limit_num | No | Integer | Number of data records to be exported. The default value is 0, indicating that all data records are exported. |
| encoding_type | No | String | Encoding of the data to be exported. The value can be utf-8, gb2312, or gbk. utf-8 is used if this parameter is left empty. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | Yes | Boolean | Whether the request is successfully sent. Value true indicates that the request is successfully sent. |
| message | Yes | String | System prompt. If execution succeeds, this parameter may be left blank. |
| job_id | No | String | ID of the job returned after a job is generated and submitted by using SQL statements. The job ID can be used to query the job status and results. |
| job_mode | No | String | Job execution mode. The value can be sync (synchronous) or async (asynchronous). |

{
  "data_path": "obs://obs-bucket1/path",
  "data_type": "json",
  "compress": "gzip",
  "with_column_header": "true",
  "queue_name": "queue2",
  "limit_num": 10
}

{
  "is_success": true,
  "message": "",
  "job_id": "37a40ef9-86f5-42e6-b4c6-8febec89cc20",
  "job_mode": "async"
}

Table 4 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
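As an illustration, the following minimal Python sketch submits the export request shown above. The endpoint, project ID, and X-Auth-Token value are placeholders, and the exact URI path is an assumption for this sketch; take the real URI format for this API from its URI section.

```python
import requests

ENDPOINT = "https://dli.example.com"          # hypothetical endpoint
PROJECT_ID = "your-project-id"                # replace with your project ID
JOB_ID = "37a40ef9-86f5-42e6-b4c6-8febec89cc20"  # a finished QUERY job
HEADERS = {"X-Auth-Token": "your-iam-token"}  # assumed auth header

# Request body mirrors the example above.
body = {
    "data_path": "obs://obs-bucket1/path",    # OBS path without folders
    "data_type": "json",
    "compress": "gzip",
    "with_column_header": "true",
    "queue_name": "queue2",
    "limit_num": 10,
}
# The URI path below is an assumption for illustration only.
url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/jobs/{JOB_ID}/export-result"
resp = requests.post(url, json=body, headers=HEADERS)
print(resp.json())  # expect is_success, message, job_id, job_mode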
This API is used to query information about all jobs in the current project.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| page-size | No | Integer | Maximum number of jobs displayed on each page. The value range is [1, 100]. The default value is 50. |
| current-page | No | Integer | Current page number. The default value is 1. |
| start | No | Long | Queries the jobs executed later than the time specified by this parameter. The time is a UNIX timestamp in milliseconds. |
| end | No | Long | Queries the jobs executed earlier than the time specified by this parameter. The time is a UNIX timestamp in milliseconds. |
| job-type | No | String | Type of the jobs to be queried. Job types include DDL, DCL, IMPORT, EXPORT, QUERY, INSERT, DATA_MIGRATION, UPDATE, DELETE, RESTART_QUEUE, and SCALE_QUEUE. To query all types of jobs, enter ALL. |
| job-status | No | String | Status of the jobs to be queried. |
| job-id | No | String | ID of the job to be queried. You can get the value by calling Submitting a SQL Job (Recommended). |
| queue_name | No | String | Specifies queue_name as the filter to query jobs running on the specified queue. |
| sql_pattern | No | String | Specifies an SQL segment as the filter. It is case-insensitive. |
| order | No | String | Specifies the job sorting mode. The default value is start_time_desc (job submission time in descending order). Four sorting modes are supported: duration_desc (job running duration in descending order), duration_asc (duration in ascending order), start_time_desc (submission time in descending order), and start_time_asc (submission time in ascending order). |
| engine-type | No | String | Engine type. |
| owner | No | String | User who submits a job. |
| tags | No | String | Queue tags for the search, in key=value format. The equal sign (=) must be escaped to %3D, for example, GET /v1.0/{project_id}/jobs?tags=k1%3Dv1, where k1 is the tag key and v1 is the tag value. Separate multiple tags with commas (,) escaped to %2C, for example, GET /v1.0/{project_id}/jobs?tags=k1%3Dv1%2Ck2%3Dv2. Currently, only fuzzy query is supported; exact query is not supported. |

The following is an example of the URL containing the query parameters:

GET /v1.0/{project_id}/jobs?page-size={size}&current-page={page_number}&start={start_time}&end={end_time}&job-type={QUERY}&queue_name={test}&order={duration_desc}

None
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | Yes | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | Yes | String | System prompt. If execution succeeds, this parameter may be left blank. |
| job_count | Yes | Integer | Total number of jobs. |
| jobs | Yes | Array of Objects | Information about the jobs. For details, see Table 4. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| job_id | Yes | String | Job ID. |
| job_type | Yes | String | Type of the job. |
| queue_name | Yes | String | Queue to which the job is submitted. |
| owner | Yes | String | User who submits the job. |
| start_time | Yes | Long | Time when the job is started. The timestamp is expressed in milliseconds. |
| duration | Yes | Long | Job running duration, in milliseconds. |
| status | Yes | String | Status of the job, including LAUNCHING, RUNNING, FINISHED, FAILED, and CANCELLED. |
| input_row_count | No | Long | Number of records scanned during the execution of an INSERT job. |
| bad_row_count | No | Long | Number of error records scanned during the execution of an INSERT job. |
| input_size | Yes | Long | Size of the files scanned during job execution. |
| result_count | Yes | Integer | Total number of records returned by the current job, or total number of records inserted by an INSERT job. |
| database_name | No | String | Name of the database where the target table resides. database_name is valid only for jobs of the IMPORT and EXPORT types. |
| table_name | No | String | Name of the target table. table_name is valid only for jobs of the IMPORT and EXPORT types. |
| with_column_header | No | Boolean | For import jobs, whether the imported data contains column names. |
| detail | Yes | String | JSON string of the related columns queried by using SQL statements. |
| statement | Yes | String | SQL statements of the job. |
| message | No | String | System prompt. |
| end_time | No | Long | Job end time. The timestamp is expressed in milliseconds. |
| tags | No | Array of Objects | Job tags. For details, see Table 5. |

None

{
  "is_success": true,
  "message": "",
  "job_count": 1,
  "jobs": [
    {
      "detail": "{\"type\":\"struct\",\"fields\":[{\"name\":\"name\",\"type\":\"string\",\"nullable\":true,\"metadata\":{}},{\"name\":\"age\",\"type\":\"integer\",\"nullable\":true,\"metadata\":{}}]}",
      "duration": 17731,
      "end_time": 1502349821460,
      "input_size": 0,
      "job_id": "37286cc7-0508-4ffd-b636-951c8a5c75de",
      "job_type": "QUERY",
      "message": "",
      "owner": "tenant1",
      "queue_name": "queue1",
      "result_count": 3,
      "start_time": 1502349803729,
      "statement": "select * from t_json_002",
      "status": "FINISHED",
      "with_column_header": false
    }
  ]
}

Table 6 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
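For illustration, a minimal Python sketch of calling this query API follows. The path /v1.0/{project_id}/jobs matches the URL example above; the endpoint and X-Auth-Token value are placeholders.

```python
import requests

ENDPOINT = "https://dli.example.com"          # hypothetical endpoint
PROJECT_ID = "your-project-id"
HEADERS = {"X-Auth-Token": "your-iam-token"}  # assumed auth header

# Filter and sort parameters from the table above.
params = {
    "page-size": 50,
    "current-page": 1,
    "job-type": "QUERY",
    "order": "start_time_desc",
}
resp = requests.get(f"{ENDPOINT}/v1.0/{PROJECT_ID}/jobs",
                    params=params, headers=HEADERS)
data = resp.json()
print("total:", data["job_count"])
for job in data["jobs"]:
    print(job["job_id"], job["job_type"], job["status"])
```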
This API is used to add a database.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| database_name | Yes | String | Name of the created database. NOTE: The default database is a built-in database. You cannot create a database named default. |
| description | No | String | Information about the created database. |
| enterprise_project_id | No | String | Enterprise project ID. The value 0 indicates the default enterprise project. NOTE: Users who have enabled Enterprise Management can set this parameter to bind a specified project. |
| tags | No | Array of Objects | Database tags. For details, see Table 3. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |

{
  "database_name": "db1",
  "description": "this is for test"
}

{
  "is_success": true,
  "message": ""
}

Table 5 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
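A minimal Python sketch of this call follows, using the request body from the example above. The endpoint and token are placeholders, and sending the body as a POST to the databases collection is an assumption here; confirm the method and URI format for this API in your environment.

```python
import requests

ENDPOINT = "https://dli.example.com"          # hypothetical endpoint
PROJECT_ID = "your-project-id"
HEADERS = {"X-Auth-Token": "your-iam-token"}  # assumed auth header

body = {"database_name": "db1", "description": "this is for test"}
# POST to the databases collection is assumed for this sketch.
resp = requests.post(f"{ENDPOINT}/v1.0/{PROJECT_ID}/databases",
                     json=body, headers=HEADERS)
print(resp.json())  # {"is_success": true, "message": ""} on success
```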
This API is used to query the information about all the databases.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| with-priv | No | Boolean | Specifies whether to display the permission information. The value can be true or false. The default value is false. |
| offset | No | Integer | Offset of the returned records. The value must be no less than 0. The default value is 0. |
| limit | No | Integer | Number of returned data records. The value must be greater than or equal to 0. By default, all data records are returned. |
| keyword | No | String | Database name filtering keyword. Fuzzy match is used to obtain all databases whose names contain the keyword. |
| tags | No | String | Database tags, in key=value format. The equal sign (=) must be escaped to %3D, for example: GET /v1.0/{project_id}/databases?offset=0&limit=10&with-priv=true&tags=k1%3Dv1, where k1 is the tag key and v1 is the tag value. Separate multiple tags with commas (,) escaped to %2C, for example: GET /v1.0/{project_id}/databases?offset=0&limit=10&with-priv=true&tags=k1%3Dv1%2Ck2%3Dv2. Currently, only fuzzy query is supported; exact query is not supported. |

The following is an example of the URL containing the query parameters:

GET /v1.0/{project_id}/databases?with-priv={is_with_priv}&offset={offsetValue}&limit={limitValue}&keyword={keywordValue}&tags={tagsValue}

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| database_count | No | Integer | Total number of databases. |
| databases | No | Array of objects | Database information. For details, see Table 4. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| database_name | No | String | Name of a database. |
| owner | No | String | Creator of a database. |
| table_number | No | Integer | Number of tables in a database. |
| description | No | String | Information about a database. |
| enterprise_project_id | Yes | String | Enterprise project ID. The value 0 indicates the default enterprise project. NOTE: Users who have enabled Enterprise Management can set this parameter to bind a specified project. |

None

{
  "is_success": true,
  "message": "",
  "database_count": 1,
  "databases": [
    {
      "database_name": "db2",
      "description": "this is for test",
      "owner": "tenant1",
      "table_number": 15
    }
  ]
}

Table 5 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
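A minimal Python sketch of listing databases with the query parameters above (endpoint and token are placeholders; the path matches the URL example):

```python
import requests

ENDPOINT = "https://dli.example.com"          # hypothetical endpoint
PROJECT_ID = "your-project-id"
HEADERS = {"X-Auth-Token": "your-iam-token"}  # assumed auth header

params = {"with-priv": "true", "offset": 0, "limit": 10, "keyword": "db"}
resp = requests.get(f"{ENDPOINT}/v1.0/{PROJECT_ID}/databases",
                    params=params, headers=HEADERS)
for db in resp.json().get("databases", []):
    print(db["database_name"], db["table_number"])
```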
This API is used to delete an empty database. If the database to be deleted contains tables, delete all tables first. For details about the API used to delete tables, see Deleting a Table.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| database_name | Yes | String | Name of the database to be deleted. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| cascade | No | Boolean | Specifies whether to forcibly delete the database. The value can be true or false. Default value: false. |
| async | No | Boolean | Specifies whether to delete the database in asynchronous mode. The value can be true or false. Default value: false. |

The following is an example of the URL containing the query parameters:

DELETE /v1.0/{project_id}/databases/{database_name}?cascade={is_cascade}&async={is_asyn}

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| job_id | No | String | Returned job ID, which can be used to obtain the job status and result. |
| job_type | No | String | Type of the job, for example, DDL. |
| job_mode | No | String | Job execution mode. The value can be sync (synchronous) or async (asynchronous). |

None

{
  "is_success": true,
  "message": "",
  "job_mode": "sync"
}

{
  "is_success": true,
  "message": "",
  "job_id": "208b08d4-0dc2-4dd7-8879-ddd4c020d7aa",
  "job_type": "DDL",
  "job_mode": "async"
}

Table 4 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
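A minimal Python sketch of deleting a database follows; the DELETE path and query parameters match the URL example above, while the endpoint and token are placeholders.

```python
import requests

ENDPOINT = "https://dli.example.com"          # hypothetical endpoint
PROJECT_ID = "your-project-id"
HEADERS = {"X-Auth-Token": "your-iam-token"}  # assumed auth header

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/databases/db1"
resp = requests.delete(url, params={"cascade": "false", "async": "false"},
                       headers=HEADERS)
result = resp.json()
if result.get("job_mode") == "async":
    # Asynchronous deletion returns a job ID to poll.
    print("track job:", result["job_id"])
else:
    print(result)
```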
This API is used to describe metadata information in the specified table.

GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| database_name | Yes | String | Name of the database where the target table resides. |
| table_name | Yes | String | Name of the target table. |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | Yes | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | Yes | String | System prompt. If execution succeeds, this parameter may be left blank. |
| column_count | Yes | Integer | Total number of columns in the table. |
| columns | Yes | Array of Objects | Column information, including the column name, type, and description. For details, see Table 3. |
| table_type | Yes | String | Table type. The options are as follows: MANAGED (DLI table), EXTERNAL (OBS table), and VIEW (view). |
| data_type | No | String | Data type, including CSV, Parquet, ORC, JSON, and Avro. |
| data_location | No | String | Path for storing data, which is an OBS path. |
| storage_properties | No | Array of Objects | Storage attributes in key/value format, including the parameters delimiter, escape, quote, header, dateformat, and timestampformat. |
| table_comment | No | String | Table comment. |
| create_table_sql | No | String | Statement used to create the table. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| column_name | Yes | String | Column name. |
| description | Yes | String | Description of a column. |
| type | Yes | String | Data type of a column. |
| is_partition_column | Yes | Boolean | Whether the column is a partition column. The value true indicates a partition column, and false indicates a non-partition column. The default value is false. |

None

{
  "is_success": true,
  "message": "",
  "column_count": 3,
  "columns": [
    {"column_name": "id", "description": "", "type": "int", "is_partition_column": false},
    {"column_name": "name", "description": "", "type": "string", "is_partition_column": false},
    {"column_name": "level", "description": "", "type": "string", "is_partition_column": true}
  ],
  "table_type": "MANAGED"
}

{
  "is_success": true,
  "message": "",
  "column_count": 2,
  "columns": [
    {"type": "string", "description": "", "column_name": "col2", "is_partition_column": false},
    {"type": "string", "description": "", "column_name": "col1", "is_partition_column": true}
  ],
  "table_type": "EXTERNAL",
  "data_type": "parquet",
  "data_location": "obs://obs-wangtao/savepoint/savepoint-d95437-039668840fff/_metadata",
  "storage_properties": [
    {"key": "timestampformat", "value": "yyyy-MM-dd HH:mm:ss"},
    {"key": "quote", "value": "\""},
    {"key": "dateformat", "value": "yyyy-MM-dd"},
    {"key": "escape", "value": "\\"},
    {"key": "header", "value": "false"},
    {"key": "delimiter", "value": ","}
  ],
  "table_comment": "",
  "create_table_sql": "CREATE TABLE `default`.`wan_test` (`col2` STRING, `col1` STRING)\nUSING parquet\nOPTIONS (\n `timestampformat` 'yyyy-MM-dd HH:mm:ss',\n `quote` '\"',\n `dateformat` 'yyyy-MM-dd',\n `escape` '\\\\',\n `header` 'false',\n `delimiter` ','\n)\nPARTITIONED BY (col1)\nCOMMENT ''\nLOCATION 'obs://obs-wangtao/savepoint/savepoint-d95437-039668840fff/_metadata'\nTBLPROPERTIES (\n 'hive.serialization.extend.nesting.levels' = 'true'\n)\n"
}

{
  "is_success": true,
  "message": "",
  "column_count": 3,
  "columns": [
    {"column_name": "id", "description": "", "type": "int", "is_partition_column": false},
    {"column_name": "name", "description": "", "type": "string", "is_partition_column": false},
    {"column_name": "level", "description": "", "type": "string", "is_partition_column": true}
  ],
  "table_type": "VIEW",
  "create_table_sql": "CREATE VIEW `default`.`view1`(id, name) AS\nselect * from a_gff.testtable\n"
}

Table 4 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
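A minimal Python sketch of describing a table follows; the GET path matches the URI format above, while the endpoint, token, and names are placeholders.

```python
import requests

ENDPOINT = "https://dli.example.com"          # hypothetical endpoint
PROJECT_ID = "your-project-id"
HEADERS = {"X-Auth-Token": "your-iam-token"}  # assumed auth header

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/databases/db1/tables/tb1"
meta = requests.get(url, headers=HEADERS).json()
print(meta["table_type"], "columns:", meta["column_count"])
for col in meta["columns"]:
    flag = " (partition)" if col["is_partition_column"] else ""
    print(f'{col["column_name"]}: {col["type"]}{flag}')
```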
This API is used to create a table. This API is a synchronous API.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| database_name | Yes | String | Name of the database where the new table resides. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| table_name | Yes | String | Name of the created table. |
| data_location | Yes | String | Location where data is stored. The value can be OBS, DLI, or VIEW. |
| description | No | String | Information about the new table. |
| columns | Yes | Array of Objects | Columns of the new table. For details about column parameters, see Table 3. This parameter is optional when data_location is VIEW. |
| select_statement | No | String | Query statement required for creating a view. The database to which the table belongs must be specified in the query statement, in the format database.table. This parameter is mandatory when data_location is VIEW. |
| data_type | No | String | Type of the data to be added to the OBS table. The options are Parquet, ORC, CSV, JSON, and Avro. NOTE: This parameter is mandatory for an OBS table. |
| data_path | No | String | Storage path of data in the new OBS table, which must be a path on OBS and must begin with obs. NOTE: This parameter is mandatory for an OBS table. Do not set this parameter to the OBS root directory; otherwise, all data in the root directory will be cleared when you clear the table data. |
| with_column_header | No | Boolean | Whether the table header is included in the OBS table data. Only data in CSV files has this attribute. This parameter is mandatory when data_location is OBS. |
| delimiter | No | String | User-defined data delimiter. Only data in CSV files has this attribute. This parameter is mandatory when data_location is OBS. |
| quote_char | No | String | User-defined quotation character. Double quotation marks (") are used by default. Only data in CSV files has this attribute. This parameter is mandatory when data_location is OBS. |
| escape_char | No | String | User-defined escape character. Backslashes (\\) are used by default. Only data in CSV files has this attribute. This parameter is mandatory when data_location is OBS. |
| date_format | No | String | User-defined date format. yyyy-MM-dd is used by default. For details about the characters in the date format, see Table 3. Only data in CSV and JSON files has this attribute. This parameter is mandatory when data_location is OBS. |
| timestamp_format | No | String | User-defined timestamp format. yyyy-MM-dd HH:mm:ss is used by default. For details about the characters in the timestamp format, see Table 3. Only data in CSV and JSON files has this attribute. This parameter is mandatory when data_location is OBS. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| column_name | Yes | String | Name of a column. |
| type | Yes | String | Data type of a column. |
| description | No | String | Description of a column. |
| is_partition_column | No | Boolean | Whether the column is a partition column. The value true indicates a partition column, and false indicates a non-partition column. The default value is false. NOTE: When creating a partitioned table, ensure that at least one column in the table is a non-partition column. For details, see the request example. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |

{
  "table_name": "tb1",
  "data_location": "OBS",
  "description": "",
  "data_type": "csv",
  "data_path": "obs://obs/path1",
  "columns": [
    {"column_name": "column1", "type": "string", "description": "", "is_partition_column": true},
    {"column_name": "column2", "type": "string", "description": "", "is_partition_column": false}
  ],
  "with_column_header": true,
  "delimiter": ",",
  "quote_char": "\"",
  "escape_char": "\\",
  "date_format": "yyyy-MM-dd",
  "timestamp_format": "yyyy-MM-dd HH:mm:ss"
}

The values of date_format and timestamp_format must be the same as the time format in the imported CSV file.

{
  "table_name": "view1",
  "data_location": "VIEW",
  "columns": [
    {"column_name": "column1", "type": "string", "description": "", "is_partition_column": true},
    {"column_name": "column2", "type": "string", "description": "", "is_partition_column": false}
  ],
  "select_statement": "select * from db1.tb1"
}

{
  "is_success": true,
  "message": ""
}

Table 5 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
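A minimal Python sketch of creating an OBS table with the request body above follows. The endpoint and token are placeholders, and sending the body as a POST to the tables collection of the database is an assumption for this sketch; confirm the method and URI format for this API.

```python
import requests

ENDPOINT = "https://dli.example.com"          # hypothetical endpoint
PROJECT_ID = "your-project-id"
HEADERS = {"X-Auth-Token": "your-iam-token"}  # assumed auth header

body = {
    "table_name": "tb1",
    "data_location": "OBS",
    "data_type": "csv",
    "data_path": "obs://obs/path1",           # never the OBS root directory
    "columns": [
        # At least one column must be a non-partition column.
        {"column_name": "column1", "type": "string", "is_partition_column": True},
        {"column_name": "column2", "type": "string", "is_partition_column": False},
    ],
    "with_column_header": True,
    "delimiter": ",",
}
# POST to the tables collection is assumed for this sketch.
url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/databases/db1/tables"
print(requests.post(url, json=body, headers=HEADERS).json())
```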
This API is used to delete a specified table.

DELETE /v1.0/{project_id}/databases/{database_name}/tables/{table_name}

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| database_name | Yes | String | Name of the database where the table to be deleted resides. |
| table_name | Yes | String | Name of the table to be deleted. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| async | No | Boolean | Specifies whether to delete the table in asynchronous mode. The value can be true or false. Default value: false. |

The following is an example of the URL containing the query parameter:

DELETE /v1.0/{project_id}/databases/{database_name}/tables/{table_name}?async={is_async}

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| job_mode | No | String | Job execution mode. The value can be sync (synchronous) or async (asynchronous). |

None

{
  "is_success": true,
  "message": ""
}

Table 4 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
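A minimal Python sketch of deleting a table follows; the DELETE path matches the URI format above, while the endpoint, token, and names are placeholders.

```python
import requests

ENDPOINT = "https://dli.example.com"          # hypothetical endpoint
PROJECT_ID = "your-project-id"
HEADERS = {"X-Auth-Token": "your-iam-token"}  # assumed auth header

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/databases/db1/tables/tb1"
resp = requests.delete(url, params={"async": "false"}, headers=HEADERS)
print(resp.json())  # {"is_success": true, "message": ""} in sync mode
```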
This API is used to share a specific queue with other users. You can grant users the permission to use the specified queue or revoke the permission.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| queue_name | Yes | String | Name of a queue. |
| user_name | Yes | String | Name of the user whose queue usage permission is granted, revoked, or updated. |
| action | Yes | String | Grants, revokes, or updates the permission. The value can be grant, revoke, or update. Users can perform the update operation only when they have both the grant and revoke permissions. |
| privileges | Yes | Array of Strings | List of permissions to be granted, revoked, or updated, for example, SUBMIT_JOB and DROP_QUEUE. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |

{
  "queue_name": "queue1",
  "user_name": "tenant2",
  "action": "grant",
  "privileges": ["DROP_QUEUE", "SUBMIT_JOB"]
}

{
  "is_success": true,
  "message": ""
}

Table 4 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
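As an illustration, a minimal Python sketch of granting queue usage permission follows. The endpoint and token are placeholders, and both the HTTP method (PUT) and the URI path are assumptions for this sketch; take them from the URI section of this API.

```python
import requests

ENDPOINT = "https://dli.example.com"          # hypothetical endpoint
PROJECT_ID = "your-project-id"
HEADERS = {"X-Auth-Token": "your-iam-token"}  # assumed auth header

body = {
    "queue_name": "queue1",
    "user_name": "tenant2",
    "action": "grant",                        # grant, revoke, or update
    "privileges": ["DROP_QUEUE", "SUBMIT_JOB"],
}
# Method and path are assumptions for illustration only.
url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/queues/user-authorization"
print(requests.put(url, json=body, headers=HEADERS).json())
```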
This API is used to query the names of all users who can use a specified queue.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| queue_name | Yes | String | Name of a queue. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| limit | Yes | Integer | Number of records displayed per page in the page-based query. |
| offset | Yes | Integer | Offset of the page-based query. |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| queue_name | No | String | Name of a queue. |
| privileges | No | Array of Objects | Users who are granted the permission to use this queue and the permission array to which each user belongs. For details, see Table 4. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_admin | No | Boolean | Whether the user is an administrator. |
| user_name | No | String | Name of the user who has permission on the current queue. |
| privileges | No | Array of Strings | Permissions of the user on the queue. |

None

{
  "is_success": true,
  "message": "",
  "privileges": [
    {
      "is_admin": true,
      "privileges": ["ALL"],
      "user_name": "tenant1"
    },
    {
      "is_admin": false,
      "privileges": ["SUBMIT_JOB"],
      "user_name": "user2"
    }
  ],
  "queue_name": "queue1"
}

Table 5 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
This API is used to grant database or table data usage permission to specified users.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| user_name | Yes | String | Name of the user whose data usage permission is granted, revoked, or updated. |
| action | Yes | String | Grants, revokes, or updates the permission. The value can be grant, revoke, or update. NOTE: Users can perform the update operation only when they have both the grant and revoke permissions. |
| privileges | Yes | Array of Objects | Permission granting information. For details, see Table 3. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| object | Yes | String | Data object to be assigned, named in the format databases.Database name, databases.Database name.tables.Table name, or databases.Database name.tables.Table name.columns.Column name (see the request example). |
| privileges | Yes | Array of Strings | List of permissions to be granted, revoked, or updated. NOTE: If action is update and the update list is empty, all permissions of the user on the database or table are revoked. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |

{
  "user_name": "user2",
  "action": "grant",
  "privileges": [
    {
      "object": "databases.db1.tables.tb2.columns.column1",
      "privileges": ["SELECT"]
    },
    {
      "object": "databases.db1.tables.tbl",
      "privileges": ["DROP_TABLE"]
    },
    {
      "object": "databases.db1",
      "privileges": ["SELECT"]
    }
  ]
}

{
  "is_success": true,
  "message": ""
}

Table 5 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
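A minimal Python sketch of granting data permission with the request body above follows. The endpoint and token are placeholders, and the PUT method and user-authorization path are assumptions for this sketch.

```python
import requests

ENDPOINT = "https://dli.example.com"          # hypothetical endpoint
PROJECT_ID = "your-project-id"
HEADERS = {"X-Auth-Token": "your-iam-token"}  # assumed auth header

body = {
    "user_name": "user2",
    "action": "grant",
    "privileges": [
        # Objects follow the databases.<db>[.tables.<table>[.columns.<col>]] format.
        {"object": "databases.db1", "privileges": ["SELECT"]},
        {"object": "databases.db1.tables.tbl", "privileges": ["DROP_TABLE"]},
    ],
}
# Method and path are assumptions for illustration only.
url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/user-authorization"
print(requests.put(url, json=body, headers=HEADERS).json())
```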
This API is used to query the names of all users who have permission to use or access the database.

| Parameter | Mandatory | Description |
|---|---|---|
| project_id | Yes | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| database_name | Yes | Name of the database to be queried. |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| database_name | No | String | Name of the queried database. |
| privileges | No | Array of objects | Permission information. For details, see Table 3. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_admin | No | Boolean | Whether the database user is an administrator. |
| user_name | No | String | Name of the user who has permission on the current database. |
| privileges | No | Array of Strings | Permissions of the user on the database. |

None

{
  "is_success": true,
  "message": "",
  "database_name": "dsstest",
  "privileges": [
    {
      "is_admin": true,
      "privileges": ["ALL"],
      "user_name": "test"
    },
    {
      "is_admin": false,
      "privileges": ["ALTER_TABLE_ADD_PARTITION"],
      "user_name": "scuser1"
    },
    {
      "is_admin": false,
      "privileges": ["CREATE_TABLE"],
      "user_name": "scuser2"
    }
  ]
}

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
This API is used to query users who have permission to access the specified table or a column in the table.

GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/users

| Parameter | Mandatory | Description |
|---|---|---|
| project_id | Yes | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| database_name | Yes | Name of the database where the table to be queried resides. |
| table_name | Yes | Name of the table to be queried. |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| privileges | No | Array of Objects | Permission information. For details, see Table 3. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_admin | No | Boolean | Whether the table user is an administrator. |
| object | No | String | Object on which the user has permission, for example, databases.Database name.tables.Table name or databases.Database name.tables.Table name.columns.Column name. |
| privileges | No | Array of Strings | Permissions of the user on the object. |
| user_name | No | String | Name of the user who has the permission. |

None

{
  "is_success": true,
  "message": "",
  "privileges": [
    {
      "is_admin": false,
      "object": "databases.dsstest.tables.csv_par_table",
      "privileges": ["SELECT"],
      "user_name": "tent2"
    },
    {
      "is_admin": true,
      "object": "databases.dsstest.tables.csv_par_table",
      "privileges": ["ALL"],
      "user_name": "tent4"
    }
  ]
}

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
This API is used to query the permissions of a specified user on a table.

GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/users/{user_name}

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| database_name | Yes | String | Name of the database where the table to be queried resides. |
| table_name | Yes | String | Name of the table to be queried. |
| user_name | Yes | String | Name of the user whose permissions are to be queried. |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| user_name | No | String | Name of the user whose permissions are queried. |
| privileges | No | Array of Objects | Permission information. For details, see Table 3. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| object | No | String | Object on which the user has permission, for example, databases.Database name.tables.Table name or databases.Database name.tables.Table name.columns.Column name. |
| privileges | No | Array of Strings | Permissions of the user on the specified object. |

None

{
  "is_success": true,
  "message": "",
  "privileges": [
    {
      "object": "databases.dsstest.tables.obs_2312",
      "privileges": ["DESCRIBE_TABLE"]
    },
    {
      "object": "databases.dsstest.tables.obs_2312.columns.id",
      "privileges": ["SELECT"]
    }
  ],
  "user_name": "scuser1"
}

Table 4 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
If an error occurs in API calling, no result is returned. Identify the cause of the error based on the error codes of each API: HTTP status code 4xx or 5xx is returned, and the response body contains the specific error code and error information. If you are unable to identify the cause of an error, contact technical support and provide the error code so that we can help you solve the problem as soon as possible.

If an error occurs during API calling, the system returns an error code and an error message. The following shows the format of an error response body:

{
  "error_msg": "The format of message is error",
  "error_code": "DLI.0001"
}

In the preceding information, error_code is an error code, and error_msg describes the error.

| Parameter | Parameter Type | Description |
|---|---|---|
| error_code | String | Error code. For details, see Table 2. |
| error_msg | String | Error details. |

| Status Code | Error Code | Error Message |
|---|---|---|
| 400 | DLI.0001 | Parameter check errors occur. |
| 400 | DLI.0002 | The object does not exist. |
| 400 | DLI.0003 | SQL permission verification fails. |
| 400 | DLI.0004 | SQL syntax parsing errors occur. |
| 400 | DLI.0005 | SQL semantics parsing errors occur. |
| 400 | DLI.0006 | The object already exists. |
| 400 | DLI.0007 | The operation is not supported. |
| 400 | DLI.0008 | Metadata errors occur. |
| 400 | DLI.0009 | System restrictions. |
| 400 | DLI.0011 | The file permission check fails. |
| 400 | DLI.0012 | Resource objects are unavailable. |
| 401 | DLI.0013 | User authentication errors occur. |
| 401 | DLI.0014 | Service authentication errors occur. |
| 400 | DLI.0015 | Token parsing error. |
| 400 | DLI.0016 | The identity and role are incorrect. |
| 400 | DLI.0018 | Data conversion errors occur. |
| 400 | DLI.0019 | The task times out. |
| 400 | DLI.0100 | The result expires. |
| 404 | DLI.0023 | No related resources were found. |
| 400 | DLI.0999 | Server-side errors occur. |
| 400 | DLI.1028 | The quota is insufficient. |

For example, if no queue named testqueue exists, the following error message is displayed when you submit a job:

{
  "error_code": "DLI.0002",
  "error_msg": "There is no queue named testqueue"
}
This API is used to submit jobs to a queue using SQL statements.

The supported job types are DDL, DCL, IMPORT, QUERY, and INSERT. The IMPORT function is the same as that described in Importing Data; the difference lies in the implementation method.

Additionally, you can use other APIs to query and manage jobs.

This API is synchronous if job_type in the response message is DCL.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| sql | Yes | String | SQL statement that you want to execute. |
| currentdb | No | String | Database where the SQL statement is executed. This parameter does not need to be configured during database creation. |
| queue_name | No | String | Name of the queue to which the job is submitted. The name can contain only digits, letters, and underscores (_), but cannot contain only digits or start with an underscore (_). |
| conf | No | Array of Strings | Configuration parameters for the SQL job in key/value format. For details about the supported configuration items, see Table 3. |
| tags | No | Array of Objects | Tags of the job. For details, see Table 4. |

| Parameter | Default Value | Description |
|---|---|---|
| spark.sql.files.maxRecordsPerFile | 0 | Maximum number of records to be written into a single file. If the value is zero or negative, there is no limit. |
| spark.sql.autoBroadcastJoinThreshold | 209715200 | Maximum size, in bytes, of a table that is broadcast to all worker nodes when a join is executed. Set this parameter to -1 to disable broadcasting. NOTE: Currently, only metastore tables for which the ANALYZE TABLE COMPUTE STATISTICS noscan command has been run, and file-based data source tables whose statistics are computed directly from data files, are supported. |
| spark.sql.shuffle.partitions | 200 | Default number of partitions used when shuffling data for joins or aggregations. |
| spark.sql.dynamicPartitionOverwrite.enabled | false | Whether DLI overwrites only the partitions into which data is written at runtime. If this parameter is set to false, all partitions that meet the specified condition are deleted before the overwrite starts; for example, using INSERT OVERWRITE to write partition 2021-02 to a partitioned table that already has a 2021-01 partition also deletes the existing 2021-01 partition. If this parameter is set to true, DLI does not delete other partitions before the overwrite starts. |
| spark.sql.files.maxPartitionBytes | 134217728 | Maximum number of bytes to be packed into a single partition when a file is read. |
| spark.sql.badRecordsPath | - | Path of bad records. |
| dli.sql.sqlasync.enabled | false | Whether DDL and DCL statements are executed asynchronously. The value true indicates that asynchronous execution is enabled. |
| dli.sql.job.timeout | - | Job running timeout interval, in seconds. If the timeout interval expires, the job is canceled. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | Yes | Boolean | Whether the request is successfully sent. Value true indicates that the request is successfully sent. |
| message | Yes | String | System prompt. If execution succeeds, this parameter may be left blank. |
| job_id | Yes | String | ID of the job returned after a job is generated and submitted by using SQL statements. The job ID can be used to query the job status and results. |
| job_type | Yes | String | Type of the job. Job types include DDL, DCL, IMPORT, EXPORT, QUERY, and INSERT. |
| schema | No | Array of objects | If the statement type is DDL, the column names and types of the DDL results are displayed. |
| rows | No | Array of objects | If the statement type is DDL, the DDL results are displayed. |
| job_mode | No | String | Job execution mode. The value can be sync (synchronous) or async (asynchronous). |

{
  "currentdb": "db1",
  "sql": "desc table1",
  "queue_name": "default",
  "conf": [
    "dli.sql.shuffle.partitions = 200"
  ],
  "tags": [
    {"key": "workspace", "value": "space1"},
    {"key": "jobName", "value": "name1"}
  ]
}

{
  "is_success": true,
  "message": "",
  "job_id": "8ecb0777-9c70-4529-9935-29ea0946039c",
  "job_type": "DDL",
  "job_mode": "sync",
  "schema": [
    {"col_name": "string"},
    {"data_type": "string"},
    {"comment": "string"}
  ],
  "rows": [
    ["c1", "int", null],
    ["c2", "string", null]
  ]
}

Table 6 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
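A minimal Python sketch of submitting a SQL job with the request body above follows. The endpoint and token are placeholders, and the submit path is an assumption for this sketch; take the real path from the URI section of this API.

```python
import requests

ENDPOINT = "https://dli.example.com"          # hypothetical endpoint
PROJECT_ID = "your-project-id"
HEADERS = {"X-Auth-Token": "your-iam-token"}  # assumed auth header

body = {
    "currentdb": "db1",
    "sql": "desc table1",
    "queue_name": "default",
    "conf": ["dli.sql.sqlasync.enabled=true"],  # run DDL/DCL asynchronously
}
# The submit path is an assumption for illustration only.
url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/jobs/submit-job"
job = requests.post(url, json=body, headers=HEADERS).json()
print(job["job_id"], job["job_type"], job.get("job_mode"))
```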
This API is used to cancel a submitted job. A job whose execution has completed or failed cannot be canceled.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| job_id | Yes | String | Job ID. You can get the value by calling Submitting a SQL Job (Recommended). |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |

None

{
  "is_success": true,
  "message": ""
}

Table 3 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
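A minimal Python sketch of canceling a job follows. The endpoint and token are placeholders, and issuing a DELETE on the job resource is an assumption for this sketch; take the real method and path from the URI section of this API.

```python
import requests

ENDPOINT = "https://dli.example.com"          # hypothetical endpoint
PROJECT_ID = "your-project-id"
JOB_ID = "8ecb0777-9c70-4529-9935-29ea0946039c"
HEADERS = {"X-Auth-Token": "your-iam-token"}  # assumed auth header

# DELETE on the job resource is assumed for the cancel operation.
url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/jobs/{JOB_ID}"
resp = requests.delete(url, headers=HEADERS)
print(resp.json())  # fails if the job already finished or failed
```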
This API is used to query information about tables that meet the filtering criteria, or about all the tables in the specified database.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| database_name | Yes | String | Name of the database where the tables reside. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| keyword | No | String | Keyword used to filter table names. |
| with-detail | No | Boolean | Whether to obtain detailed information about the tables (such as owner and size). The default value is false. |
| page-size | No | Integer | Paging size. The minimum value is 1 and the maximum value is 100. |
| current-page | No | Integer | Current page number. The minimum value is 1. |
| with-priv | No | Boolean | Whether to return permission information. |
| table-type | No | String | Database table type. |
| datasource-type | No | String | Data source type. |
| without-tablemeta | No | Boolean | Whether to skip table metadata in the response. The default value is false. If this parameter is set to true, the response speed is greatly improved. |

The following is an example of the URL containing the query parameters:

GET /v1.0/{project_id}/databases/{database_name}/tables?keyword=tb&with-detail=true

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | Yes | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | Yes | String | System prompt. If execution succeeds, this parameter may be left blank. |
| table_count | Yes | Integer | Total number of tables. |
| tables | Yes | Array of Objects | Table information. For details, see Table 4. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| create_time | Yes | Long | Time when the table is created. The timestamp is expressed in milliseconds. |
| data_type | No | String | Type of the data in the OBS table. The options are Parquet, ORC, CSV, JSON, and Avro. NOTE: This parameter is available only for OBS tables. |
| data_location | Yes | String | Data storage location, which can be DLI or OBS. |
| last_access_time | Yes | Long | Time when the table was last updated. The timestamp is expressed in milliseconds. |
| location | No | String | Storage path of the OBS table. NOTE: This parameter is available only for OBS tables. |
| owner | Yes | String | Table owner. |
| table_name | Yes | String | Name of the table. |
| table_size | Yes | Long | Size of a DLI table, in bytes. The value is 0 for non-DLI tables. |
| table_type | Yes | String | Type of the table: MANAGED (DLI table), EXTERNAL (OBS table), or VIEW (view). |
| partition_columns | No | String | Partition field. This parameter is valid only for OBS partitioned tables. |
| page-size | No | Integer | Paging size. The minimum value is 1 and the maximum value is 100. |
| current-page | No | Integer | Current page number. The minimum value is 1. |

If with-detail is set to false in the URI, only the values of the table parameters data_location, table_name, and table_type are returned.

None

{
  "is_success": true,
  "message": "",
  "table_count": 1,
  "tables": [
    {
      "create_time": 1517364268000,
      "data_location": "OBS",
      "data_type": "csv",
      "last_access_time": 1517364268000,
      "location": "obs://DLI/sqldata/data.txt",
      "owner": "test",
      "partition_columns": ["a0"],
      "table_name": "obs_t",
      "table_size": 0,
      "table_type": "EXTERNAL"
    }
  ]
}

Table 5 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
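A minimal Python sketch of listing tables in a database follows; the GET path and query parameters match the URL example above, while the endpoint, token, and database name are placeholders.

```python
import requests

ENDPOINT = "https://dli.example.com"          # hypothetical endpoint
PROJECT_ID = "your-project-id"
HEADERS = {"X-Auth-Token": "your-iam-token"}  # assumed auth header

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/databases/db1/tables"
resp = requests.get(url, params={"keyword": "tb", "with-detail": "true"},
                    headers=HEADERS)
for t in resp.json().get("tables", []):
    print(t["table_name"], t["table_type"], t["data_location"])
```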
This API is used to check the SQL syntax.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| sql | Yes | String | SQL statement that you want to check. |
| currentdb | No | String | Database where the SQL statement is executed. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| job_type | No | String | Type of the job. Job types include DDL, DCL, IMPORT, EXPORT, QUERY, and INSERT. |

{
  "currentdb": "db1",
  "sql": "select * from t1"
}

{
  "is_success": true,
  "message": "the sql is ok",
  "job_type": "QUERY"
}

Table 4 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
This API is used to preview the first ten rows of a table.

GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/preview

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| database_name | Yes | String | Name of the database where the table to be previewed resides. |
| table_name | Yes | String | Name of the table to be previewed. |

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| mode | No | String | Preview table mode. The options are SYNC and ASYNC. The default value is SYNC. |

The following is an example of the URL containing the query parameter:

GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/preview?mode={previewMode}

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| schema | No | Array of objects | Column names and types of the table. |
| rows | No | Array of objects | Previewed table content. |

None

{
  "is_success": true,
  "message": "",
  "schema": [
    {"id": "int"},
    {"name": "string"},
    {"address": "string"}
  ],
  "rows": [
    ["1", "John", "xxx"],
    ["2", "Lily", "xxx"]
  ]
}

In asynchronous request mode, a job ID is returned. You can obtain the preview information based on the job ID.

Table 4 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.
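A minimal Python sketch of previewing a table in synchronous mode follows; the GET path matches the URI format above, while the endpoint, token, and names are placeholders.

```python
import requests

ENDPOINT = "https://dli.example.com"          # hypothetical endpoint
PROJECT_ID = "your-project-id"
HEADERS = {"X-Auth-Token": "your-iam-token"}  # assumed auth header

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/databases/db1/tables/tb1/preview"
preview = requests.get(url, params={"mode": "SYNC"}, headers=HEADERS).json()
# Each schema entry is a single-key object mapping column name to type.
cols = [list(c.keys())[0] for c in preview["schema"]]
print(cols)
for row in preview["rows"]:
    print(row)
```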
+This API is used to create a batch processing job in a queue.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

Table 2 Request parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| file | Yes | String | Name of the package that is of the JAR or pyFile type and has been uploaded to the DLI resource management system. You can also specify an OBS path, for example, obs://Bucket name/Package name. |
| class_name | Yes | String | Java/Spark main class of the batch processing job. |
| queue | No | String | Queue name. Set this parameter to the name of the created DLI queue. The queue must be of the general-purpose type. |
| cluster_name | No | String | Queue name. Set this parameter to the created DLI queue name. NOTE: You are advised to use the queue parameter. The queue and cluster_name parameters cannot coexist. |
| args | No | Array of Strings | Input parameters of the main class, that is, application parameters. |
| sc_type | No | String | Compute resource type. Currently, resource types A, B, and C are available. If this parameter is not specified, the minimum configuration (type A) is used. For details about resource types, see Table 3. |
| jars | No | Array of Strings | Name of the package that is of the JAR type and has been uploaded to the DLI resource management system. You can also specify an OBS path, for example, obs://Bucket name/Package name. |
| python_files | No | Array of Strings | Name of the package that is of the PyFile type and has been uploaded to the DLI resource management system. You can also specify an OBS path, for example, obs://Bucket name/Package name. |
| files | No | Array of Strings | Name of the package that is of the file type and has been uploaded to the DLI resource management system. You can also specify an OBS path, for example, obs://Bucket name/Package name. |
| modules | No | Array of Strings | Name of the dependent system resource module. You can view the module name using the API related to Querying Resource Packages in a Group. DLI provides dependencies for executing datasource jobs. |
| resources | No | Array of Objects | JSON object list, including the name and type of the JSON package that has been uploaded to the queue. For details, see Table 4. |
| groups | No | Array of Objects | JSON object list, including the package group resource. For details about the format, see the request example. If the type of the name in resources is not verified, the package with the name exists in the group. For details, see Table 5. |
| conf | No | Array of Objects | Batch configuration item. |
| name | No | String | Batch processing task name. The value contains a maximum of 128 characters. |
| driver_memory | No | String | Driver memory of the Spark application, for example, 2 GB or 2048 MB. This configuration item replaces the default parameter in sc_type. The unit must be provided; otherwise, the startup fails. |
| driver_cores | No | Integer | Number of CPU cores of the Spark application driver. This configuration item replaces the default parameter in sc_type. |
| executor_memory | No | String | Executor memory of the Spark application, for example, 2 GB or 2048 MB. This configuration item replaces the default parameter in sc_type. The unit must be provided; otherwise, the startup fails. |
| executor_cores | No | Integer | Number of CPU cores of each Executor in the Spark application. This configuration item replaces the default parameter in sc_type. |
| num_executors | No | Integer | Number of Executors in a Spark application. This configuration item replaces the default parameter in sc_type. |
| obs_bucket | No | String | OBS bucket for storing the Spark jobs. Set this parameter when you need to save jobs. |
| auto_recovery | No | Boolean | Whether to enable the retry function. If enabled, Spark jobs will be automatically retried after an exception occurs. The default value is false. |
| max_retry_times | No | Integer | Maximum number of retries. The maximum value is 100, and the default value is 20. |
| catalog_name | No | String | To access metadata, set this parameter to dli. |

Table 3 Resource types

| Resource Type | Physical Resource | driverCores | executorCores | driverMemory | executorMemory | numExecutor |
| --- | --- | --- | --- | --- | --- | --- |
| A | 8 vCPUs, 32-GB memory | 2 | 1 | 7 GB | 4 GB | 6 |
| B | 16 vCPUs, 64-GB memory | 2 | 2 | 7 GB | 8 GB | 7 |
| C | 32 vCPUs, 128-GB memory | 4 | 2 | 15 GB | 8 GB | 14 |

Table 4 resources parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| name | No | String | Resource name. You can also specify an OBS path, for example, obs://Bucket name/Package name. |
| type | No | String | Resource type. |

Table 5 groups parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| name | No | String | User group name. |
| resources | No | Array of Objects | User group resources. For details, see Table 4. |

Table 6 Response parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| id | No | String | ID of a batch processing job. |
| appId | No | String | Back-end application ID of a batch processing job. |
| name | No | String | Batch processing task name. The value contains a maximum of 128 characters. |
| owner | No | String | Owner of a batch processing job. |
| proxyUser | No | String | Proxy user (resource tenant) to which a batch processing job belongs. |
| state | No | String | Status of a batch processing job. For details, see Table 7. |
| kind | No | String | Type of a batch processing job. Only Spark parameters are supported. |
| log | No | Array of strings | Last 10 records of the current batch processing job. |
| sc_type | No | String | Type of a computing resource. If the computing resource type is customized, value CUSTOMIZED is returned. |
| cluster_name | No | String | Queue where a batch processing job is located. |
| queue | Yes | String | Queue name. Set this parameter to the name of the created DLI queue. |
| create_time | No | Long | Time when a batch processing job is created. The timestamp is expressed in milliseconds. |
| update_time | No | Long | Time when a batch processing job is updated. The timestamp is expressed in milliseconds. |
| duration | No | Long | Job running duration (unit: millisecond). |

Table 7 Batch processing job states

| Parameter | Type | Description |
| --- | --- | --- |
| starting | String | The batch processing job is being started. |
| running | String | The batch processing job is executing a task. |
| dead | String | The batch processing job has exited. |
| success | String | The batch processing job is successfully executed. |
| recovering | String | The batch processing job is being restored. |
{ + "file": "batchTest/spark-examples_2.11-2.1.0.luxor.jar", + "class_name": "org.apache.spark.examples.SparkPi", + "sc_type": "A", + "jars": ["demo-1.0.0.jar"], + "files": ["count.txt"], + "resources":[ + {"name": "groupTest/testJar.jar", "type": "jar"}, + {"name": "kafka-clients-0.10.0.0.jar", "type": "jar"}], + "groups": [ + {"name": "groupTestJar", "resources": [{"name": "testJar.jar", "type": "jar"}, {"name": "testJar1.jar", "type": "jar"}]}, + {"name": "batchTest", "resources": [{"name": "luxor.jar", "type": "jar"}]}], + "queue": " test", + "name": "TestDemo4" + + +}+
The batchTest/spark-examples_2.11-2.1.0.luxor.jar file has been uploaded through API involved in Uploading a Package Group.
+{ + "id": "07a3e4e6-9a28-4e92-8d3f-9c538621a166", + "appId": "", + "name": "", + "owner": "test1", + "proxyUser": "", + "state": "starting", + "kind": "", + "log": [], + "sc_type": "CUSTOMIZED", + "cluster_name": "aaa", + "queue": "aaa", + "create_time": 1607589874156, + "update_time": 1607589874156 +}+
Table 8 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to obtain the list of batch processing jobs in a queue of a project.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

Table 2 Query parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| job_name | No | String | Name of a batch processing job. |
| job_id | No | String | ID of the batch processing job to be queried. |
| cluster_name | No | String | DLI queue name. If this parameter is left blank, the names of all batch processing jobs in the current project are obtained. You are advised to specify this parameter, instead of leaving it blank. |
| queue_name | No | String | DLI queue name. You can query batch jobs based on the queue name. This method is recommended. |
| from | No | Integer | Index number of the start batch processing job. By default, the index number starts from 0. |
| size | No | Integer | Number of batch processing jobs to be queried. |
| state | No | String | Query batch jobs by job status. |
| owner | No | String | User who submits a job. |

None
Table 3 Response parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| from | No | Integer | Index number of the start batch processing job. |
| total | No | Integer | Total number of batch processing jobs. |
| sessions | No | Array of objects | Batch job information. For details, see Table 6 in Creating a Batch Processing Job. |
| create_time | No | Long | Time when a batch processing job is created. |

Table 4 sessions parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| duration | No | Long | Job running duration (unit: millisecond). |
| id | No | String | ID of a batch processing job. |
| state | No | String | Status of a batch processing job. |
| appId | No | String | Back-end application ID of a batch processing job. |
| log | No | Array of Strings | Last 10 records of the current batch processing job. |
| sc_type | No | String | Type of a computing resource. If the computing resource type is customized, value CUSTOMIZED is returned. |
| cluster_name | No | String | Queue where a batch processing job is located. |
| create_time | No | Long | Time when a batch processing job is created. The timestamp is in milliseconds. |
| name | No | String | Name of a batch processing job. |
| owner | No | String | Owner of a batch processing job. |
| proxyUser | No | String | Proxy user (resource tenant) to which a batch processing job belongs. |
| kind | No | String | Type of a batch processing job. Only Spark parameters are supported. |
| queue | No | String | Queue where a batch processing job is located. |
| image | No | String | Custom image. The format is Organization name/Image name:Image version. This parameter is valid only when feature is set to custom. You can use this parameter with the feature parameter to specify a user-defined Spark image for job running. |
| update_time | No | Long | Time when a batch processing job is updated. The timestamp is in milliseconds. |

None
+{ + "from": 0, + "total": 1, + "sessions": [ + { + "id": "178fa687-2e8a-41ed-a439-b00de60bb176", + "state": "dead", + "appId": null, + "log": [ + "stdout: ", + "stderr: ", + "YARN Diagnostics: " + ], + "sc_type": "A", + "cluster_name": "test", + "create_time": 1531906043036 + } + ] +}+
Table 5 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to query details about a batch processing job based on the job ID.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| batch_id | Yes | String | ID of a batch processing job. |

None
Table 2 Response parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| id | No | String | ID of a batch processing job. |
| appId | No | String | Back-end application ID of a batch processing job. |
| name | No | String | Name of a batch processing job. |
| owner | No | String | Owner of a batch processing job. |
| proxyUser | No | String | Proxy user (resource tenant) to which a batch processing job belongs. |
| state | No | String | Status of a batch processing job. For details, see Table 7 in Creating a Batch Processing Job. |
| kind | No | String | Type of a batch processing job. Only Spark parameters are supported. |
| log | No | Array of Strings | Last 10 records of the current batch processing job. |
| sc_type | No | String | Type of a computing resource. If the computing resource type is customized, value CUSTOMIZED is returned. |
| cluster_name | No | String | Queue where a batch processing job is located. |
| queue | No | String | Queue where a batch processing job is located. |
| create_time | No | Long | Time when a batch processing job is created. The timestamp is expressed in milliseconds. |
| update_time | No | Long | Time when a batch processing job is updated. The timestamp is expressed in milliseconds. |

None
+{ + "id": "0a324461-d9d9-45da-a52a-3b3c7a3d809e", + "appId": "", + "name": "", + "owner": "", + "proxyUser": "", + "state": "starting", + "kind":"", + "log": [ + "stdout: ", + "stderr: ", + "YARN Diagnostics: " + ], + "sc_type": "A", + "cluster_name": "test", + "queue": "test", + "create_time": 1531906043036, + "update_time": 1531906043036 +}+
Table 3 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to obtain the execution status of a batch processing job.
Table 1 URI parameters

| Parameter | Mandatory | Description |
| --- | --- | --- |
| project_id | Yes | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| batch_id | Yes | ID of a batch processing job. |

None
Table 2 Response parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| id | No | String | ID of a batch processing job, which is in the universal unique identifier (UUID) format. |
| state | No | String | Status of a batch processing job. For details, see Table 7 in Creating a Batch Processing Job. |

None
+{"id":"0a324461-d9d9-45da-a52a-3b3c7a3d809e","state":"Success"}+
Table 3 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to query the back-end logs of batch processing jobs.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| batch_id | Yes | String | ID of a batch processing job. |

Table 2 Query parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| from | No | Integer | Start line of the log to be displayed. By default, the last 100 lines of the log are displayed. If a log file contains fewer than 100 lines, line 0 is the start line. |
| size | No | Integer | Number of log lines to be queried. |
| type | No | String | If type is set to driver, the Spark Driver log is returned. |
| index | No | Integer | When a submitted job is retried, multiple driver logs are generated. This parameter specifies the index number of the driver log to return. The default value is 0. This parameter must be used together with the type parameter. If only index is specified, the default value of type is driver. |

None
Table 3 Response parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| id | No | String | ID of a batch processing job. |
| from | No | String | Start index of the log. |
| total | No | Long | Total number of records in the log. |
| log | No | Array of Strings | Log of the current batch processing job. |

None
+{ + "id": "0a324461-d9d9-45da-a52a-3b3c7a3d809e", + "from": 0, + "total": 3, + "log": [ + "Detailed information about job logs" + ] +}+
Table 4 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to cancel a batch processing job.

Batch processing jobs in the Successful or Failed state cannot be canceled.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| batch_id | Yes | String | ID of a batch processing job. Set the value to the job ID obtained in Creating a Batch Processing Job. |

None
Table 2 Response parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| msg | No | String | If the batch processing job is successfully canceled, value deleted is returned. |

None
+{ + "msg": "deleted" +}+
Table 3 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to upload a package group to a project. The function is similar to creating a package on the management console.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

Table 2 Request parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| paths | Yes | Array of Strings | List of OBS object paths. The OBS object path refers to the OBS object URL. |
| kind | Yes | String | File type of a package group. NOTE: If the same group of packages to be uploaded contains different file types, select file as the type of the file to be uploaded. |
| group | Yes | String | Name of the group to be created. |
| is_async | No | Boolean | Whether to upload resource packages in asynchronous mode. The default value is false, indicating that the asynchronous mode is not used. You are advised to upload resource packages in asynchronous mode. |
| tags | No | Array of Objects | Resource tag. For details, see Table 3. |

Table 4 Response parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| group_name | No | String | Group name. |
| status | No | String | Status of a package group to be uploaded. |
| resources | No | Array of strings | List of names of resource packages contained in the group. |
| details | No | Array of body | Details about a group resource package. For details, see Table 5. |
| create_time | No | Long | UNIX timestamp when a package group is uploaded. |
| update_time | No | Long | UNIX timestamp when a package group is updated. |
| is_async | No | Boolean | Whether to upload resource packages in asynchronous mode. The default value is false, indicating that the asynchronous mode is not used. You are advised to upload resource packages in asynchronous mode. |
| owner | No | String | Owner of a resource package. |

Table 5 details parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| create_time | Yes | Long | UNIX time when a resource package is uploaded. The timestamp is in milliseconds. |
| update_time | No | Long | UNIX time when the uploaded resource package is updated. The timestamp is in milliseconds. |
| resource_type | Yes | String | Resource type. |
| resource_name | No | String | Resource name. |
| status | No | String | Status of a resource package. |
| underlying_name | No | String | Name of the resource package in a queue. |
| is_async | No | Boolean | Whether to upload resource packages in asynchronous mode. The default value is false, indicating that the asynchronous mode is not used. You are advised to upload resource packages in asynchronous mode. |
{ + "paths": [ + "https://xkftest.obs.xxx.com/txr_test/jars/spark-sdv-app.jar", + "https://xkftest.obs.xxx.com/txr_test/jars/wordcount", + "https://xkftest.obs.xxx.com/txr_test/jars/wordcount.py" + ], + "kind": "jar", + "group": "gatk", + "is_async":"true" +}+
{ + "group_name": "gatk", + "status": "READY", + "resources": [ + "spark-sdv-app.jar", + "wordcount", + "wordcount.py" + ], + "details": [ + { + "create_time": 0, + "update_time": 0, + "resource_type": "jar", + "resource_name": "spark-sdv-app.jar", + "status": "READY", + "underlying_name": "987e208d-d46e-4475-a8c0-a62f0275750b_spark-sdv-app.jar" + }, + { + "create_time": 0, + "update_time": 0, + "resource_type": "jar", + "resource_name": "wordcount", + "status": "READY", + "underlying_name": "987e208d-d46e-4475-a8c0-a62f0275750b_wordcount" + }, + { + "create_time": 0, + "update_time": 0, + "resource_type": "jar", + "resource_name": "wordcount.py", + "status": "READY", + "underlying_name": "987e208d-d46e-4475-a8c0-a62f0275750b_wordcount.py" + } + ], + "create_time": 1551334579654, + "update_time": 1551345369070 +}+
Table 6 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to modify the owner of a database.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| database_name | Yes | String | Name of a database. |

Table 2 Request parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| new_owner | Yes | String | Name of the new owner. The new user must be a sub-user of the current tenant. |
Table 3 Response parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| is_success | No | Boolean | Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
{ + "new_owner": "scuser1" +}+
{ + "is_success": true, + "message": "" +}+
Table 4 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to query all resources in a project, including groups.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

Table 2 Query parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| kind | No | String | Specifies the file type. |
| tags | No | String | Specifies a label for filtering. |

None
Table 3 Response parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| resources | No | Array of Objects | List of uploaded user resources. For details about resources, see Table 4. |
| modules | No | Array of Objects | List of built-in resource groups. For details about the groups, see Table 5. |
| groups | No | Array of Objects | Uploaded package groups of a user. |
| total | Yes | Integer | Total number of returned resource packages. |

Table 4 resources parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| create_time | No | Long | UNIX timestamp when a resource package is uploaded. |
| update_time | No | Long | UNIX timestamp when the uploaded resource package is updated. |
| resource_type | No | String | Resource type. |
| resource_name | No | String | Resource name. |
| status | No | String | Status of a resource package. |
| underlying_name | No | String | Name of the resource package in the queue. |
| owner | No | String | Owner of the resource package. |

Table 5 modules parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| module_name | No | String | Module name. |
| module_type | No | String | Module type. |
| status | No | String | Status of a module. |
| resources | No | Array of Strings | List of names of resource packages contained in the group. |
| description | No | String | Module description. |
| create_time | No | Long | UNIX timestamp when a package group is uploaded. |
| update_time | No | Long | UNIX timestamp when a package group is updated. |

None
+{ + "resources": [ + { + "create_time": 1521532893736, + "update_time": 1521552364503, + "resource_type": "jar", + "resource_name": "luxor-router-1.1.1.jar", + "status": "READY", + "underlying_name": "3efffb4f-40e9-455e-8b5a-a23b4d355e46_luxor-router-1.1.1.jar" + } + ], + "groups": [ + { + "group_name": "groupTest", + "status": "READY", + "resources": [ + "part-00000-9dfc17b1-2feb-45c5-b81d-bff533d6ed13.csv.gz", + "person.csv" + ], + "details": [ + { + "create_time": 1547090015132, + "update_time": 1547090015132, + "resource_type": "jar", + "resource_name": "part-00000-9dfc17b1-2feb-45c5-b81d-bff533d6ed13.csv.gz", + "status": "READY", + "underlying_name": "db50c4dc-7187-4eb9-a5d0-73ba8102ea5e_part-00000-9dfc17b1-2feb-45c5-b81d-bff533d6ed13.csv.gz" + }, + { + "create_time": 1547091098668, + "update_time": 1547091098668, + "resource_type": "file", + "resource_name": "person.csv", + "status": "READY", + "underlying_name": "a4243a8c-bca6-4e77-a968-1f3b00217474_person.csv" + } + ], + "create_time": 1547090015131, + "update_time": 1547091098666 + } + ], + "modules": [ + { + "module_name": "gatk", + "status": "READY", + "resources": [ + "gatk.jar", + "tika-core-1.18.jar", + "s3fs-2.2.2.jar" + ], + "create_time": 1521532893736, + "update_time": 1521552364503 + } + ] +}+
Table 6 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to upload a group of JAR packages to a project.

When a resource group with the same name is uploaded, the new group overwrites the old group.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

Table 2 Request parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| paths | Yes | Array of Strings | List of OBS object paths. The OBS object path refers to the OBS object URL. |
| group | Yes | String | Name of a package group. |
Table 3 Response parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| group_name | No | String | Group name. |
| status | No | String | Status of a package group to be uploaded. |
| resources | No | Array of strings | List of names of resource packages contained in the group. |
| details | No | Array of body | Details about a group resource package. For details, see Table 4. |
| create_time | No | Long | UNIX timestamp when a package group is uploaded. |
| update_time | No | Long | UNIX timestamp when a package group is updated. |
| is_async | No | Boolean | Whether to upload resource packages in asynchronous mode. The default value is false, indicating that the asynchronous mode is not used. You are advised to upload resource packages in asynchronous mode. |
| owner | No | String | Owner of a resource package. |
| description | No | String | Description of a resource module. |
| module_name | No | String | Name of a resource module. |
| module_type | No | String | Type of a resource module. |

Table 4 details parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| create_time | No | Long | UNIX time when a resource package is uploaded. The timestamp is in milliseconds. |
| update_time | No | Long | UNIX time when the uploaded resource package is updated. The timestamp is in milliseconds. |
| resource_type | No | String | Resource type. Set this parameter to jar. |
| resource_name | No | String | Resource name. |
| status | No | String | Status of a resource package. |
| underlying_name | No | String | Name of the resource package in a queue. |
| is_async | No | Boolean | Indicates whether to upload a resource package asynchronously. |
{ + "paths": [ + "https://test.obs.xxx.com/test_dli.jar" + ], + "group": "gatk" +}+
{ + "group_name": "gatk", + "status": "READY", + "resources": [ + "test_dli.jar" + ], + "details":[ + { + "create_time":1608804435312, + "update_time":1608804435312, + "resource_type":"jar", + "resource_name":"test_dli.jar", + "status":"READY", + "underlying_name":"test_dli.jar" + } + ], + "create_time": 1521532893736, + "update_time": 1521552364503, + "is_async":false +}+
Table 5 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to upload a group of PyFile packages to a project.

When a group with the same name as the PyFile package is uploaded, the new group overwrites the old group.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

Table 2 Request parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| paths | Yes | Array of strings | List of OBS object paths. The OBS object path refers to the OBS object URL. |
| group | Yes | String | Name of a package group. |
Table 3 Response parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| group_name | No | String | Group name. |
| status | No | String | Status of a package group to be uploaded. |
| resources | No | Array of strings | List of names of resource packages contained in the group. |
| details | No | Array of body | Details about a group resource package. For details, see Table 4. |
| create_time | No | Long | UNIX timestamp when a package group is uploaded. |
| update_time | No | Long | UNIX timestamp when a package group is updated. |
| is_async | No | Boolean | Whether to upload resource packages in asynchronous mode. The default value is false, indicating that the asynchronous mode is not used. You are advised to upload resource packages in asynchronous mode. |
| owner | No | String | Owner of a resource package. |
| description | No | String | Description of a resource module. |
| module_name | No | String | Name of a resource module. |
| module_type | No | String | Type of a resource module. |

Table 4 details parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| create_time | No | Long | UNIX time when a resource package is uploaded. The timestamp is in milliseconds. |
| update_time | No | Long | UNIX time when the uploaded resource package is updated. The timestamp is in milliseconds. |
| resource_type | No | String | Resource type. Set this parameter to pyFile. |
| resource_name | No | String | Resource name. |
| status | No | String | Status of a resource package. |
| underlying_name | No | String | Name of the resource package in a queue. |
| is_async | No | Boolean | Indicates whether to upload a resource package asynchronously. |
{ + "paths": [ + "https://test.obs.xxx.com/dli_tf.py" + ], + "group": " gatk" +}+
{ + "group_name": "gatk", + "status": "READY", + "resources": [ + "dli_tf.py" + ], + "details":[ + { + "create_time":1608804435312, + "update_time":1608804435312, + "resource_type":"pyFile", + "resource_name":"dli_tf.py", + "status":"READY", + "underlying_name":"dli_tf.py" + } + ], + "create_time": 1521532893736, + "update_time": 1521552364503, + "is_async":false +}+
Table 5 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to upload a group of File packages to a project.

When a File package group with the same name is uploaded, the new group overwrites the old group.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

Table 2 Request parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| paths | Yes | Array of Strings | List of OBS object paths. The OBS object path refers to the OBS object URL. |
| group | Yes | String | Name of a package group. |
Table 3 Response parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| group_name | No | String | Group name. |
| status | No | String | Status of a package group to be uploaded. |
| resources | No | Array of strings | List of names of resource packages contained in the group. |
| details | No | Array of body | Details about a group resource package. For details, see Table 4. |
| create_time | No | Long | UNIX timestamp when a package group is uploaded. |
| update_time | No | Long | UNIX timestamp when a package group is updated. |
| is_async | No | Boolean | Whether to upload resource packages in asynchronous mode. The default value is false, indicating that the asynchronous mode is not used. You are advised to upload resource packages in asynchronous mode. |
| owner | No | String | Owner of a resource package. |
| description | No | String | Description of a resource module. |
| module_name | No | String | Name of a resource module. |
| module_type | No | String | Type of a resource module. |

Table 4 details parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| create_time | No | Long | UNIX time when a resource package is uploaded. The timestamp is expressed in milliseconds. |
| update_time | No | Long | UNIX time when the uploaded resource package is updated. The timestamp is expressed in milliseconds. |
| resource_type | No | String | Resource type. Set this parameter to file. |
| resource_name | No | String | Resource name. |
| status | No | String | Status of a resource package. |
| underlying_name | No | String | Name of the resource package in a queue. |
| is_async | No | Boolean | Indicates whether to upload a resource package asynchronously. |
{ + "paths": [ + "https: //test.obs.xxx.com/test_dli.jar", + "https://test.obs.xxx.com/dli_tf.py" + ], + "group": "gatk" +}+
{ + "group_name": "gatk", + "status": "READY", + "resources": [ + "test_dli.jar", + "dli_tf.py" + ], + "details":[ + { + "create_time":1608804435312, + "update_time":1608804435312, + "resource_type":"file", + "resource_name":"test_dli.jar", + "status":"READY", + "underlying_name":"test_dli.jar" + }, + { + "create_time":1608804435312, + "update_time":1608804435312, + "resource_type":"file", + "resource_name":"dli_tf.py", + "status":"READY", + "underlying_name":"dli_tf.py" + } + ], + "create_time": 1521532893736, + "update_time": 1521552364503, + "is_async":false +}+
Table 5 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to query resource information of a package group in a project.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| resource_name | Yes | String | Name of the resource package that is uploaded. |

Table 2 Query parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| group | No | String | Name of the package group returned when the resource package is uploaded. |
The following is an example of the URL containing the query parameter:
GET /v2.0/{project_id}/resources/{resource_name}?group={group}

None
Table 3 Response parameters

| Parameter | Type | Description |
| --- | --- | --- |
| create_time | Long | UNIX time when a resource package is uploaded. The timestamp is expressed in milliseconds. |
| update_time | Long | UNIX time when the uploaded resource package is updated. The timestamp is expressed in milliseconds. |
| resource_type | String | Resource type. |
| resource_name | String | Resource name. |
| status | String | Status of a resource package. |
| underlying_name | String | Name of the resource package in a queue. |
| owner | String | Owner of a resource package. |

None
+{ + "create_time": 1522055409139, + "update_time": 1522228350501, + "resource_type": "jar", + "resource_name": "luxor-ommanager-dist.tar.gz", + "status": "uploading", + "underlying_name": "7885d26e-c532-40f3-a755-c82c442f19b8_luxor-ommanager-dist.tar.gz" +}+
Table 4 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to delete resource packages in a group in a project.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| resource_name | Yes | String | Name of the resource package that is uploaded. |

Table 2 Query parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| group | No | String | Name of the package group returned when the resource package is uploaded. |
The following is an example of the URL containing the query parameter:
DELETE /v2.0/{project_id}/resources/{resource_name}?group={group}

None

None

None
Table 3 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This section describes the APIs provided by DLI.
| Type | Subtype | Description |
| --- | --- | --- |
| Permission-related APIs | - | You can assign permissions to queues, view queue users, assign data permissions, view database users, view table users, view table user permissions, and view the permissions of authorized users. |
| Agency-related APIs | - | You can obtain DLI agency information and create a DLI agency. |
| Queue-related APIs (Recommended) | - | You can create queues, delete queues, query all queues, modify the CIDR block of a queue, restart, scale out, or scale in a queue, query queue details, create a request for testing the connectivity of a specified address, query the connectivity of a specified address, create a scheduled queue scaling plan, query a scheduled queue scaling plan, delete scheduled queue scaling plans in batches, and delete or modify a scheduled queue scaling plan. |
| APIs Related to SQL Jobs | Database-related APIs | You can create a database, delete a database, view all databases, and modify database users. |
| APIs Related to SQL Jobs | Table-related APIs | You can create, delete, and query tables, describe table information, preview table content, modify table users, and obtain the partition information list. |
| APIs Related to SQL Jobs | Job-related APIs | You can import and export data, submit SQL jobs, cancel jobs, query all jobs, preview job results, query job status, query job details, check SQL syntax, and export query results. |
| Package Group-related APIs | - | You can upload a group resource, query the group resource list, upload a group resource in JAR format, upload a PyFile group resource, upload a File type group resource, query a resource package in a group, delete a resource package in a group, and change the owner of a group or resource package. |
| APIs Related to Flink Jobs | - | You can authorize DLI to OBS, create and update SQL jobs and user-defined Flink jobs, run jobs in batches, query the job list, job details, job execution plans, and job monitoring information. You can also stop jobs in batches, delete and batch delete jobs, export and import Flink jobs, create IEF message channels, report Flink job status and callback Flink job actions at the edge, and report IEF system events. |
| APIs Related to Spark Jobs | Batch Processing-related APIs | You can create batch jobs, cancel batch jobs, query batch job lists, query batch job details, query batch job status, and query batch job logs. |
| APIs Related to Flink Job Templates | - | You can create, update, and delete a template, and query the template list. |
| APIs Related to Enhanced Datasource Connections | - | You can create and delete enhanced datasource connections, query the enhanced datasource connection list as well as the connections, bind and unbind queues, modify host information, and query enhanced datasource connection permissions. |
| APIs Related to Global Variables | - | You can create, delete, modify, and query global variables. |
{ + "projects": [ + { + "domain_id": "65382450e8f64ac0870cd180d14e684b", + "is_domain": false, + "parent_id": "65382450e8f64ac0870cd180d14e684b", + "name": "project_name", + "description": "", + "links": { + "next": null, + "previous": null, + "self": "https://www.example.com/v3/projects/a4a5d4098fb4474fa22cd05f897d6b99" + }, + "id": "a4a5d4098fb4474fa22cd05f897d6b99", + "enabled": true + } + ], + "links": { + "next": null, + "previous": null, + "self": "https://www.example.com/v3/projects" + } +}+
This API is used to create an enhanced datasource connection with other services.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

Table 2 Request parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| name | Yes | String | Name of the connection. |
| dest_vpc_id | Yes | String | ID of the service VPC to be connected. |
| dest_network_id | Yes | String | Subnet ID of the to-be-connected service. |
| queues | No | Array of Strings | List of queue names that are available for datasource connections. |
| routetable_id | No | String | Route table associated with the subnet of the service. |
| hosts | No | Array of Objects | User-defined host information. A maximum of 20,000 records are supported. For details, see Table 3. |
| tags | No | Array of Objects | Tags of datasource connections. For details, see Table 4. |

Table 3 hosts parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| name | No | String | User-defined host name. The value can consist of 128 characters, including digits, letters, underscores (_), hyphens (-), and periods (.). It must start with a letter. |
| ip | No | String | IPv4 address of the host. |
Table 5 Response parameters

| Parameter | Type | Description |
| --- | --- | --- |
| is_success | Boolean | Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | String | System prompt. If execution succeeds, the message may be left blank. |
| connection_id | String | Connection ID. Identifies the UUID of a datasource connection. |
{ + "name": "test", + "dest_vpc_id": "22094d8f-c310-4621-913d-4c4d655d8495", + "dest_network_id": "78f2562a-36e4-4b39-95b9-f5aab22e1281", + "queues": [ + "q1", + "q2" + ], + "hosts": [ + { + "ip":"192.168.0.1", + "name":"ecs-97f8-0001" + }, + { + "ip":"192.168.0.2", + "name":"ecs-97f8-0002" + } + ] +}+
{ + "is_success": true, +"message": "Create peer connection for queues:{queue list in the request parameter}", + "connection_id": "2a620c33-5609-40c9-affd-2b6453071b0f" +}+
Table 6 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to delete an enhanced datasource connection.

A connection that is being created cannot be deleted.

DELETE /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| connection_id | Yes | String | Connection ID. Identifies the UUID of a datasource connection. Set the value to the connection ID returned by Creating an Enhanced Datasource Connection. |

None
Table 2 Response parameters

| Parameter | Type | Description |
| --- | --- | --- |
| is_success | Boolean | Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | String | System message. Value Deleted indicates that the operation is successful. |

None
+{ + "is_success": true, + "message": "Deleted" +}+
Table 3 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to query a created enhanced datasource connection.

GET /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| connection_id | Yes | String | Connection ID. Identifies the UUID of a datasource connection. |

None
Table 2 Response parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| is_success | No | Boolean | Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| id | No | String | Connection ID. Identifies the UUID of a datasource connection. |
| name | No | String | User-defined connection name. |
| status | No | String | Connection status. The options are CREATING, ACTIVE, and FAILED. For details, see Table 5. |
| available_queue_info | No | Array of Objects | For details about how to create a datasource connection for each queue, see Table 3. |
| dest_vpc_id | No | String | VPC ID of the connected service. |
| dest_network_id | No | String | Subnet ID of the connected service. |
| create_time | No | Long | Time when a link is created. The time is converted to a UTC timestamp. |
| hosts | No | Array of Objects | User-defined host information. For details, see Table 4. |

Table 3 available_queue_info parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| peer_id | No | String | ID of a datasource connection. |
| status | No | String | Connection status. For details about the status, see Table 5. |
| name | No | String | Name of a queue. |
| err_msg | No | String | Detailed error message when the status is FAILED. |
| update_time | No | Long | Time when the available queue list was updated. |

Table 4 hosts parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| name | No | String | User-defined host name. |
| ip | No | String | IPv4 address of the host. |

Table 5 Connection status

| Parameter | Definition | Description |
| --- | --- | --- |
| CREATING | Creating | The datasource connection is being created. |
| ACTIVE | Active | The datasource connection has been created, and the connection to the destination address is normal. |
| FAILED | Failed | Failed to create a datasource connection. |

None
+{ + "is_success": true, + "message": "", + "name": "withvpc", + "id": "4c693ecc-bab8-4113-a838-129cedc9a563", + "available_queue_info": [ + { + "status": "ACTIVE", + "name": "resource_mode_1", + "peer_id": "d2ae6628-fa37-4e04-806d-c59c497492d1", + "err_msg": "", + "update_time": 1566889577861 + } + ], + "dest_vpc_id": "22094d8f-c310-4621-913d-4c4d655d8495", + "dest_network_id": "78f2562a-36e4-4b39-95b9-f5aab22e1281", + "create_time": 1566888011125, + "status": "ACTIVE", + "hosts": [ + { + "ip":"192.168.0.1", + "name":"ecs-97f8-0001" + }, + { + "ip":"192.168.0.2", + "name":"ecs-97f8-0002" + } + ] +}+
Table 6 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to query the list of created enhanced datasource connections.
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |

Table 2 Query parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| limit | No | String | Maximum number of connections to be queried. The default value is 100. If limit is set to 0, all datasource connections are returned. |
| offset | No | String | Offset of the query result. The default value is 0. Note that the connections are sorted by creation time. |
| status | No | String | Connection status. The options are CREATING, ACTIVE, and FAILED. NOTE: The connection status is case insensitive. |
| name | No | String | Connection name. |
| tags | No | String | List of tag names. The value is k=v for a single tag. Multiple tags are separated by commas (,). Example: tag1=v1,tag2=v2. |
The following is an example of the URL containing the query parameter:
GET /v2.0/{project_id}/datasource/enhanced-connections?limit={limit}&offset={offset}&status={status}&name={name}

None
Table 3 Response parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| is_success | No | Boolean | Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| connections | No | Array of Objects | Datasource connection information list. For details, see Table 4. |
| count | No | Integer | Number of returned datasource connections. |

Table 4 connections parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| id | No | String | Connection ID. Identifies the UUID of a datasource connection. |
| name | No | String | User-defined connection name. |
| status | No | String | Connection status. The options are CREATING, ACTIVE, and FAILED. For details, see Table 7. |
| available_queue_info | No | Array of Objects | For details about how to create a datasource connection for each queue, see Table 5. |
| dest_vpc_id | No | String | VPC ID of the connected service. |
| dest_network_id | No | String | Subnet ID of the connected service. |
| isPrivis | No | Boolean | Whether the project permissions have been granted for the enhanced datasource connection. If the datasource connection has the permissions, the value of this field is false. Otherwise, the value is true. |
| create_time | No | Long | Time when a link is created. The time is converted to a UTC timestamp. |
| hosts | No | Array of Objects | User-defined host information. For details, see Table 6. |

Table 5 available_queue_info parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| peer_id | No | String | ID of a datasource connection. |
| status | No | String | Connection status. For details about the status, see Table 7. |
| name | No | String | Name of a queue. |
| err_msg | No | String | Detailed error message when the status is FAILED. |
| update_time | No | Long | Time when the available queue list was updated. |

Table 6 hosts parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| name | No | String | Custom host name. |
| ip | No | String | IPv4 address of the host. |

Table 7 Connection status

| Parameter | Definition | Description |
| --- | --- | --- |
| CREATING | Creating | The datasource connection is being created. |
| ACTIVE | Active | The datasource connection has been created, and the connection to the destination address is normal. |
| FAILED | Failed | Failed to create a datasource connection. |

None
+{ + "is_success": true, + "message": "", + "count": 1, + "connections": [ + { + "name": "withvpc", + "id": "4c693ecc-bab8-4113-a838-129cedc9a563", + "available_queue_info": [ + { + "status": "ACTIVE", + "name": "resource_mode_1", + "peer_id": "d2ae6628-fa37-4e04-806d-c59c497492d1", + "err_msg": "", + "update_time": 1566889577861 + } + ], + "dest_vpc_id": "22094d8f-c310-4621-913d-4c4d655d8495", + "dest_network_id": "78f2562a-36e4-4b39-95b9-f5aab22e1281", + "isPrivis": true, + "create_time": 1566888011125, + "status": "ACTIVE" + } + ] +}+
Table 8 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to bind a queue to a created enhanced datasource connection.

POST /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/associate-queue
Table 1 URI parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| connection_id | Yes | String | Connection ID. Identifies the UUID of a datasource connection. Set the value to the connection ID returned by Creating an Enhanced Datasource Connection. |

Table 2 Request parameters

| Parameter | Mandatory | Type | Description |
| --- | --- | --- | --- |
| queues | Yes | Array of Strings | List of queue names that are available for datasource connections. |
Table 3 Response parameters

| Parameter | Type | Description |
| --- | --- | --- |
| is_success | Boolean | Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | String | System prompt. If execution succeeds, the message may be left blank. |
{ + "queues": [ + "q1", + "q2" + ] +}+
{ + "is_success": true, + "message": "associated peer connection for queues: {q1,q2}." +}+
Table 4 describes the status code.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but returns an error code and error message instead. For details, see Error Code.

This API is used to unbind a queue from an enhanced datasource connection.

POST /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/disassociate-queue
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
connection_id + |
+Yes + |
+String + |
+Connection ID. Identifies the UUID of a datasource connection. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
queues + |
+Yes + |
+Array of Strings + |
+List of queue names that are available for datasource connections. + |
+
Parameter + |
+Type + |
+Description + |
+
---|---|---|
is_success + |
+Boolean + |
+Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. + |
+
message + |
+String + |
+System prompt. If execution succeeds, the message may be left blank. + |
+
{ + "queues": [ + "q1", + "q2" + ] +}+
{ + "is_success": true, + "message": "Disassociated peer connection for queues:{q1,q2}." +}+
Table 4 describes the status code.
+ + +If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.
+This API is used to create a queue. The queue will be bound to specified compute resources.
+It takes 5 to 15 minutes to start a job using a new queue for the first time.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
queue_name + |
+Yes + |
+String + |
+Name of a newly created resource queue. The name can contain only digits, letters, and underscores (_), but cannot contain only digits or start with an underscore (_). Length range: 1 to 128 characters. + NOTE:
+The queue name is case-insensitive. The uppercase letters will be automatically converted to lowercase letters. + |
+
queue_type + |
+No + |
+String + |
+Queue type. The options are sql and general. +
NOTE:
+If the type is not specified, the default value sql is used. + |
+
description + |
+No + |
+String + |
+Description of a queue. + |
+
cu_count + |
+Yes + |
+Integer + |
+Minimum number of CUs that are bound to a queue. Currently, the value can only be 16, 64, or 256. + |
+
enterprise_project_id + |
+No + |
+String + |
+Enterprise project ID. The value 0 indicates the default enterprise project. + NOTE:
+Users who have enabled Enterprise Management can set this parameter to bind a specified project. + |
+
platform + |
+No + |
+String + |
+CPU architecture of compute resources, for example, x86_64. +
|
+
resource_mode + |
+No + |
+Integer + |
+Queue resource mode. The options are as follows: +0: indicates the shared resource mode. +1: indicates the exclusive resource mode. + |
+
labels + |
+No + |
+Array of Strings + |
+Tag information of the queue to be created. Currently, the tag information indicates whether the queue is cross-AZ (a JSON character string). The value can only be 2, which creates a dual-AZ queue whose compute resources are distributed across two AZs. + |
+
tags + |
+No + |
+Array of Objects + |
+Queue tags for identifying cloud resources. A tag consists of a key and tag value. For details, see Table 3. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
is_success + |
+No + |
+Boolean + |
+Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. + |
+
message + |
+No + |
+String + |
+System prompt. If execution succeeds, the parameter setting may be left blank. + |
+
queue_name + |
+No + |
+String + |
+Name of the created queue. + NOTE:
+The queue name is case-insensitive. The uppercase letters will be automatically converted to lowercase letters. + |
+
{ + "queue_name": "queue1", + "description": "test", + "cu_count": 16, + "resource_mode": 1, + "queue_type": "general", + "labels": ["multi_az=2"] +}+
{ + "is_success": true, + "message": "", + "queue_name": "queue1" +}+
Table 5 describes the status code.
+ + +If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.
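As a quick illustration, the following Python sketch submits the sample request above to POST /v1.0/{project_id}/queues (the URI listed for queue creation in the actions table later in this reference). Host, project ID, and token are placeholder assumptions.

import requests

ENDPOINT = "https://dli.example-region.myhuaweicloud.com"  # placeholder
PROJECT_ID = "your_project_id"                             # placeholder
TOKEN = "your_iam_token"                                   # placeholder

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/queues"
headers = {"X-Auth-Token": TOKEN, "Content-Type": "application/json"}

# Same body as the sample request: a 16-CU exclusive general queue spanning two AZs.
payload = {
    "queue_name": "queue1",
    "description": "test",
    "cu_count": 16,
    "resource_mode": 1,
    "queue_type": "general",
    "labels": ["multi_az=2"],
}

resp = requests.post(url, json=payload, headers=headers)
resp.raise_for_status()
print(resp.json())  # expect {"is_success": true, "message": "", "queue_name": "queue1"}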
+This API is used to delete a specified queue.
+If a task is being executed in a specified queue, the queue cannot be deleted.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
queue_name + |
+Yes + |
+String + |
+Name of a queue to be deleted. + |
+
None
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
is_success + |
+No + |
+Boolean + |
+Whether the request is successfully executed. Value true indicates that the request is successfully executed. + |
+
message + |
+No + |
+String + |
+System prompt. If execution succeeds, the parameter setting may be left blank. + |
+
None
+{ + "is_success": true, + "message": "" +}+
Table 3 describes the status code.
+ + +If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.
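A matching deletion sketch, again with placeholder host, project ID, and token; the URI DELETE /v1.0/{project_id}/queues/{queue_name} is the one listed for queue deletion in the actions table later in this reference. Remember that a queue with a running task cannot be deleted.

import requests

ENDPOINT = "https://dli.example-region.myhuaweicloud.com"  # placeholder
PROJECT_ID = "your_project_id"                             # placeholder
TOKEN = "your_iam_token"                                   # placeholder

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/queues/queue1"
resp = requests.delete(url, headers={"X-Auth-Token": TOKEN})
resp.raise_for_status()
print(resp.json())  # expect {"is_success": true, "message": ""}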
+This API is used to list all queues under the project.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
queue_type + |
+No + |
+String + |
+Type of the queue. The options include sql and general. +
If this parameter is not specified, the default value sql is used. + |
+
with-priv + |
+No + |
+Boolean + |
+Whether to return permission information. + |
+
page-size + |
+No + |
+Integer + |
+Maximum number of lines displayed on each page. The default value is Integer.MAX_VALUE, indicating that all results are displayed on one page. + |
+
current-page + |
+No + |
+Integer + |
+Current page number. The default value is 1. + |
+
order + |
+No + |
+String + |
+Field based on which queues are ordered. +The default value is queue_name_asc (alphabetically ascending order on queue names). Other options are queue_name_desc (alphabetically descending order on queue names), cu_asc (ascending order on CUs), and cu_desc (descending order on CUs). + |
+
tags + |
+No + |
+String + |
+Query results are filtered by tag. + |
+
None
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
is_success + |
+No + |
+Boolean + |
+Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. + |
+
message + |
+No + |
+String + |
+System prompt. If execution succeeds, the parameter setting may be left blank. + |
+
queues + |
+No + |
+Array of Objects + |
+Queue information. For details, see Table 4. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
queue_name + |
+No + |
+String + |
+Name of a queue. + |
+
description + |
+No + |
+String + |
+Queue description. + |
+
owner + |
+No + |
+String + |
+User who creates a queue. + |
+
create_time + |
+No + |
+Long + |
+Time when the queue is created. The timestamp is expressed in milliseconds. + |
+
queue_type + |
+No + |
+String + |
+Queue type, for example, sql, general, or spark (as in the sample response below). +
If this parameter is not specified, the default value sql is used. + |
+
cu_count + |
+No + |
+Integer + |
+Number of compute units (CUs) bound to a queue, that is, the number of CUs in the current queue. + |
+
resource_id + |
+No + |
+String + |
+Resource ID of a queue. + |
+
enterprise_project_id + |
+No + |
+String + |
+Enterprise project ID. 0 indicates the default enterprise project. + NOTE:
+Users who have enabled Enterprise Management can set this parameter to bind a specified project. + |
+
cidr_in_vpc + |
+No + |
+String + |
+The VPC CIDR block of the queue. + |
+
cidr_in_mgntsubnet + |
+No + |
+String + |
+CIDR block of the management subnet + |
+
cidr_in_subnet + |
+No + |
+String + |
+Subnet CIDR block + |
+
resource_mode + |
+No + |
+Integer + |
+Resource mode. The value 0 indicates the shared resource mode, and 1 indicates the exclusive resource mode. +
|
+
platform + |
+No + |
+String + |
+CPU architecture of queue compute resources, for example, x86_64. +
|
+
is_restarting + |
+No + |
+Boolean + |
+Whether to restart the queue. The default value is false. + |
+
labels + |
+No + |
+String + |
+Tag information of the queue, as a JSON string indicating whether the queue is dual-AZ. Currently, only the value 2 is supported, indicating a dual-AZ queue whose compute resources are distributed across two AZs. + |
+
cu_spec + |
+No + |
+Integer + |
+Specifications of a queue. For a queue whose billing mode is yearly/monthly, this parameter indicates the CU value of the yearly/monthly part. For a pay-per-use queue, this parameter indicates the initial value when a user purchases a queue. + |
+
cu_scale_out_limit + |
+No + |
+Integer + |
+Upper limit of the CU value for elastic scaling of the current queue. + |
+
cu_scale_in_limit + |
+No + |
+Integer + |
+Lower limit of the CU value for elastic scaling of the current queue. + |
+
None
+{ + "is_success": "true", + "message": "", + "queues": [ + { + "queue_name": "test", + "owner": "testuser", + "description": "", + "create_time": 1562221422671, + "queue_type": "spark", + "cu_count": 16, + "resource_id": "26afb850-d3c9-42c1-81c0-583d1163e80f", + "cidr_in_vpc": "10.0.0.0/8", + "cidr_in_subnet": "10.0.0.0/24", + "cidr_in_mgntsubnet": "10.23.128.0/24", + "resource_mode": 1, + "platform": "x86_64", + "is_restarting": "false", + "labels": "multi_az=2", + "resource_type": "vm", + "cu_spec": 16 + } + ] + }+
Table 5 describes the status code.
+ + +If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.
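The query parameters above map directly onto the URL query string. A minimal sketch, assuming the listing endpoint is GET /v1.0/{project_id}/queues (the same path queue creation POSTs to) and using placeholder host, project ID, and token:

import requests

ENDPOINT = "https://dli.example-region.myhuaweicloud.com"  # placeholder
PROJECT_ID = "your_project_id"                             # placeholder
TOKEN = "your_iam_token"                                   # placeholder

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/queues"
# Page through general queues, 20 per page, ordered by CU count descending.
params = {"queue_type": "general", "page-size": 20, "current-page": 1, "order": "cu_desc"}

resp = requests.get(url, headers={"X-Auth-Token": TOKEN}, params=params)
resp.raise_for_status()
for q in resp.json().get("queues", []):
    print(q["queue_name"], q["cu_count"], q["queue_type"])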
+This API is used to modify the host information of a connected datasource. Only full overwriting is supported.
+PUT /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
connection_id + |
+Yes + |
+String + |
+Connection ID. Identifies the UUID of a datasource connection. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
hosts + |
+Yes + |
+Array of objects + |
+The user-defined host information. A maximum of 20,000 records is supported. For details, see hosts request parameters. If this parameter is left blank, all configured host information will be deleted. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
name + |
+No + |
+String + |
+The user-defined host name. The value can contain a maximum of 128 characters, including digits, letters, underscores (_), hyphens (-), and periods (.). It must start with a letter. + |
+
ip + |
+No + |
+String + |
+The IPv4 address of the host. + |
+
Parameter + |
+Type + |
+Description + |
+
---|---|---|
is_success + |
+Boolean + |
+Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. + |
+
message + |
+String + |
+System prompt. If execution succeeds, the message may be left blank. + |
+
{ + "hosts": [ + { + "ip":"192.168.0.1", + "name":"ecs-97f8-0001" + }, + { + "ip":"192.168.0.2", + "name":"ecs-97f8-0002" + } + ] +}+
{ + "is_success": true, + "message": "" +}+
Table 5 describes the status code.
+ + +If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.
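Because only full overwriting is supported, every call must carry the complete host list. A sketch with placeholder host, project ID, connection ID, and token:

import requests

ENDPOINT = "https://dli.example-region.myhuaweicloud.com"  # placeholder
PROJECT_ID = "your_project_id"                             # placeholder
CONNECTION_ID = "your_connection_id"                       # placeholder
TOKEN = "your_iam_token"                                   # placeholder

url = f"{ENDPOINT}/v2.0/{PROJECT_ID}/datasource/enhanced-connections/{CONNECTION_ID}"
headers = {"X-Auth-Token": TOKEN, "Content-Type": "application/json"}

# Full replacement: hosts not listed here are removed.
# Sending {"hosts": []} deletes all configured host records.
payload = {
    "hosts": [
        {"ip": "192.168.0.1", "name": "ecs-97f8-0001"},
        {"ip": "192.168.0.2", "name": "ecs-97f8-0002"},
    ]
}

resp = requests.put(url, json=payload, headers=headers)
resp.raise_for_status()
print(resp.json())  # expect {"is_success": true, "message": ""}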
+This section describes fine-grained permissions management for your DLI resources. If your account does not need individual IAM users, you can skip this section.
+By default, new IAM users do not have permissions assigned. You need to add them to one or more groups, and attach policies or roles to these groups. Users inherit permissions from the groups to which they are added. After authorization, the user can perform specified operations on DLI based on the permissions.
+ +Policy-based authorization is useful if you want to allow or deny access to an API.
+An account has all of the permissions required to call all APIs, but IAM users must have the required permissions specifically assigned. The permissions required for calling an API are determined by the actions supported by the API. Only users who have been granted permissions allowing the actions can call the API successfully. For example, if an IAM user needs to create a queue using an API, the user must have been granted permissions that allow the dli:queue:create_queue action.
+DLI provides system-defined policies that can be directly used in IAM. You can also create custom policies and use them to supplement system-defined policies, implementing more refined access control. Operations supported by policies are specific to APIs. The following are common concepts related to policies:
+The check mark (√) indicates that an action takes effect. The cross mark (x) indicates that an action does not take effect.
+DLI supports the following actions that can be defined in custom policies:
+ +Permission + |
+API + |
+Actions + |
+Dependent Permission + |
+IAM Project +(Project) + |
+Enterprise Project +(Enterprise Project) + |
+
---|---|---|---|---|---|
Creating a Queue + |
+POST /v1.0/{project_id}/queues + |
+dli:queue:create_queue + |
+- + |
+√ + |
+√ + |
+
Deleting a Queue + |
+DELETE /v1.0/{project_id}/queues/{queue_name} + |
+dli:queue:drop_queue + |
+- + |
+√ + |
+√ + |
+
Submitting a Job + |
+POST /v1.0/{project_id}/jobs/submit-job + |
+dli:queue:submit_job + |
+- + |
+√ + |
+× + |
+
Canceling a job + |
+DELETE /v1.0/{project_id}/jobs/{job_id} + |
+dli:queue:cancel_job + |
+- + |
+√ + |
+× + |
+
Viewing Queue Permissions of Other Users + |
+GET /v1.0/{project_id}/queues/{queue_name}/users + |
+dli:queue:show_privileges + |
+- + |
+√ + |
+× + |
+
Restarting a queue + |
+PUT /v1.0/{project_id}/queues/{queue_name}/action + |
+dli:queue:restart + |
+- + |
+√ + |
+× + |
+
Scaling out/in a queue + |
+PUT /v1.0/{project_id}/queues/{queue_name}/action + |
+dli:queue:scale_queue + |
+- + |
+√ + |
+× + |
+
Granting permissions to a specified user queue + |
+PUT /v1.0/{project_id}/user-authorization + |
+dli:queue:grant_privilege + |
+- + |
+√ + |
+× + |
+
Removing permissions of a specified user queue + |
+PUT /v1.0/{project_id}/user-authorization + |
+dli:queue:revoke_privilege + |
+- + |
+√ + |
+× + |
+
Creating a Database + |
+POST /v1.0/{project_id}/databases + |
+dli:database:create_database + |
+- + |
+√ + |
+× + |
+
Deleting a Database + |
+DELETE /v1.0/{project_id}/databases/{database_name} + |
+dli:database:drop_database + |
+- + |
+√ + |
+× + |
+
Modifying database configuration + |
+POST /v1.0/{project_id}/jobs/submit-job + |
+dli:database:alter_database_properties + |
+- + |
+√ + |
+× + |
+
Explaining the SQL Statement as an Execution Plan + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:database:explain + |
+- + |
+√ + |
+× + |
+
Creating a Table + |
+POST /v1.0/{project_id}/databases/{database_name}/tables + |
+dli:database:create_table + |
+- + |
+√ + |
+× + |
+
Creating a View + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:database:create_view + |
+- + |
+√ + |
+× + |
+
Creating a Function + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:database:create_function + |
+- + |
+√ + |
+× + |
+
Describing a Function + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:database:describe_function + |
+- + |
+√ + |
+× + |
+
Deleting a Function + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:database:drop_function + |
+- + |
+√ + |
+× + |
+
Displaying a Function + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:database:show_functions + |
+- + |
+√ + |
+× + |
+
Creating a role + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:database:create_role + |
+- + |
+√ + |
+× + |
+
Deleting a role + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:database:drop_role + |
+- + |
+√ + |
+× + |
+
Displaying a Role + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:database:show_roles + |
+- + |
+√ + |
+× + |
+
Displaying All Roles + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:database:show_all_roles + |
+- + |
+√ + |
+× + |
+
Binding a Role + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:database:grant_role + |
+- + |
+√ + |
+× + |
+
Unbinding the Role + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:database:revoke_role + |
+- + |
+√ + |
+× + |
+
Displaying the Binding Relationships Between All Roles and Users + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:database:show_users + |
+- + |
+√ + |
+× + |
+
Viewing Database Permissions of Other Users + |
+GET /v1.0/{project_id}/databases/{database_name}/users + |
+dli:database:show_privileges + |
+- + |
+√ + |
+× + |
+
Displaying database + |
+GET /v1.0/{project_id}/databases + |
+dli:database:display_database + |
+- + |
+√ + |
+× + |
+
Displaying all databases + |
+GET /v1.0/{project_id}/databases + |
+dli:database:display_all_databases + |
+- + |
+√ + |
+× + |
+
Displaying all tables + |
+GET /v1.0/{project_id}/databases + |
+dli:database:display_all_tables + |
+- + |
+√ + |
+× + |
+
Granting database permissions to a specified user + |
+PUT /v1.0/{project_id}/user-authorization + |
+dli:database:grant_privilege + |
+- + |
+√ + |
+× + |
+
Removing database permissions of a specified user + |
+PUT /v1.0/{project_id}/user-authorization + |
+dli:database:revoke_privilege + |
+- + |
+√ + |
+× + |
+
Deleting a Table + |
+DELETE /v1.0/{project_id}/databases/{database_name}/tables/{table_name} + |
+dli:table:drop_table + |
+- + |
+√ + |
+× + |
+
Displaying Table Structure + |
+GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name} + |
+dli:table:describe_table + |
+- + |
+√ + |
+× + |
+
Querying a Table + |
+GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/preview + |
+dli:table:select + |
+- + |
+√ + |
+× + |
+
Displaying table configuration + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:table:show_table_properties + |
+- + |
+√ + |
+× + |
+
Displaying the Table Creation Statement + |
+GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/preview + |
+dli:table:show_create_table + |
+- + |
+√ + |
+× + |
+
Displaying All Partitions + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:table:show_partitions + |
+- + |
+√ + |
+× + |
+
Setting Table Configuration + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:table:alter_table_set_properties + |
+- + |
+√ + |
+× + |
+
Adding a Column + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:table:alter_table_add_columns + |
+- + |
+√ + |
+× + |
+
Adding Partitions to the Partitioned Table + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:table:alter_table_add_partition + |
+- + |
+√ + |
+× + |
+
Renaming a Table Partition + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:table:alter_table_rename_partition + |
+- + |
+√ + |
+× + |
+
Deleting Partitions from a Partitioned Table + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:table:alter_table_drop_partition + |
+- + |
+√ + |
+× + |
+
Restoring Table Partitions + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:table:alter_table_recover_partition + |
+- + |
+√ + |
+× + |
+
Renaming a Table + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:table:alter_table_rename + |
+- + |
+√ + |
+× + |
+
Setting the Partition Path + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:table:alter_table_set_location + |
+- + |
+√ + |
+× + |
+
Inserting data into a table + |
+POST /v1.0/{project_id}/jobs/submit-job, statement invoking + |
+dli:table:insert_into_table + |
+- + |
+√ + |
+× + |
+
Rewriting table data + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:table:insert_overwrite_table + |
+- + |
+√ + |
+× + |
+
Viewing Table Permissions of Other Users + |
+GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/users + |
+dli:table:show_privileges + |
+- + |
+√ + |
+× + |
+
Clearing a table + |
+POST /v1.0/{project_id}/jobs/submit-job + |
+dli:table:truncate_table + |
+- + |
+√ + |
+× + |
+
Updating a table + |
+POST /v1.0/{project_id}/jobs/submit-job + |
+dli:table:update + |
+- + |
+√ + |
+× + |
+
Deleting data in a table + |
+POST /v1.0/{project_id}/jobs/submit-job + |
+dli:table:delete + |
+- + |
+√ + |
+× + |
+
Modifying column information + |
+POST /v1.0/{project_id}/jobs/submit-job + |
+dli:table:alter_table_change_column + |
+- + |
+√ + |
+× + |
+
Deleting a column + |
+POST /v1.0/{project_id}/jobs/submit-job + |
+dli:table:alter_table_drop_columns + |
+- + |
+√ + |
+× + |
+
Displaying data segments + |
+POST /v1.0/{project_id}/jobs/submit-job + |
+dli:table:show_segments + |
+- + |
+√ + |
+× + |
+
Merging data segments + |
+POST /v1.0/{project_id}/jobs/submit-job + |
+dli:table:compaction + |
+- + |
+√ + |
+× + |
+
Modifying a View + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:table:alter_view + |
+- + |
+√ + |
+× + |
+
Displaying a table + |
+POST /v1.0/{project_id}/jobs/submit-job + |
+dli:table:display_table + |
+- + |
+√ + |
+× + |
+
Granting data table permissions to a specified user + |
+PUT /v1.0/{project_id}/user-authorization + |
+dli:table:grant_privilege + |
+- + |
+√ + |
+× + |
+
Removing data table permissions of a specified user + |
+PUT /v1.0/{project_id}/user-authorization + |
+dli:table:revoke_privilege + |
+- + |
+√ + |
+× + |
+
Viewing the security authentication information permission list of other users + |
+GET /v1.0/{project_id}/datasource/auth-infos/{auth_name}/users + |
+dli:datasourceauth:show_privileges + |
+- + |
+√ + |
+× + |
+
Using security authentication information + |
+POST /v1.0/{project_id}/jobs/submit-job + |
+dli:datasourceauth:use_auth + |
+- + |
+√ + |
+× + |
+
Deleting security authentication information + |
+DELETE /v2.0/{project_id}/datasource/auth-infos/{auth_info_name} + |
+dli:datasourceauth:drop_auth + |
+- + |
+√ + |
+× + |
+
Granting security authentication permissions to a specified user + |
+PUT /v1.0/{project_id}/user-authorization + |
+dli:datasourceauth:grant_privilege + |
+- + |
+√ + |
+× + |
+
Updating security authentication information + |
+PUT /v2.0/{project_id}/datasource/auth-infos + |
+dli:datasourceauth:update_auth + |
+- + |
+√ + |
+× + |
+
Removing security authentication permissions of a specified user + |
+PUT /v1.0/{project_id}/user-authorization + |
+dli:datasourceauth:revoke_privilege + |
+- + |
+√ + |
+× + |
+
Querying job details + |
+GET /v1.0/:x_project_id/streaming/jobs/:job_id + |
+dli:jobs:get + |
+- + |
+√ + |
+× + |
+
Querying a job list + |
+GET /v1.0/:x_project_id/streaming/jobs + |
+dli:jobs:list_all + |
+- + |
+√ + |
+× + |
+
Creating a job + |
+POST /v1.0/:x_project_id/streaming/sql-jobs + |
+dli:jobs:create + |
+- + |
+√ + |
+× + |
+
Updating a job + |
+PUT /v1.0/:x_project_id/streaming/sql-jobs/:job_id + |
+dli:jobs:update + |
+- + |
+√ + |
+× + |
+
Deleting a job + |
+POST /v1.0/:x_project_id/streaming/jobs/delete + |
+dli:jobs:delete + |
+- + |
+√ + |
+× + |
+
Starting a job + |
+POST /v1.0/:x_project_id/streaming/jobs/run + |
+dli:jobs:start + |
+- + |
+√ + |
+× + |
+
Stopping a job + |
+POST /v1.0/:x_project_id/streaming/jobs/stop + |
+dli:jobs:stop + |
+- + |
+√ + |
+× + |
+
Exporting a job + |
+POST /v1.0/:x_project_id/streaming/jobs/export + |
+dli:jobs:export + |
+- + |
+√ + |
+× + |
+
Granting job permissions to a specified user + |
+PUT /v1.0/{{project_id}}/authorization + |
+dli:jobs:grant_privilege + |
+- + |
+√ + |
+× + |
+
Removing job permissions of a specified user + |
+PUT /v1.0/{{project_id}}/authorization + |
+dli:jobs:revokePrivilege + |
+- + |
+√ + |
+× + |
+
Querying a Column + |
+POST /v1.0/{project_id}/jobs/submit-job, SQL statement invoking + |
+dli:column:select + |
+- + |
+√ + |
+× + |
+
Granting permissions to a specified user queue + |
+PUT /v1.0/{project_id}/user-authorization + |
+dli:column:grant_privilege + |
+- + |
+√ + |
+× + |
+
Removing permissions of a specified user queue + |
+PUT /v1.0/{project_id}/user-authorization + |
+dli:column:revoke_privilege + |
+- + |
+√ + |
+× + |
+
Querying the Flink Job List + |
+GET /v1.0/:x_project_id/jobs + |
+dli:jobs:list_job + |
+- + |
+√ + |
+× + |
+
Querying Flink Job Details + |
+GET /v1.0/:x_project_id/job/:job_id + |
+dli:jobs:get_job + |
+- + |
+√ + |
+× + |
+
Creating a Flink Job + |
+POST /v1.0/:x_project_id/sql_job + |
+dli:jobs:create_job + |
+- + |
+√ + |
+× + |
+
Updating a Flink Job + |
+PATCH /v1.0/:x_project_id/sql_job + |
+dli:jobs:update_job + |
+- + |
+√ + |
+× + |
+
Deleting a Flink Job + |
+DELETE /v1.0/:x_project_id/job/:job_id + |
+dli:jobs:delete_job + |
+- + |
+√ + |
+× + |
+
Starting a Flink Job + |
+POST /v1.0/:x_project_id/job/:job_id/run + |
+dli:queue:submit_job + |
+- + |
+√ + |
+× + |
+
Stopping a Flink Job + |
+POST /v1.0/:x_project_id/job/:job_id/stop + |
+dli:queue:cancel_job + |
+- + |
+√ + |
+× + |
+
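To make the table concrete, here is a minimal sketch of a custom policy that allows only queue creation and job submission. It assumes the standard IAM custom-policy JSON layout (Version 1.1, Allow/Deny statements over action lists); the action names come from the table above.

{
    "Version": "1.1",
    "Statement": [
        {
            "Effect": "Allow",
            "Action": [
                "dli:queue:create_queue",
                "dli:queue:submit_job"
            ]
        }
    ]
}

A user holding only this policy could call POST /v1.0/{project_id}/queues and POST /v1.0/{project_id}/jobs/submit-job, but any other DLI API call would be denied.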
This API is used to grant DLI the permission to access OBS buckets for saving job checkpoints and run logs.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
obs_buckets + |
+Yes + |
+Array of Strings + |
+List of OBS buckets. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
is_success + |
+No + |
+Boolean + |
+Whether the request is successfully executed. Value true indicates that the request is successfully executed. + |
+
message + |
+No + |
+String + |
+Message content. + |
+
{ + "obs_buckets": [ + "bucket1" + ] +}+
{ + "is_success": "true", + "message": "The following OBS bucket is authorized successfully, bucket1." +}+
Table 4 describes the status code.
+ + +If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.
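A call sketch for the bucket authorization request above. The URI is not reproduced in this section, so the path below is an assumption to be checked against the URI definition for this API; host, project ID, and token are placeholders as before.

import requests

ENDPOINT = "https://dli.example-region.myhuaweicloud.com"  # placeholder
PROJECT_ID = "your_project_id"                             # placeholder
TOKEN = "your_iam_token"                                   # placeholder

# Assumed path -- verify against the URI table for this API.
url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/dli/obs-authorize"
headers = {"X-Auth-Token": TOKEN, "Content-Type": "application/json"}

payload = {"obs_buckets": ["bucket1"]}

resp = requests.post(url, json=payload, headers=headers)
resp.raise_for_status()
print(resp.json())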
+This API is used to create a Flink streaming SQL job.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
name + |
+Yes + |
+String + |
+Name of the job. The value can contain 1 to 57 characters. + |
+
desc + |
+No + |
+String + |
+Job description. Length range: 0 to 512 characters. + |
+
template_id + |
+No + |
+Integer + |
+Template ID. +If both template_id and sql_body are specified, sql_body is used. If template_id is specified but sql_body is not, the SQL statement of the specified template is used as sql_body. + |
+
queue_name + |
+No + |
+String + |
+Name of a queue. The value can contain 0 to 128 characters. + |
+
sql_body + |
+No + |
+String + |
+Stream SQL statement, which includes at least the following three parts: source, query, and sink. Maximum length: 1024 x 1024 characters. + |
+
run_mode + |
+No + |
+String + |
+Job running mode. The options are shared_cluster, exclusive_cluster, and edge_node. +
The default value is shared_cluster. + |
+
cu_number + |
+No + |
+Integer + |
+Number of CUs selected for a job. The default value is 2. +This value is the sum of the compute-unit CUs and the JobManager CUs. CU is also the billing unit of DLI; one CU equals one vCPU and 4 GB of memory. The value is the number of CUs required for job running and cannot exceed the number of CUs in the bound queue. For details about how to set the number of JobManager CUs, see manager_cu_number. + |
+
parallel_number + |
+No + |
+Integer + |
+Number of parallel jobs set by a user. The default value is 1. +Number of Flink SQL jobs that run at the same time. Properly increasing the number of parallel threads improves the overall computing capability of the job. However, the switchover overhead caused by the increase of threads must be considered. This value cannot be greater than four times the compute units (number of CUs minus the number of JobManager CUs). +For details about how to set the number of JobManager CUs, see manager_cu_number. + |
+
checkpoint_enabled + |
+No + |
+Boolean + |
+Whether to enable the automatic job snapshot function. true: enable; false: disable. The default value is false. +
|
+
checkpoint_mode + |
+No + |
+Integer + |
+Snapshot mode. There are two options: 1 (exactly_once, each data record is processed exactly once) and 2 (at_least_once, each data record is processed at least once). +
The default value is 1. + |
+
checkpoint_interval + |
+No + |
+Integer + |
+Snapshot interval. The unit is second. The default value is 10. + |
+
obs_bucket + |
+No + |
+String + |
+OBS path where users are authorized to save the snapshot. This parameter is valid only when checkpoint_enabled is set to true. +OBS path where users are authorized to save job logs. This parameter is valid only when log_enabled is set to true. + |
+
log_enabled + |
+No + |
+Boolean + |
+Whether to enable the function of uploading job logs to users' OBS buckets. The default value is false. + |
+
smn_topic + |
+No + |
+String + |
+SMN topic. If a job fails, the system will send a message to users subscribed to the SMN topic. + |
+
restart_when_exception + |
+No + |
+Boolean + |
+Whether to enable the function of automatically restarting a job upon job exceptions. The default value is false. + |
+
idle_state_retention + |
+No + |
+Integer + |
+Retention time of the idle state. The unit is hour. The default value is 1. + |
+
job_type + |
+No + |
+String + |
+Job type. This parameter can be set to flink_sql_job. + + |
+
dirty_data_strategy + |
+No + |
+String + |
+Dirty data policy of a job. +
The default value is 0. + |
+
udf_jar_url + |
+No + |
+String + |
+Name of the resource package that has been uploaded to the DLI resource management system. The UDF Jar file of the SQL job is specified by this parameter. + |
+
manager_cu_number + |
+No + |
+Integer + |
+Number of CUs in the JobManager selected for a job. The default value is 1. + |
+
tm_cus + |
+No + |
+Integer + |
+Number of CUs for each TaskManager. The default value is 1. + |
+
tm_slot_num + |
+No + |
+Integer + |
+Number of slots in each TaskManager. The default value is (parallel_number*tm_cus)/(cu_number-manager_cu_number). + |
+
resume_checkpoint + |
+No + |
+Boolean + |
+Whether the abnormal restart is recovered from the checkpoint. + |
+
resume_max_num + |
+No + |
+Integer + |
+Maximum number of retry times upon exceptions. The unit is times/hour. Value range: -1 or greater than 0. The default value is -1, indicating that the number of times is unlimited. + |
+
tags + |
+No + |
+Array of Objects + |
+Label of a Flink SQL job. For details, see Table 3. + |
+
runtime_config + |
+No + |
+String + |
+Customizes optimization parameters when a Flink job is running. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
is_success + |
+No + |
+Boolean + |
+Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. + |
+
message + |
+No + |
+String + |
+Message content. + |
+
job + |
+No + |
+Object + |
+Information about the job status. For details, see Table 5. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
job_id + |
+Yes + |
+Long + |
+Job ID. + |
+
status_name + |
+No + |
+String + |
+Name of job status. For details, see the description of the status field in Querying Job Details. + |
+
status_desc + |
+No + |
+String + |
+Status description. Causes and suggestions for the abnormal status. + |
+
{ + "name": "myjob", + "desc": "This is a job used for counting characters.", + "template_id": 100000, + "queue_name": "testQueue", + "sql_body": "select * from source_table", + "run_mode": "exclusive_cluster", + "cu_number": 2, + "parallel_number": 1, + "checkpoint_enabled": false, + "checkpoint_mode": "exactly_once", + "checkpoint_interval": 0, + "obs_bucket": "my_obs_bucket", + "log_enabled": false, + "restart_when_exception": false, + "idle_state_retention": 3600, + "job_type": "flink_sql_job", + "dirty_data_strategy": "0", + "udf_jar_url": "group/test.jar" +}+
{ + "is_success": "true", + "message": "A DLI job is created successfully.", + "job": { + "job_id": 148, + "status_name": "job_init", + "status_desc": "" + } +}+
Table 6 describes status codes.
+ + +If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.
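Tying the parameters together, a minimal sketch that submits a reduced version of the sample request above to POST /v1.0/{project_id}/streaming/sql-jobs (the URI listed for job creation in the actions table); host, project ID, and token are placeholders.

import requests

ENDPOINT = "https://dli.example-region.myhuaweicloud.com"  # placeholder
PROJECT_ID = "your_project_id"                             # placeholder
TOKEN = "your_iam_token"                                   # placeholder

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/streaming/sql-jobs"
headers = {"X-Auth-Token": TOKEN, "Content-Type": "application/json"}

# source/query/sink SQL on a dedicated queue, 2 CUs, no checkpointing.
payload = {
    "name": "myjob",
    "desc": "This is a job used for counting characters.",
    "queue_name": "testQueue",
    "sql_body": "select * from source_table",
    "run_mode": "exclusive_cluster",
    "cu_number": 2,
    "parallel_number": 1,
    "job_type": "flink_sql_job",
}

resp = requests.post(url, json=payload, headers=headers)
resp.raise_for_status()
job = resp.json()["job"]
print(job["job_id"], job["status_name"])  # e.g. 148 job_init

The job_id in the response is what the update, batch-run, and query APIs below take as input.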
+This API is used to modify a Flink SQL job.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
job_id + |
+Yes + |
+Long + |
+Job ID. Refer to Creating a SQL Job to obtain the value. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
name + |
+No + |
+String + |
+Name of a job. Length range: 0 to 57 characters. + |
+
desc + |
+No + |
+String + |
+Job description. Length range: 0 to 512 characters. + |
+
queue_name + |
+No + |
+String + |
+Name of a queue. The value can contain 0 to 128 characters. + |
+
sql_body + |
+No + |
+String + |
+Stream SQL statement, which includes at least the following three parts: source, query, and sink. Length range: 0 to 1024x1024 characters. + |
+
run_mode + |
+No + |
+String + |
+Job running mode. The options are shared_cluster, exclusive_cluster, and edge_node. +
The default value is shared_cluster. + |
+
cu_number + |
+No + |
+Integer + |
+Number of CUs selected for a job. The default value is 2. + |
+
parallel_number + |
+No + |
+Integer + |
+Number of parallel jobs set by a user. The default value is 1. + |
+
checkpoint_enabled + |
+No + |
+Boolean + |
+Whether to enable the automatic job snapshot function. true: enable; false: disable. The default value is false. +
|
+
checkpoint_mode + |
+No + |
+Integer + |
+Snapshot mode. There are two options: 1 (exactly_once, each data record is processed exactly once) and 2 (at_least_once, each data record is processed at least once). +
The default value is 1. + |
+
checkpoint_interval + |
+No + |
+Integer + |
+Snapshot interval. The unit is second. The default value is 10. + |
+
obs_bucket + |
+No + |
+String + |
+OBS path where users are authorized to save the snapshot. This parameter is valid only when checkpoint_enabled is set to true. +OBS path where users are authorized to save job logs. This parameter is valid only when log_enabled is set to true. + |
+
log_enabled + |
+No + |
+Boolean + |
+Whether to enable the function of uploading job logs to users' OBS buckets. The default value is false. + |
+
smn_topic + |
+No + |
+String + |
+SMN topic. If a job fails, the system will send a message to users subscribed to the SMN topic. + |
+
restart_when_exception + |
+No + |
+Boolean + |
+Whether to enable the function of automatically restarting a job upon job exceptions. The default value is false. + |
+
idle_state_retention + |
+No + |
+Integer + |
+Expiration time, in seconds. The default value is 3600. + |
+
edge_group_ids + |
+No + |
+Array of Strings + |
+List of edge computing group IDs. Use commas (,) to separate multiple IDs. + |
+
dirty_data_strategy + |
+No + |
+String + |
+Dirty data policy of a job. +
The default value is 0. + |
+
udf_jar_url + |
+No + |
+String + |
+Name of the resource package that has been uploaded to the DLI resource management system. The UDF Jar file of the SQL job is specified by this parameter. + |
+
manager_cu_number + |
+No + |
+Integer + |
+Number of CUs in the JobManager selected for a job. The default value is 1. + |
+
tm_cus + |
+No + |
+Integer + |
+Number of CUs for each TaskManager. The default value is 1. + |
+
tm_slot_num + |
+No + |
+Integer + |
+Number of slots in each TaskManager. The default value is (parallel_number*tm_cus)/(cu_number-manager_cu_number). + |
+
operator_config + |
+No + |
+String + |
+Degree of parallelism (DOP) of an operator. + |
+
resume_checkpoint + |
+No + |
+Boolean + |
+Whether the abnormal restart is recovered from the checkpoint. + |
+
resume_max_num + |
+No + |
+Integer + |
+Maximum number of retry times upon exceptions. The unit is times/hour. Value range: -1 or greater than 0. The default value is -1, indicating that the number of times is unlimited. + |
+
static_estimator_config + |
+No + |
+String + |
+Traffic or hit ratio of each operator, which is a character string in JSON format. Example: +{"operator_list":[{"id":"0a448493b4782967b150582570326227","rate_factor":0.55},{"id":"6d2677a0ecc3fd8df0b72ec675edf8f4","rate_factor":1},{"id":"ea632d67b7d595e5b851708ae9ad79d6","rate_factor":0.55},{"id":"bc764cd8ddf7a0cff126f51c16239658","output_rate":2000}]}+ |
+
runtime_config + |
+No + |
+String + |
+Customizes optimization parameters when a Flink job is running. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
is_success + |
+No + |
+Boolean + |
+Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. + |
+
message + |
+No + |
+String + |
+Message content. + |
+
job + |
+No + |
+Object + |
+Information about job update. For details, see Table 4. + |
+
{ + "name": "myjob", + "desc": "My first job", + "queue_name": "testQueue", + "sql_body": "select * from source_table", + "run_mode": "shared_cluster", + "cu_number": 4, + "parallel_number": 4, + "checkpoint_enabled": false, + "checkpoint_mode": "exactly_once", + "checkpoint_interval": 10, + "obs_bucket": "", + "log_enabled": false, + "smn_topic": "", + "restart_when_exception": false, + "idle_state_retention": 3600, + "edge_group_ids": [ + "62de1e1c-066e-48a8-a79d-f461a31b2ee1", + "2eb00f85-99f2-4144-bcb7-d39ff47f9002" + ], + "dirty_data_strategy": "0", + "udf_jar_url": "group/test.jar" +}+
{ + "is_success": "true", + "message": "The job is updated successfully.", + "job": { + "update_time": 1578905682534 + } +}+
Table 5 describes status codes.
+ + +If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.
+This API is used to create custom jobs, which currently support the JAR format and run in dedicated queues.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
name + |
+Yes + |
+String + |
+Name of the job. The value can contain 1 to 57 characters. + |
+
desc + |
+No + |
+String + |
+Job description. Length range: 0 to 512 characters. + |
+
queue_name + |
+No + |
+String + |
+Name of a queue. The value can contain 0 to 128 characters. + |
+
cu_number + |
+No + |
+Integer + |
+Number of CUs selected for a job. + |
+
manager_cu_number + |
+No + |
+Integer + |
+Number of CUs on the management node selected by the user for a job, which corresponds to the number of Flink job managers. The default value is 1. + |
+
parallel_number + |
+No + |
+Integer + |
+Number of parallel operations selected for a job. + |
+
log_enabled + |
+No + |
+Boolean + |
+Whether to enable the job log function. true: enable; false: disable. The default value is false. +
|
+
obs_bucket + |
+No + |
+String + |
+OBS bucket where users are authorized to save logs when log_enabled is set to true. + |
+
smn_topic + |
+No + |
+String + |
+SMN topic. If a job fails, the system will send a message to users subscribed to the SMN topic. + |
+
main_class + |
+No + |
+String + |
+Job entry class. + |
+
entrypoint_args + |
+No + |
+String + |
+Job entry parameter. Multiple parameters are separated by spaces. + |
+
restart_when_exception + |
+No + |
+Boolean + |
+Whether to enable the function of restart upon exceptions. The default value is false. + |
+
entrypoint + |
+No + |
+String + |
+Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize the JAR file where the job main class is located. + |
+
dependency_jars + |
+No + |
+Array of Strings + |
+Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize other dependency packages. +Example: myGroup/test.jar,myGroup/test1.jar. + |
+
dependency_files + |
+No + |
+Array of Strings + |
+Name of the resource package that has been uploaded to the DLI resource management system. This parameter is used to customize dependency files. +Example: myGroup/test.csv,myGroup/test1.csv. +To access a dependency file from the application, add the following code, where fileName is the name of the file to be accessed and ClassName is the name of the class that accesses the file: +ClassName.class.getClassLoader().getResource("userData/fileName")+ |
+
tm_cus + |
+No + |
+Integer + |
+Number of CUs for each TaskManager. The default value is 1. + |
+
tm_slot_num + |
+No + |
+Integer + |
+Number of slots in each TaskManager. The default value is (parallel_number*tm_cus)/(cu_number-manager_cu_number). + |
+
resume_checkpoint + |
+No + |
+Boolean + |
+Whether the abnormal restart is recovered from the checkpoint. + |
+
resume_max_num + |
+No + |
+Integer + |
+Maximum number of retry times upon exceptions. The unit is times/hour. Value range: -1 or greater than 0. The default value is -1, indicating that the number of times is unlimited. + |
+
checkpoint_path + |
+No + |
+String + |
+Storage path of the checkpoint configured in the user's JAR file. The path must be unique. + |
+
tags + |
+No + |
+Array of Objects + |
+Label of a Flink JAR job. For details, see Table 3. + |
+
runtime_config + |
+No + |
+String + |
+Customizes optimization parameters when a Flink job is running. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
is_success + |
+No + |
+Boolean + |
+Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. + |
+
message + |
+No + |
+String + |
+Message content. + |
+
job + |
+No + |
+Object + |
+Information about the job status. For details, see Table 5. + |
+
{ + "name": "test", + "desc": "job for test", + "queue_name": "testQueue", + "manager_cu_number": 1, + "cu_number": 2, + "parallel_number": 1, + "tm_cus": 1, + "tm_slot_num": 1, + "log_enabled": true, + "obs_bucket": "bucketName", + "smn_topic": "topic", + "main_class": "org.apache.flink.examples.streaming.JavaQueueStream", + "restart_when_exception": false, + "entrypoint": "javaQueueStream.jar", + "entrypoint_args":"-windowSize 2000 -rate 3", + "dependency_jars": [ + "myGroup/test.jar", + "myGroup/test1.jar" + ], + "dependency_files": [ + "myGroup/test.csv", + "myGroup/test1.csv" + ] +}+
{ + "is_success": true, + "message": "A Flink job is created successfully.", + "job": { + "job_id": 138, + "status_name": "job_init", + "status_desc": "" + } +}+
Table 6 describes status codes.
+ + +If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.
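The tm_slot_num default above is pure arithmetic over the other sizing parameters. A quick check against the sample request (cu_number 2, manager_cu_number 1, parallel_number 1, tm_cus 1), which indeed declares tm_slot_num 1:

# Default slots per TaskManager:
# tm_slot_num = (parallel_number * tm_cus) / (cu_number - manager_cu_number)
parallel_number, tm_cus = 1, 1
cu_number, manager_cu_number = 2, 1

tm_slot_num = (parallel_number * tm_cus) // (cu_number - manager_cu_number)
print(tm_slot_num)  # 1, matching the sample request above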
+This API is used to update custom jobs, which currently support the JAR format and run in dedicated queues.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
job_id + |
+Yes + |
+Long + |
+Job ID. Refer to Creating a Flink Jar job to obtain the value. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
name + |
+No + |
+String + |
+Name of the job. Length range: 0 to 57 characters. + |
+
desc + |
+No + |
+String + |
+Job description. Length range: 0 to 512 characters. + |
+
queue_name + |
+No + |
+String + |
+Name of a queue. Length range: 1 to 128 characters. + |
+
cu_number + |
+No + |
+Integer + |
+Number of CUs selected for a job. The default value is 2. + |
+
manager_cu_number + |
+No + |
+Integer + |
+Number of CUs on the management node selected by the user for a job, which corresponds to the number of Flink job managers. The default value is 1. + |
+
parallel_number + |
+No + |
+Integer + |
+Number of parallel operations selected for a job. The default value is 1. + |
+
log_enabled + |
+No + |
+Boolean + |
+Whether to enable the job log function. true: enable; false: disable. The default value is false. +
|
+
obs_bucket + |
+No + |
+String + |
+OBS path where users are authorized to save logs when log_enabled is set to true. + |
+
smn_topic + |
+No + |
+String + |
+SMN topic. If a job fails, the system will send a message to users subscribed to the SMN topic. + |
+
main_class + |
+No + |
+String + |
+Job entry class. + |
+
entrypoint_args + |
+No + |
+String + |
+Job entry parameter. Multiple parameters are separated by spaces. + |
+
restart_when_exception + |
+No + |
+Boolean + |
+Whether to enable the function of restart upon exceptions. The default value is false. + |
+
entrypoint + |
+No + |
+String + |
+Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize the JAR file where the job main class is located. + |
+
dependency_jars + |
+No + |
+Array of Strings + |
+Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize other dependency packages. +Example: myGroup/test.jar,myGroup/test1.jar. + |
+
dependency_files + |
+No + |
+Array of Strings + |
+Name of the resource package that has been uploaded to the DLI resource management system. This parameter is used to customize dependency files. +Example: myGroup/test.csv,myGroup/test1.csv. + |
+
tm_cus + |
+No + |
+Integer + |
+Number of CUs for each TaskManager. The default value is 1. + |
+
tm_slot_num + |
+No + |
+Integer + |
+Number of slots in each TaskManager. The default value is (parallel_number*tm_cus)/(cu_number-manager_cu_number). + |
+
resume_checkpoint + |
+No + |
+Boolean + |
+Whether the abnormal restart is recovered from the checkpoint. + |
+
resume_max_num + |
+No + |
+Integer + |
+Maximum number of retry times upon exceptions. The unit is times/hour. Value range: -1 or greater than 0. The default value is -1, indicating that the number of times is unlimited. + |
+
checkpoint_path + |
+No + |
+String + |
+Storage path of the checkpoint configured in the user's JAR file. The path must be unique. + |
+
runtime_config + |
+No + |
+String + |
+Customizes optimization parameters when a Flink job is running. + |
+
job_type + |
+No + |
+String + |
+Job type. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
is_success + |
+No + |
+Boolean + |
+Whether the request is successfully executed. Value true indicates that the request is successfully executed. + |
+
message + |
+No + |
+String + |
+Message content. + |
+
job + |
+No + |
+object + |
+Information about job update. For details, see Table 4. + |
+
{ + "name": "test1", + "desc": "job for test", + "job_type": "flink_jar_job", + "queue_name": "testQueue", + "manager_cu_number": 1, + "cu_number": 2, + "parallel_number": 1, + "log_enabled": false, + "main_class": "org.apache.flink.examples.streaming.JavaQueueStream", + "restart_when_exception": false, + "entrypoint": "FemaleInfoCollec.jar", + "dependency_jars": [ + "myGroup/test.jar", + "myGroup/test1.jar" + ], + "dependency_files": [ + "myGroup/test.csv", + "myGroup/test1.csv" + ] +}+
{ + "is_success": true, + "message": "The Flink job is updated successfully.", + "job": { + "update_time": 1516952770835 + } +}+
Table 5 describes status codes.
+ + +If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.
+This API is used to trigger batch job running.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
job_ids + |
+Yes + |
+Array of Long + |
+IDs of the jobs to be run in a batch. You can obtain a job ID by calling the API for creating a job or the API for querying jobs. + |
+
resume_savepoint + |
+No + |
+Boolean + |
+Whether to restore a job from the latest savepoint. true: restore from the latest savepoint; false: start the job without restoring. +
The default value is false. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
Array elements + |
+No + |
+Array of Objects + |
+The response is an array of objects, one per submitted job. For details, see Table 4. + |
+
{ + "job_ids": [131,130,138,137], + "resume_savepoint": true +}+
[ + { + "is_success": "true", + "message": "The request for submitting DLI jobs is delivered successfully." + }, + { + "is_success": "true", + "message": "The request for submitting DLI jobs is delivered successfully." + }, + { + "is_success": "true", + "message": "The request for submitting DLI jobs is delivered successfully." + }, + { + "is_success": "true", + "message": "The request for submitting DLI jobs is delivered successfully." + } +]+
Table 5 describes status codes.
+ + +If an error occurs when this API is invoked, the system does not return the result similar to the preceding example, but returns the error code and error information. For details, see Error Code.
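A sketch of the batch trigger above, posting to /v1.0/{project_id}/streaming/jobs/run (the URI listed for starting jobs in the actions table); host, project ID, and token are placeholders. Note that the response is an array with one entry per job ID.

import requests

ENDPOINT = "https://dli.example-region.myhuaweicloud.com"  # placeholder
PROJECT_ID = "your_project_id"                             # placeholder
TOKEN = "your_iam_token"                                   # placeholder

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/streaming/jobs/run"
headers = {"X-Auth-Token": TOKEN, "Content-Type": "application/json"}

payload = {"job_ids": [131, 130, 138, 137], "resume_savepoint": True}

resp = requests.post(url, json=payload, headers=headers)
resp.raise_for_status()
for result in resp.json():  # one result object per job ID
    print(result["is_success"], result["message"])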
+This API is used to query the list of the current user's jobs. You can filter the results by job ID (querying jobs whose IDs are greater than or less than a given ID) or by job status (for example, running). By default, all jobs are queried.
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
project_id + |
+Yes + |
+String + |
+Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
job_type + |
+No + |
+String + |
+Job type. +
|
+
status + |
+No + |
+String + |
+Job status code. +Available job statuses are as follows: +
|
+
queue_name + |
+No + |
+String + |
+Name of a queue. + |
+
order + |
+No + |
+String + |
+Sorting style of the query results. The options are asc (ascending) and desc (descending). +
The default value is desc. + |
+
limit + |
+No + |
+Integer + |
+Number of returned data records. The default value is 10 and the maximum value is 100. + |
+
name + |
+No + |
+String + |
+Name of the job. Length range: 0 to 57 characters. + |
+
offset + |
+No + |
+Integer + |
+Job offset. + |
+
show_detail + |
+No + |
+Boolean + |
+Whether to return job details. The default value is false. If this parameter is set to true, the job details are returned. For details, see Querying Job Details. + |
+
user_name + |
+No + |
+String + |
+Username, which can be used as a filter. + |
+
tags + |
+No + |
+String + |
+Specifies a label for filtering. + |
+
None
+Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
is_success + |
+No + |
+Boolean + |
+Whether the request is successfully executed. Value true indicates that the request is successfully executed. + |
+
message + |
+No + |
+String + |
+System prompt. If execution succeeds, the parameter setting may be left blank. + |
+
job_list + |
+No + |
+Object + |
+Information about a job list. For details, see Table 4. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
total_count + |
+No + |
+Integer + |
+Number of records in the query result. + |
+
jobs + |
+No + |
+Array of Objects + |
+Information about a job. For details, see Table 5. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
job_id + |
+No + |
+Long + |
+Job ID. + |
+
name + |
+No + |
+String + |
+Name of the job. Length range: 0 to 57 characters. + |
+
desc + |
+No + |
+String + |
+Job description. Length range: 0 to 512 characters. + |
+
user_name + |
+No + |
+String + |
+Username. This parameter is valid only when show_detail is set to false. + |
+
job_type + |
+No + |
+String + |
+Job type. +
|
+
status + |
+No + |
+String + |
+Job status. + |
+
status_desc + |
+No + |
+String + |
+Description of job status. + |
+
create_time + |
+No + |
+Long + |
+Time when a job is created. + |
+
start_time + |
+No + |
+Long + |
+Time when a job is started. The value 0 indicates that the process is not started. + |
+
duration + |
+No + |
+Long + |
+Running duration of a job. Unit: ms. This parameter is valid only when show_detail is set to false. + |
+
root_id + |
+No + |
+Long + |
+Parent job ID. This parameter is valid only when show_detail is set to false. + |
+
graph_editor_enabled + |
+No + |
+Boolean + |
+Whether the flow diagram can be edited. Value true indicates that the flow diagram can be edited, and false indicates that the flow diagram cannot be edited. + |
+
has_savepoint + |
+No + |
+Boolean + |
+Whether a job has a savepoint. Value true indicates that the job has a savepoint, and false indicates that the job does not have a savepoint. + |
+
user_id + |
+No + |
+String + |
+ID of the user who creates the job. This parameter is valid only when show_detail is set to true. + |
+
project_id + |
+No + |
+String + |
+ID of the project to which a job belongs. This parameter is valid only when show_detail is set to true. + |
+
sql_body + |
+No + |
+String + |
+Stream SQL statement. This parameter is valid only when show_detail is set to false. + |
+
run_mode + |
+No + |
+String + |
+Job running mode. The value can be shared_cluster, exclusive_cluster, or edge_node. This parameter is valid only when show_detail is set to true. +
|
+
job_config + |
+No + |
+Object + |
+Job configuration. This parameter is valid only when show_detail is set to false. For details, see Table 6. + |
+
main_class + |
+No + |
+String + |
+Main class of a JAR package. This parameter is valid only when show_detail is set to false. + |
+
entrypoint_args + |
+No + |
+String + |
+Job running parameter of the JAR file. Multiple parameters are separated by spaces. This parameter is valid only when show_detail is set to true. + |
+
execution_graph + |
+No + |
+String + |
+Job execution plan. This parameter is valid only when show_detail is set to false. + |
+
update_time + |
+No + |
+Long + |
+Time when a job is updated. This parameter is valid only when show_detail is set to false. + |
+
Parameter + |
+Mandatory + |
+Type + |
+Description + |
+
---|---|---|---|
checkpoint_enabled + |
+No + |
+Boolean + |
+Whether to enable the automatic job snapshot function. true: enable; false: disable. +
The default value is false. + |
+
checkpoint_mode + |
+No + |
+String + |
+Snapshot mode. There are two options: exactly_once (each data record is processed exactly once) and at_least_once (each data record is processed at least once). +
The default value is exactly_once. + |
+
checkpoint_interval + |
+No + |
+Integer + |
+Snapshot interval. The unit is second. The default value is 10. + |
+
log_enabled + |
+No + |
+Boolean + |
+Whether to enable the log storage function. The default value is false. + |
+
obs_bucket + |
+No + |
+String + |
+Name of an OBS bucket. + |
+
smn_topic + |
+No + |
+String + |
+SMN topic name. If a job fails, the system will send a message to users subscribed to the SMN topic. + |
+
root_id + |
+No + |
+Integer + |
+Parent job ID. + |
+
edge_group_ids + |
+No + |
+Array of Strings + |
+List of edge computing group IDs. Use commas (,) to separate multiple IDs. + |
+
manager_cu_number + |
+No + |
+Integer + |
+Number of CUs of the management unit. The default value is 1. + |
+
cu_number + |
+No + |
+Integer + |
+Number of CUs selected for a job. This parameter is valid only when show_detail is set to true. +
The default value is 2. + |
+
parallel_number + |
+No + |
+Integer + |
+Number of concurrent jobs set by a user. This parameter is valid only when show_detail is set to true. +
The default value is 1. + |
+
restart_when_exception + |
+No + |
+Boolean + |
+Whether to enable the function of restart upon exceptions. + |
+
idle_state_retention + |
+No + |
+Integer + |
+Expiration time. + |
+
udf_jar_url + |
+No + |
+String + |
+Name of the package that has been uploaded to the DLI resource management system. The UDF Jar file of the SQL job is uploaded through this parameter. + |
+
dirty_data_strategy + |
+No + |
+String + |
+Dirty data policy of a job. +
|
+
entrypoint + |
+No + |
+String + |
+Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize the JAR file where the job main class is located. + |
+
dependency_jars + |
+No + |
+Array of Strings + |
+Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize other dependency packages. + |
+
dependency_files + |
+No + |
+Array of Strings + |
+Name of the resource package that has been uploaded to the DLI resource management system. This parameter is used to customize dependency files. + |
+
executor_number + |
+No + |
+Integer + |
+Number of compute nodes in a job. + |
+
executor_cu_number + |
+No + |
+Integer + |
+Number of CUs in a compute node. + |
+
resume_checkpoint + |
+No + |
+Boolean + |
+Whether to restore data from the latest checkpoint when the system automatically restarts upon an exception. The default value is false. + |
+
None
```json
{
  "is_success": "true",
  "message": "Querying of the job list succeeds.",
  "job_list": {
    "total_count": 26,
    "jobs": [
      {
        "job_id": 146,
        "name": "aaaaa",
        "desc": "",
        "user_name": "",
        "job_type": "flink_sql_job",
        "status": "job_init",
        "status_desc": "",
        "create_time": 1578892414688,
        "duration": 0,
        "root_id": -1,
        "graph_editor_enabled": false,
        "has_savepoint": false
      }
    ]
  }
}
```
Table 7 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.

This API is used to query the details of a job.
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| job_id | Yes | String | Job ID. |
None
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| job_detail | No | Object | Job details. For details, see Table 3. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| job_id | No | Long | Job ID. |
| name | No | String | Job name. Length range: 0 to 57 characters. |
| desc | No | String | Job description. Length range: 0 to 512 characters. |
| job_type | No | String | Job type. The value can be flink_sql_job, flink_opensource_sql_job, or flink_jar_job. |
| status | No | String | Job status. |
| status_desc | No | String | Description of the job status. |
| create_time | No | Long | Time when the job is created. |
| start_time | No | Long | Time when the job is started. |
| user_id | No | String | ID of the user who creates the job. |
| queue_name | No | String | Name of a queue. Length range: 1 to 128 characters. |
| project_id | No | String | ID of the project to which the job belongs. |
| sql_body | No | String | Stream SQL statement. |
| savepoint_path | No | String | Path for storing manually generated checkpoints (savepoints). |
| run_mode | No | String | Job running mode. The value can be shared_cluster, exclusive_cluster, or edge_node. |
| job_config | No | Object | Job configurations. For details, see Table 4. |
| main_class | No | String | Main class of a JAR package, for example, org.apache.spark.examples.streaming.JavaQueueStream. |
| entrypoint_args | No | String | Running parameters of a JAR package job. Multiple parameters are separated by spaces. |
| execution_graph | No | String | Job execution plan. |
| update_time | No | Long | Time when the job is updated. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| checkpoint_enabled | No | Boolean | Whether to enable the automatic job snapshot function. The default value is false. |
| checkpoint_interval | No | Integer | Snapshot interval, in seconds. The default value is 10. |
| checkpoint_mode | No | String | Snapshot mode. There are two options: exactly_once and at_least_once. The default value is exactly_once. |
| log_enabled | No | Boolean | Whether to enable the log storage function. The default value is false. |
| obs_bucket | No | String | Name of an OBS bucket. |
| root_id | No | Integer | Parent job ID. |
| edge_group_ids | No | Array of Strings | List of edge computing group IDs. Use commas (,) to separate multiple IDs. |
| manager_cu_number | No | Integer | Number of CUs of the management unit. The default value is 1. |
| graph_editor_enabled | No | Boolean | Whether to enable flow diagram editing. The default value is false. |
| graph_editor_data | No | String | Data of flow diagram editing. The default value is null. |
| executor_number | No | Integer | Number of compute nodes in a job. |
| executor_cu_number | No | Integer | Number of CUs in a compute node. |
| cu_number | No | Integer | Number of CUs selected for the job. This parameter is valid only when show_detail is set to true. The default value is 2. |
| parallel_number | No | Integer | Number of concurrent jobs set by the user. This parameter is valid only when show_detail is set to true. The default value is 1. |
| smn_topic | No | String | SMN topic name. If a job fails, the system sends a message to users subscribed to this SMN topic. |
| restart_when_exception | No | Boolean | Whether to enable automatic restart upon exceptions. |
| resume_checkpoint | No | Boolean | Whether to restore data from the latest checkpoint when the system automatically restarts upon an exception. The default value is false. |
| resume_max_num | No | Integer | Maximum number of retry attempts. The value -1 indicates that there is no upper limit. |
| checkpoint_path | No | String | Path for saving the checkpoint. |
| idle_state_retention | No | Integer | Expiration time. |
| config_url | No | String | OBS path of the config package uploaded by the user. |
| udf_jar_url | No | String | Name of the package that has been uploaded to the DLI resource management system. The UDF JAR file of the SQL job is uploaded through this parameter. |
| dirty_data_strategy | No | String | Dirty data policy of a job. |
| entrypoint | No | String | Name of the package that has been uploaded to the DLI resource management system. This parameter is used to customize the JAR file where the job main class is located. |
| dependency_jars | No | Array of Strings | Names of the packages that have been uploaded to the DLI resource management system. This parameter is used to customize other dependency packages. |
| dependency_files | No | Array of Strings | Names of the resource packages that have been uploaded to the DLI resource management system. This parameter is used to customize dependency files. |
| tm_cus | No | Integer | Number of CUs per TaskManager node. |
| tm_slot_num | No | Integer | Number of slots per TaskManager node. |
| operator_config | No | String | Degree of parallelism of each operator. The operator ID and degree of parallelism are given in JSON format. |
| static_estimator_config | No | String | Estimation of static flow diagram resources. |
| runtime_config | No | String | Custom optimization parameters used when the Flink job is running. |

None
```json
{
  "is_success": "true",
  "message": "Job detail query succeeds.",
  "job_detail": {
    "job_id": 104,
    "user_id": "011c99a26ae84a1bb963a75e7637d3fd",
    "queue_name": "flinktest",
    "project_id": "330e068af1334c9782f4226acc00a2e2",
    "name": "jptest",
    "desc": "",
    "sql_body": "",
    "run_mode": "exclusive_cluster",
    "job_type": "flink_jar_job",
    "job_config": {
      "checkpoint_enabled": false,
      "checkpoint_interval": 10,
      "checkpoint_mode": "exactly_once",
      "log_enabled": false,
      "obs_bucket": null,
      "root_id": -1,
      "edge_group_ids": null,
      "graph_editor_enabled": false,
      "graph_editor_data": "",
      "manager_cu_number": 1,
      "executor_number": null,
      "executor_cu_number": null,
      "cu_number": 2,
      "parallel_number": 1,
      "smn_topic": null,
      "restart_when_exception": false,
      "idle_state_retention": 3600,
      "config_url": null,
      "udf_jar_url": null,
      "dirty_data_strategy": null,
      "entrypoint": "FemaleInfoCollection.jar",
      "dependency_jars": [
        "FemaleInfoCollection.jar",
        "ObsBatchTest.jar"
      ],
      "dependency_files": [
        "FemaleInfoCollection.jar",
        "ReadFromResource"
      ]
    },
    "main_class": null,
    "entrypoint_args": null,
    "execution_graph": null,
    "status": "job_init",
    "status_desc": "",
    "create_time": 1578466221525,
    "update_time": 1578467395713,
    "start_time": null
  }
}
```
```json
{
  "is_success": "true",
  "message": "The job information query succeeds.",
  "job_detail": {
    "job_type": "flink_opensource_sql_job",
    "status_desc": "",
    "create_time": 1637632872828,
    "sql_body": "xxx",
    "savepoint_path": null,
    "main_class": null,
    "queue_name": "xie_container_general",
    "execution_graph": "xxx",
    "start_time": 1638433497621,
    "update_time": 1638449337993,
    "job_config": {
      "checkpoint_enabled": true,
      "checkpoint_interval": 600,
      "checkpoint_mode": "exactly_once",
      "log_enabled": true,
      "obs_bucket": "dli-test",
      "root_id": -1,
      "edge_group_ids": null,
      "graph_editor_enabled": false,
      "graph_editor_data": "",
      "manager_cu_number": 1,
      "executor_number": null,
      "executor_cu_number": null,
      "cu_number": 2,
      "parallel_number": 3,
      "smn_topic": "",
      "restart_when_exception": true,
      "resume_checkpoint": true,
      "resume_max_num": -1,
      "checkpoint_path": null,
      "idle_state_retention": 3600,
      "config_url": null,
      "udf_jar_url": "test/flink_test-1.0-SNAPSHOT-jar-with-dependencies.jar",
      "dirty_data_strategy": "0",
      "entrypoint": "test/flink_test-1.0-SNAPSHOT-jar-with-dependencies.jar",
      "dependency_jars": null,
      "dependency_files": null,
      "tm_cus": 1,
      "tm_slot_num": 3,
      "image": null,
      "feature": null,
      "flink_version": null,
      "operator_config": "xxx",
      "static_estimator_config": "xxx",
      "runtime_config": null
    },
    "user_id": "xxx",
    "project_id": "xxx",
    "run_mode": "exclusive_cluster",
    "job_id": 90634,
    "name": "test_guoquan",
    "desc": "",
    "entrypoint_args": null,
    "status": "job_cancel_success"
  }
}
```
Table 5 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
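For reference, the following minimal Python sketch shows one way to script this query. The endpoint host and token are placeholders, and the request path is an assumption chosen to match the URI pattern of the execution-plan API below; verify it against the URI of this API before use.

```python
# Minimal sketch: query Flink job details. Endpoint, token, and the
# request path are illustrative assumptions, not authoritative values.
import requests

ENDPOINT = "https://dli.example.com"   # placeholder DLI endpoint
PROJECT_ID = "your-project-id"
JOB_ID = 104
TOKEN = "your-iam-token"               # IAM token for the X-Auth-Token header

# Assumed path, following the /v1.0/{project_id}/streaming/jobs/... pattern.
url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/streaming/jobs/{JOB_ID}"
resp = requests.get(url, headers={"X-Auth-Token": TOKEN})
resp.raise_for_status()
detail = resp.json().get("job_detail", {})
print(detail.get("name"), detail.get("status"), detail.get("run_mode"))
```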
This API is used to query a job execution plan.

GET /v1.0/{project_id}/streaming/jobs/{job_id}/execute-graph

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| job_id | Yes | Long | Job ID. |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successful. |
| message | No | String | Message content. |
| execute_graph | No | Object | Response parameter for querying a job plan. For details, see Table 3. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| jid | No | String | ID of a Flink job. |
| name | No | String | Name of a Flink job. |
| isStoppable | No | Boolean | Whether the job can be stopped. |
| state | No | String | Execution status of the job. |
| start-time | No | Long | Time when the job is started. |
| end-time | No | Long | Time when the job is stopped. |
| duration | No | Long | Running duration of the job. |

None
```json
{
  "is_success": "true",
  "message": "Querying the job execution graph succeeds.",
  "execute_graph": {
    "jid": "4e966f43f2c90b0e1bf3188ecf55504b",
    "name": "",
    "isStoppable": false,
    "state": "RUNNING",
    "start-time": 1578904488436,
    "end-time": -1,
    "duration": 516274
  }
}
```
| Status Code | Description |
|---|---|
| 200 | Querying the job execution plan succeeds. |
| 400 | The input parameter is invalid. |
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
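The following minimal Python sketch exercises this API using the URI given above. The endpoint host and token are placeholders.

```python
# Minimal sketch: query a job execution plan via
# GET /v1.0/{project_id}/streaming/jobs/{job_id}/execute-graph.
import requests

ENDPOINT = "https://dli.example.com"   # placeholder endpoint
PROJECT_ID = "your-project-id"
JOB_ID = 146
TOKEN = "your-iam-token"

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/streaming/jobs/{JOB_ID}/execute-graph"
resp = requests.get(url, headers={"X-Auth-Token": TOKEN})
resp.raise_for_status()
graph = resp.json().get("execute_graph", {})
print(graph.get("jid"), graph.get("state"), graph.get("duration"))
```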
This API is used to stop running jobs in batches.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| job_ids | Yes | Array of Long | Job IDs. |
| trigger_savepoint | No | Boolean | Whether to create a savepoint for a job to store the job status information before stopping it. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| Array elements | No | Array of Objects | Returned response messages, one per job. For details, see Table 4. |
```json
{
  "job_ids": [128, 137],
  "trigger_savepoint": false
}
```
```json
[
  {
    "is_success": "true",
    "message": "The request for stopping DLI jobs is delivered successfully."
  }
]
```
Table 5 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
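The following minimal Python sketch shows one way to batch-stop jobs. The endpoint and token are placeholders, and the request path is an assumption for illustration; the request body matches the example above.

```python
# Minimal sketch: stop several jobs in one call. The request path is an
# assumed pattern and should be verified against this API's URI.
import requests

ENDPOINT = "https://dli.example.com"
PROJECT_ID = "your-project-id"
TOKEN = "your-iam-token"

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/streaming/jobs/stop"  # assumed path
payload = {"job_ids": [128, 137], "trigger_savepoint": False}
resp = requests.post(url, json=payload, headers={"X-Auth-Token": TOKEN})
resp.raise_for_status()
for result in resp.json():   # one result object per stopped job
    print(result.get("is_success"), result.get("message"))
```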
This API is used to delete a Flink job in any state.

The job records will not be deleted.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| job_id | Yes | Long | Job ID. |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |

None
```json
{
  "is_success": "true",
  "message": "The job is deleted successfully."
}
```
Table 3 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.

This API is used to delete jobs in any state in batches.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| job_ids | Yes | Array of Long | Job IDs. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
```json
{
  "job_ids": [12, 232]
}
```
```json
[
  {
    "is_success": "true",
    "message": "The job is deleted successfully."
  }
]
```
Table 4 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.

This API is used to create a user template for the DLI service. A maximum of 100 user templates can be created.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| name | Yes | String | Template name. The value can contain 1 to 64 characters. |
| desc | No | String | Template description. Length range: 0 to 512 characters. |
| sql_body | No | String | Stream SQL statement, which includes at least the following three parts: source, query, and sink. Length range: 0 to 2,048 characters. |
| tags | No | Array of Objects | Label of a Flink job template. For details, see Table 3. |
| job_type | No | String | Flink job template type. The default value is flink_sql_job. You can set this parameter to flink_sql_job or flink_opensource_sql_job only. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successful. |
| message | No | String | Message content. |
| template | No | Object | Information about the created template. For details, see Table 5. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| template_id | No | Long | Template ID. |
| name | No | String | Template name. |
| desc | No | String | Template description. |
| create_time | No | Long | Time when the template is created. |
| job_type | No | String | Job template type. |
```json
{
  "name": "simple_stream_sql",
  "desc": "Example of quick start",
  "sql_body": "select * from source_table"
}
```
```json
{
  "is_success": true,
  "message": "A template is created successfully.",
  "template": {
    "template_id": 0,
    "name": "IoT_example",
    "desc": "Example of quick start",
    "create_time": 1516952710040,
    "job_type": "flink_sql_job"
  }
}
```
Table 6 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
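The following minimal Python sketch creates a template. The endpoint and token are placeholders, and the request path is an assumption derived from the template URIs shown below (PUT/DELETE /v1.0/{project_id}/streaming/job-templates/{template_id}); verify it before use.

```python
# Minimal sketch: create a job template. The POST path is an assumed
# pattern consistent with the template APIs in this document.
import requests

ENDPOINT = "https://dli.example.com"
PROJECT_ID = "your-project-id"
TOKEN = "your-iam-token"

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/streaming/job-templates"  # assumed
payload = {
    "name": "simple_stream_sql",
    "desc": "Example of quick start",
    "sql_body": "select * from source_table",
}
resp = requests.post(url, json=payload, headers={"X-Auth-Token": TOKEN})
resp.raise_for_status()
print(resp.json().get("template", {}).get("template_id"))
```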
This API is used to update existing templates in DLI.

PUT /v1.0/{project_id}/streaming/job-templates/{template_id}

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| template_id | Yes | String | Template ID. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| name | No | String | Template name. Length range: 0 to 57 characters. |
| desc | No | String | Template description. Length range: 0 to 512 characters. |
| sql_body | No | String | Stream SQL statement, which includes at least the following three parts: source, query, and sink. Length range: 0 to 1024 x 1024 characters. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
```json
{
  "name": "simple_stream_sql",
  "desc": "Example of quick start",
  "sql_body": "select * from source_table"
}
```
```json
{
  "is_success": "true",
  "message": "The template is updated successfully."
}
```
Table 4 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
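The following minimal Python sketch updates a template using the PUT URI given above. The endpoint and token are placeholders.

```python
# Minimal sketch: update an existing template via
# PUT /v1.0/{project_id}/streaming/job-templates/{template_id}.
import requests

ENDPOINT = "https://dli.example.com"   # placeholder
PROJECT_ID = "your-project-id"
TEMPLATE_ID = 2
TOKEN = "your-iam-token"

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/streaming/job-templates/{TEMPLATE_ID}"
payload = {"name": "simple_stream_sql",
           "sql_body": "select * from source_table"}
resp = requests.put(url, json=payload, headers={"X-Auth-Token": TOKEN})
resp.raise_for_status()
print(resp.json().get("message"))
```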
This API is used to delete a template. A template used by jobs can also be deleted.

DELETE /v1.0/{project_id}/streaming/job-templates/{template_id}

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| template_id | Yes | String | Template ID. |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the response is successful. The value true indicates success. |
| message | No | String | Message content. |
| template | No | Object | Information about the deleted template. For details, see Table 3. |

None
```json
{
  "is_success": "true",
  "message": "The template is deleted successfully.",
  "template": {
    "template_id": 2
  }
}
```
Table 4 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
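The following minimal Python sketch deletes a template using the DELETE URI given above. The endpoint and token are placeholders.

```python
# Minimal sketch: delete a template via
# DELETE /v1.0/{project_id}/streaming/job-templates/{template_id}.
import requests

ENDPOINT = "https://dli.example.com"   # placeholder
PROJECT_ID = "your-project-id"
TEMPLATE_ID = 2
TOKEN = "your-iam-token"

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/streaming/job-templates/{TEMPLATE_ID}"
resp = requests.delete(url, headers={"X-Auth-Token": TOKEN})
resp.raise_for_status()
print(resp.json().get("message"))
```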
This API is used to query the job template list. Currently, only custom templates can be queried.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| name | No | String | Template name. Fuzzy query by name is supported. |
| tags | No | String | List of tag names. The value is k=v for a single tag. Multiple tags are separated by commas (,). Example: tag1=v1,tag2=v2. |
| offset | No | Long | Job offset. |
| limit | No | Integer | Number of returned data records. The default value is 10. |
| order | No | String | Sorting order of the query results: asc (ascending) or desc (descending). The default value is desc. |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successful. |
| message | No | String | Message content. |
| template_list | No | Object | Information about the template list. For details, see Table 4. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| total_count | No | Integer | Total number of templates. |
| templates | No | Array of Objects | Detailed information about each template. For details, see Table 5. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| template_id | No | Integer | Template ID. |
| name | No | String | Template name. |
| desc | No | String | Template description. |
| create_time | No | Long | Time when the template is created. |
| update_time | No | Long | Time when the template is updated. |
| sql_body | No | String | Stream SQL statement. Contains at least the source, query, and sink parts. |
| job_type | No | String | Job template type. |

None
```json
{
  "is_success": "true",
  "message": "The template list is obtained successfully.",
  "template_list": {
    "total_count": 2,
    "templates": [
      {
        "template_id": 2,
        "name": "updatetest",
        "desc": "Example of quick start",
        "create_time": 1578748092000,
        "update_time": 1578748092000,
        "sql_body": "select * from source_table",
        "job_type": "flink_sql_job"
      },
      {
        "template_id": 1,
        "name": "we",
        "desc": "qwe",
        "create_time": 1577951045000,
        "update_time": 1577951045000,
        "sql_body": ""
      }
    ]
  }
}
```
Table 6 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
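The following minimal Python sketch lists templates. The endpoint and token are placeholders, and the request path is an assumption consistent with the other template URIs in this document; verify it before use.

```python
# Minimal sketch: query the job template list with paging and sorting.
# The GET path is an assumed pattern; the query parameters come from
# the table above.
import requests

ENDPOINT = "https://dli.example.com"
PROJECT_ID = "your-project-id"
TOKEN = "your-iam-token"

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/streaming/job-templates"  # assumed
params = {"limit": 10, "offset": 0, "order": "desc"}
resp = requests.get(url, params=params, headers={"X-Auth-Token": TOKEN})
resp.raise_for_status()
for tpl in resp.json().get("template_list", {}).get("templates", []):
    print(tpl.get("template_id"), tpl.get("name"))
```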
This API is used to restart, scale out, or scale in queues.

Only SQL queues in the Available status can be restarted. (The queue status is Available only after the SQL job is successfully executed.)

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| queue_name | Yes | String | Name of a queue. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| action | Yes | String | Operation to be performed. Currently, only restart, scale_out, and scale_in are supported. |
| force | No | Boolean | Whether to forcibly restart the queue. This parameter is optional when action is set to restart. The default value is false. |
| cu_count | No | Integer | Number of CUs to be scaled in or out. This parameter is optional when action is set to scale_out or scale_in. The value of cu_count must be a multiple of 16. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| job_id | No | String | Job ID returned when force is set to true. |
| queue_name | No | String | Name of the queue to be scaled in or out. |
| result | No | Boolean | Scaling result. |
```json
{
  "action": "restart",
  "force": "false"
}
```
```json
{
  "action": "scale_out",
  "cu_count": 16
}
```
```json
{
  "is_success": true,
  "message": "Restart success"
}
```
```json
{
  "is_success": true,
  "message": "Submit restart job success, it need some time to cancel jobs, please wait for a while and check job status",
  "job_id": "d90396c7-3a25-4944-ad1e-99c764d902e7"
}
```
```json
{
  "queue_name": "myQueue",
  "result": true
}
```
Table 4 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0015 | Token info for token is null, return. |
| DLI.0013 | X-Auth-Token is not defined in request. It is mandatory. Please define and send the request. |
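The following minimal Python sketch submits a scale-out action. The endpoint and token are placeholders, and the request path is an assumption for illustration; verify it against this API's URI before use. The body matches the scale_out example above.

```python
# Minimal sketch: scale out a queue. The PUT path is an assumed pattern
# (/v1.0/{project_id}/queues/{queue_name}/action) and not authoritative.
import requests

ENDPOINT = "https://dli.example.com"
PROJECT_ID = "your-project-id"
QUEUE = "myQueue"
TOKEN = "your-iam-token"

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/queues/{QUEUE}/action"  # assumed
payload = {"action": "scale_out", "cu_count": 16}
resp = requests.put(url, json=payload, headers={"X-Auth-Token": TOKEN})
resp.raise_for_status()
print(resp.json())   # expected keys: queue_name, result
```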
This API is used to obtain the partition list.

GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/partitions

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| database_name | Yes | String | Name of a database. |
| table_name | Yes | String | Name of a table. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| limit | No | Integer | Number of returned records displayed on each page. The default value is 100. |
| offset | No | Integer | Offset. |
| filter | No | String | Filtering condition. Currently, only the = condition is supported. For example, name=name1 filters the partition data whose name is name1, where name is the name of the partition column and name1 is its value. The key and value are case-insensitive. Example: GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/partitions?part=part2 |

None
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| partitions | No | Object | Partition information. For details, see Table 4. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| total_count | Yes | Long | Total number of partitions. |
| partition_infos | Yes | Array of Objects | List of partitions. For details, see Table 5. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| partition_name | Yes | String | Partition name. |
| create_time | Yes | Long | Time when the partition is created. |
| last_access_time | Yes | Long | Last update time. |
| locations | No | Array of Strings | Path. This parameter is displayed only for non-DLI tables. |
| last_ddl_time | No | Long | Execution time of the last DDL statement, in seconds. |
| num_rows | No | Long | Total number of rows in the partition. |
| num_files | No | Long | Number of files in the partition. |
| total_size | No | Long | Total size of data in the partition, in bytes. |

None
```json
{
  "is_success": true,
  "message": "list partitions succeed",
  "partitions": {
    "total_count": 5,
    "partition_infos": [
      {
        "partition_name": "name=test",
        "create_time": 1579520179000,
        "last_access_time": 1579520179000,
        "locations": ["obs://test/partition"]
      },
      {
        "partition_name": "name=test1",
        "create_time": 1579521406000,
        "last_access_time": 1579521406000,
        "locations": ["obs://test/partition"]
      },
      {
        "partition_name": "name=test2",
        "create_time": 1579521884000,
        "last_access_time": 1579521884000,
        "locations": ["obs://test/partition"]
      },
      {
        "partition_name": "name=test3",
        "create_time": 1579522085000,
        "last_access_time": 1579522085000,
        "locations": ["obs://test/partition"]
      },
      {
        "partition_name": "name=name1/age=age1",
        "create_time": 1581409182000,
        "last_access_time": 1581409182000,
        "locations": ["obs://test/0117"],
        "last_ddl_time": 1581409182,
        "total_size": 2130,
        "num_rows": -1,
        "num_files": 2
      }
    ]
  }
}
```
Table 6 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
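The following minimal Python sketch lists partitions using the GET URI given above, with an optional filter. The endpoint and token are placeholders.

```python
# Minimal sketch: obtain the partition list of a table via
# GET /v1.0/{project_id}/databases/{database_name}/tables/{table_name}/partitions
import requests

ENDPOINT = "https://dli.example.com"   # placeholder
PROJECT_ID = "your-project-id"
DB, TABLE = "testdb", "testtable"
TOKEN = "your-iam-token"

url = (f"{ENDPOINT}/v1.0/{PROJECT_ID}/databases/{DB}"
       f"/tables/{TABLE}/partitions")
params = {"limit": 100, "offset": 0, "filter": "name=test"}
resp = requests.get(url, params=params, headers={"X-Auth-Token": TOKEN})
resp.raise_for_status()
for part in resp.json().get("partitions", {}).get("partition_infos", []):
    print(part["partition_name"], part.get("locations"))
```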
This API is used to view the permissions granted to a user.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| object | Yes | String | Data object to be assigned, which corresponds to the object in API permission assignment. |
| offset | No | Integer | Offset of the page-based query. |
| limit | No | Integer | Number of records to be displayed in the page-based query. |

The following is an example of a URL containing the query parameter:

GET /v1.0/{project_id}/authorization/privileges?object={object}

None
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | Yes | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | Yes | String | System prompt. If execution succeeds, this parameter may be left blank. |
| object_name | Yes | String | Object name. |
| object_type | Yes | String | Object type. |
| privileges | No | Array of Objects | Permission information. For details, see Table 4. |
| count | No | Integer | Total number of permissions. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_admin | No | Boolean | Whether the database user is an administrator. |
| user_name | No | String | Name of the user who has permission on the current database. |
| privileges | No | Array of Strings | Permissions of the user on the database. |

None
```json
{
  "is_success": true,
  "message": "",
  "object_name": "9561",
  "object_type": "flink",
  "count": 2,
  "privileges": [
    {
      "user_name": "testuser1",
      "is_admin": true,
      "privileges": ["ALL"]
    },
    {
      "user_name": "user1",
      "is_admin": false,
      "privileges": ["GET"]
    }
  ]
}
```
Table 5 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0001 | user input validation failed, object_type sql or saprk is not supported now |
This API is used to change the owner of a program package.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| new_owner | Yes | String | New username. The name contains 5 to 32 characters, including only digits, letters, underscores (_), and hyphens (-). It cannot start with a digit. |
| group_name | Yes | String | Group name. The name contains a maximum of 64 characters. Only digits, letters, periods (.), underscores (_), and hyphens (-) are allowed. |
| resource_name | No | String | Package name. The name can contain only digits, letters, underscores (_), exclamation marks (!), hyphens (-), and periods (.), but cannot start with a period. The length (including the file name extension) cannot exceed 128 characters. This parameter is mandatory if you want to change the owner of a resource package in a group. |

group_name and resource_name can be used independently or together.
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
```json
{
  "new_owner": "scuser1",
  "group_name": "groupName"
}
```
```json
{
  "is_success": "true",
  "message": ""
}
```
Table 4 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0002 | No such user. userName:ssssss. |
This API is used to export Flink job data.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| obs_dir | Yes | String | OBS path for storing exported job files. |
| is_selected | Yes | Boolean | Whether to export specified jobs. |
| job_selected | No | Array of Longs | IDs of the jobs to be exported if is_selected is set to true. NOTE: This parameter is mandatory when is_selected is set to true. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| zip_file | No | Array of Strings | Name of the ZIP package containing exported jobs. The ZIP package is stored on OBS. |
```json
{
  "obs_dir": "obs-test",
  "is_selected": true,
  "job_selected": [100]
}
```
```json
{
  "is_success": true,
  "message": "The job is exported successfully.",
  "zip_file": ["obs-test/aggregate_1582677879475.zip"]
}
```
Table 4 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.

This API is used to import Flink job data.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| zip_file | Yes | String | Path of the job ZIP file imported from OBS. You can enter a folder path to import all ZIP files in the folder. NOTE: The folder can contain only .zip files. |
| is_cover | No | Boolean | Whether to overwrite an existing job if the name of the imported job is the same as that of an existing job in the service. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| job_mapping | No | Array of Objects | Information about the imported jobs. For details, see Table 4. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| old_job_id | No | Long | ID of the job before being imported. |
| new_job_id | No | Long | ID of the job after being imported. If is_cover is set to false and a job with the same name exists in the service, the returned value of this parameter is -1. |
| remark | No | String | Result of importing the job. |
```json
{
  "zip_file": "test/ggregate_1582677879475.zip",
  "is_cover": true
}
```
```json
{
  "is_success": true,
  "message": "The job is imported successfully.",
  "job_mapping": [
    {
      "old_job_id": "100",
      "new_job_id": "200",
      "remark": "Job successfully created"
    }
  ]
}
```
Table 5 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.

This API is used to query the authorization of an enhanced datasource connection.

GET /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/privileges

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| connection_id | Yes | String | Connection ID, which identifies the UUID of a datasource connection. |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| connection_id | No | String | Enhanced datasource connection ID, which identifies the UUID of a datasource connection. |
| privileges | No | Array of Objects | Datasource connection information about each authorized project. For details, see Table 3. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| object | No | String | Object information during authorization. |
| applicant_project_id | No | String | ID of an authorized project. |
| privileges | No | Array of Strings | Authorization operation information. |

None
```json
{
  "is_success": true,
  "message": "",
  "privileges": [
    {
      "object": "edsconnections.503fc86a-5e60-4349-92c2-7e399404fa8a",
      "applicant_project_id": "330e068af1334c9782f4226acc00a2e2",
      "privileges": ["BIND_QUEUE"]
    }
  ],
  "connection_id": "503fc86a-5e60-4349-92c2-7e399404fa8a"
}
```
Table 4 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0001 | Connection 503fc86a-5e60-4349-92c2-7e399404fa8a does not exist. |
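The following minimal Python sketch queries the authorization of an enhanced datasource connection using the GET URI given above. The endpoint, token, and connection ID are placeholders.

```python
# Minimal sketch: query datasource connection privileges via
# GET /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}/privileges
import requests

ENDPOINT = "https://dli.example.com"   # placeholder
PROJECT_ID = "your-project-id"
CONNECTION_ID = "503fc86a-5e60-4349-92c2-7e399404fa8a"
TOKEN = "your-iam-token"

url = (f"{ENDPOINT}/v2.0/{PROJECT_ID}/datasource/enhanced-connections/"
       f"{CONNECTION_ID}/privileges")
resp = requests.get(url, headers={"X-Auth-Token": TOKEN})
resp.raise_for_status()
for priv in resp.json().get("privileges", []):
    print(priv["applicant_project_id"], priv["privileges"])
```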
This API is used to create a global variable.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| var_name | Yes | String | Global variable name. The name can contain a maximum of 128 characters, including only digits, letters, and underscores (_), but cannot start with an underscore (_) or contain only digits. |
| var_value | Yes | String | Global variable value. |
| is_sensitive | No | Boolean | Whether to set the variable as a sensitive variable. The default value is false. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | Message content. |
```json
{
  "var_name": "string",
  "var_value": "string",
  "is_sensitive": true
}
```
```json
{
  "is_success": true,
  "message": "string"
}
```
Table 4 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0001 | Parameter check errors occur. |
| DLI.0999 | The object exists. |
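The following minimal Python sketch creates a global variable. The endpoint and token are placeholders, and the request path is an assumption for illustration; verify it against this API's URI before use.

```python
# Minimal sketch: create a global variable. The POST path is an assumed
# pattern and not authoritative; the body comes from the table above.
import requests

ENDPOINT = "https://dli.example.com"
PROJECT_ID = "your-project-id"
TOKEN = "your-iam-token"

url = f"{ENDPOINT}/v1.0/{PROJECT_ID}/variables"  # assumed path
payload = {"var_name": "batch_date",
           "var_value": "2024-01-01",
           "is_sensitive": False}
resp = requests.post(url, json=payload, headers={"X-Auth-Token": TOKEN})
resp.raise_for_status()
print(resp.json().get("is_success"))
```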
This API is used to delete a global variable.

Only the user who creates a global variable can delete the variable.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| var_name | Yes | String | Global variable name. The name can contain a maximum of 128 characters, including only digits, letters, and underscores (_), but cannot start with an underscore (_) or contain only digits. |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |

None
```json
{
  "is_success": true,
  "message": "string"
}
```
Table 3 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0001 | Parameter check errors occur. |
| DLI.0999 | Server-side errors occur. |
This API is used to modify a global variable.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| var_name | Yes | String | Global variable name. The name can contain a maximum of 128 characters, including only digits, letters, and underscores (_), but cannot start with an underscore (_) or contain only digits. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| var_value | Yes | String | Global variable value. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | Message content. |
```json
{
  "var_value": "string"
}
```
```json
{
  "is_success": true,
  "message": "string"
}
```
Table 4 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0001 | Parameter check errors occur. |
| DLI.0999 | Server-side errors occur. |
| DLI.12004 | The job does not exist. Check the reason or create a job. |
This API is used to query information about all global variables in the current project.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| limit | No | Integer | Number of returned records displayed on each page. The default value is 100. |
| offset | No | Integer | Offset. The default value is 0. |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| count | No | Integer | Number of global variables. |
| global_vars | No | Array of Objects | Global variable information. For details, see Table 4. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| id | No | Long | Global variable ID. |
| var_name | Yes | String | Global variable name. |
| var_value | Yes | String | Global variable value. |
| project_id | No | String | Project ID. |
| user_id | No | String | User ID. |
| user_name | No | String | Username. |
| is_sensitive | No | Boolean | Whether the variable is set as a sensitive variable. |
| create_time | No | Long | Creation time. |
| update_time | No | Long | Update time. |

None
```json
{
  "is_success": true,
  "message": "string",
  "count": 0,
  "global_vars": [
    {
      "id": 0,
      "var_name": "string",
      "var_value": "string",
      "project_id": "string",
      "user_id": "string"
    }
  ]
}
```
| Status Code | Description |
|---|---|
| 200 | All variables are queried successfully. |
| 400 | The input parameter is invalid. |
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0001 | Parameter check errors occur. |
| DLI.0999 | Server-side errors occur. |
This API is used to send an address connectivity test request to a specified queue and insert the test address into the table.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| queue_name | Yes | String | Name of a queue. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| address | Yes | String | Test address, in the format of IP address:port or domain name:port. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | Yes | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | Yes | String | System prompt. If execution succeeds, this parameter may be left blank. |
| task_id | Yes | String | Request ID. |
```json
{
  "address": "iam.xxx.com:443"
}
```
```json
{
  "is_success": true,
  "message": "check connectivity to address:iam.xxx.com with port: 443 successfully",
  "task_id": 9
}
```
Table 4 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
This API is used to query the connectivity test result after the test is submitted.

GET /v1.0/{project_id}/queues/{queue_name}/connection-test/{task_id}

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| queue_name | Yes | String | Name of a queue. |
| task_id | Yes | String | ID of the connectivity test task. You can obtain the value by calling Creating an Address Connectivity Test Request. |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | Yes | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | Yes | String | System prompt. If execution succeeds, this parameter may be left blank. |
| connectivity | Yes | String | Connectivity test result. |

None
```json
{
  "is_success": true,
  "message": "Get node connectivity status successfully for addressId:9",
  "connectivity": "REACHABLE"
}
```
Table 3 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
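The following minimal Python sketch submits a connectivity test and polls its result. The GET path is taken from the URI above; the POST path is inferred from it and is an assumption, as are the endpoint and token.

```python
# Minimal sketch: submit an address connectivity test, then poll the
# result. The POST path is an inferred assumption; verify it before use.
import time
import requests

ENDPOINT = "https://dli.example.com"
PROJECT_ID = "your-project-id"
QUEUE = "queue1"
TOKEN = "your-iam-token"
HEADERS = {"X-Auth-Token": TOKEN}

base = f"{ENDPOINT}/v1.0/{PROJECT_ID}/queues/{QUEUE}/connection-test"
task = requests.post(base, json={"address": "iam.xxx.com:443"},
                     headers=HEADERS).json()
task_id = task["task_id"]

# GET /v1.0/{project_id}/queues/{queue_name}/connection-test/{task_id}
time.sleep(2)  # give the test a moment to run
result = requests.get(f"{base}/{task_id}", headers=HEADERS).json()
print(result.get("connectivity"))  # e.g. REACHABLE
```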
This API is used to create a scheduled CU change for a specified queue.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| queue_name | Yes | String | Name of the queue for which you want to set a scheduled scaling plan. The name can contain only digits, letters, and underscores (_), but cannot contain only digits or start with an underscore (_). The name contains 1 to 128 characters. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| plan_name | Yes | String | Name of the CU change. The name can contain only digits, letters, and underscores (_), but cannot contain only digits or start with an underscore (_). |
| target_cu | Yes | Integer | Target CU value of the scheduled CU change. |
| start_hour | Yes | Integer | Start hour of the scheduled CU change. |
| start_minute | Yes | Integer | Start minute of the scheduled CU change. |
| repeat_day | Yes | Array of Strings | Repetition period of the scheduled CU change. You can select one or more days from Monday to Sunday, or select none. If this parameter is not specified, the scheduled CU change will be executed at the time specified by start_hour:start_minute after the current time. Example: "repeat_day": ["MONDAY", "TUESDAY", "WEDNESDAY", "SUNDAY"] |
| valid_date_begin | No | Long | Start time of the validity period (13-digit timestamp). |
| valid_date_end | No | Long | End time of the validity period (13-digit timestamp). |
| activate | No | Boolean | Whether the scheduled CU change is activated. The default value is true, indicating that the change is activated. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
```json
{
  "plan_name": "plan_A",
  "target_cu": 64,
  "start_hour": 20,
  "start_minute": 30,
  "repeat_day": [
    "MONDAY",
    "TUESDAY",
    "WEDNESDAY",
    "SUNDAY"
  ],
  "valid_date_begin": 1590949800000,
  "valid_date_end": 1591727400000,
  "activate": true
}
```
```json
{
  "is_success": true,
  "message": ""
}
```
Table 4 describes the status codes.

If an error occurs when this API is invoked, the system does not return a result similar to the preceding example. Instead, it returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0999 | Queue plans create failed. The plan plan_A can not generate a scale plan, please check all time settings for the plan. |
This API is used to query the scheduled CU changes of a specified queue.

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| queue_name | Yes | String | Name of the queue whose scheduled CU changes are to be queried. The name contains 1 to 128 characters. Use commas (,) to separate multiple queue names. |

None

| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Whether the request is successfully executed. The value true indicates success. |
| message | No | String | System prompt. If execution succeeds, this parameter may be left blank. |
| plans | No | Array of Objects | Scheduled scaling plan information. For details, see Table 3. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| id | No | Long | ID of the scheduled CU change. |
| plan_name | No | String | Name of the CU change. The name can contain only digits, letters, and underscores (_), but cannot contain only digits or start with an underscore (_). |
| target_cu | No | Integer | Target CU value of the scheduled CU change. |
| start_hour | No | Integer | Start hour of the queue scaling plan, in 24-hour format. |
| start_minute | No | Integer | Start minute of the scheduled CU change. |
| repeat_day | Yes | Array of Strings | Repetition period of the scheduled CU change. You can select one or more days from Monday to Sunday, or select none. If this parameter is not specified, the scheduled CU change will be executed at the time specified by start_hour:start_minute after the current time. Example: "repeat_day": ["MONDAY", "TUESDAY", "WEDNESDAY", "SUNDAY"] |
| valid_date_begin | No | Long | Start time of the validity period (13-digit timestamp). |
| valid_date_end | No | Long | End time of the validity period (13-digit timestamp). |
| activate | No | Boolean | Whether the scheduled CU change is activated. The default value is true, indicating that the change is activated. |
| last_execute_time | No | Long | Time when the scaling plan was last executed. |

None
```json
{
  "is_success": true,
  "message": "",
  "plans": [
    {
      "id": 1,
      "plan_name": "plan_Aa",
      "target_cu": 32,
      "start_hour": 11,
      "start_minute": 15,
      "repeat_day": [
        "MONDAY",
        "TUESDAY",
        "WEDNESDAY",
        "SUNDAY"
      ],
      "activate": true,
      "last_execute_time": 1593573428857
    },
    {
      "id": 6,
      "plan_name": "plan_Ab",
      "target_cu": 16,
      "start_hour": 14,
      "start_minute": 25,
      "repeat_day": [
        "MONDAY",
        "TUESDAY",
        "WEDNESDAY",
        "SUNDAY",
        "THURSDAY",
        "FRIDAY",
        "SATURDAY"
      ],
      "activate": true,
      "last_execute_time": 1593584829260
    }
  ]
}
```
Table 4 describes status codes.
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but instead returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0008 | There is no queue named queue1. |
This API is used to delete scheduled CU changes in batches.

URI format: POST /v1/{project_id}/queues/{queue_name}/plans/batch-delete
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| queue_name | Yes | String | Name of the queue for which the scheduled CU changes are to be deleted. The name contains 1 to 128 characters. Use commas (,) to separate multiple queue names. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| plan_ids | Yes | Array of Long | IDs of the scaling plans to be deleted. For details about how to obtain the IDs, see Viewing a Scheduled CU Change. Example: "plan_ids": [8, 10] |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If the execution succeeds, this parameter may be left blank. |
{ + "plan_ids": [3,4] +}+
{ + "is_success": true, + "message": "" +}+
Table 4 describes status codes.
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but instead returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0002 | The plans with id 8, 9 do not exist. |
This API is used to delete a scheduled CU change for a queue with a specified ID.
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| queue_name | Yes | String | Name of the queue for which the scheduled CU change is to be deleted. The name contains 1 to 128 characters. Use commas (,) to separate multiple queue names. |
| plan_id | Yes | Long | ID of the scheduled CU change to be deleted. For details about how to obtain the IDs, see Viewing a Scheduled CU Change. |
None
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If the execution succeeds, this parameter may be left blank. |
None
+{ + "is_success": true, + "message": "" +}+
Table 3 describes status codes.
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but instead returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0002 | The plan with id 8 does not exist. |
This API is used to modify a scheduled CU change for a queue with a specified ID.
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| queue_name | Yes | String | Name of the queue for which the scheduled CU change is to be modified. The name contains 1 to 128 characters. Use commas (,) to separate multiple queue names. |
| plan_id | Yes | String | ID of the scheduled CU change to be modified. Use commas (,) to separate multiple IDs. For details about how to obtain the IDs, see Viewing a Scheduled CU Change. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| plan_name | Yes | String | Name of a CU change. The name can contain only digits, letters, and underscores (_), but cannot contain only digits or start with an underscore (_). |
| target_cu | Yes | Integer | Target CU value of the scheduled CU change. |
| start_hour | Yes | Integer | Start hour of the scheduled CU change. |
| start_minute | Yes | Integer | Start minute of the scheduled CU change. |
| repeat_day | Yes | Array of Strings | Repetition period of the scheduled CU change. You can select one or more days from Monday to Sunday, or none. If this parameter is not specified, the scheduled CU change is executed at the time specified by start_hour:start_minute after the current time. Example: "repeat_day": ["MONDAY", "TUESDAY", "WEDNESDAY", "SUNDAY"] |
| valid_date_begin | No | Long | Start time of the validity period (13-digit timestamp). |
| valid_date_end | No | Long | End time of the validity period (13-digit timestamp). |
| activate | No | Boolean | Whether the scheduled CU change is activated. The default value is true, indicating that the change is activated. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If the execution succeeds, this parameter may be left blank. |
| queue_name | No | String | Name of the queue for which the scheduled CU change is modified. The name contains 1 to 128 characters. Use commas (,) to separate multiple queue names. |
| plan_id | No | String | ID of the modified scheduled CU change. Use commas (,) to separate multiple IDs. |
{ + "plan_name": "plan_Ad", + "target_cu": 64, + "start_hour": 19, + "start_minute": 30, + "repeat_day": ["THURSDAY","friday"], + "activate": false +}+
{ + "is_success": true, + "message": "", + "queue_name": "queue1", + "plan_id": 3 +}+
Table 4 describes status codes.
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but instead returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0999 | Queue plans create failed. The plan plan_A can not generate a scale plan, please check all time settings for the plan. |
This API is used to obtain the job execution progress. If a job is being executed, information about its subjobs can be obtained. If a job has just started or has ended, information about its subjobs cannot be obtained.
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| job_id | Yes | String | Job ID. |
None
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | Yes | Boolean | Indicates whether the request is successfully sent. Value true indicates that the request is successfully sent. |
| message | Yes | String | System prompt. If the execution succeeds, this parameter may be left blank. |
| job_id | No | String | ID of a job returned after a job is generated and submitted by using SQL statements. The job ID can be used to query the job status and results. |
| status | Yes | String | Job status. The status can be RUNNING, SCALING, LAUNCHING, FINISHED, FAILED, or CANCELLED. |
| sub_job_id | No | Integer | ID of the running subjob. If the subjob is not running or has already finished, the subjob ID may be empty. |
| progress | No | Double | Progress of the running subjob or of the entire job. The value is only a rough estimate of the subjob progress and does not indicate the detailed job progress. |
| sub_jobs | No | Array of Objects | Details about the subjobs of a running job. A job may contain multiple subjobs. For details, see Table 3. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| id | No | Integer | Subjob ID, corresponding to jobId of the open-source Spark JobData. |
| name | No | String | Subjob name, corresponding to the name of the open-source Spark JobData. |
| description | No | String | Description of a subjob, corresponding to the description of the open-source Spark JobData. |
| submission_time | No | String | Submission time of a subjob, corresponding to the submissionTime of the open-source Spark JobData. |
| completion_time | No | String | Completion time of a subjob, corresponding to the completionTime of the open-source Spark JobData. |
| stage_ids | No | Array of Integer | Stage IDs of a subjob, corresponding to the stageIds of the open-source Spark JobData. |
| job_group | No | String | ID of a DLI job, corresponding to the jobGroup of the open-source Spark JobData. |
| status | No | String | Subjob status, corresponding to the status of the open-source Spark JobData. |
| num_tasks | No | Integer | Number of tasks in a subjob, corresponding to numTasks of the open-source Spark JobData. |
| num_active_tasks | No | Integer | Number of running tasks in a subjob, corresponding to numActiveTasks of the open-source Spark JobData. |
| num_completed_tasks | No | Integer | Number of completed tasks in a subjob, corresponding to numCompletedTasks of the open-source Spark JobData. |
| num_skipped_tasks | No | Integer | Number of skipped tasks in a subjob, corresponding to numSkippedTasks of the open-source Spark JobData. |
| num_failed_tasks | No | Integer | Number of failed tasks in a subjob, corresponding to numFailedTasks of the open-source Spark JobData. |
| num_killed_tasks | No | Integer | Number of killed tasks in a subjob, corresponding to numKilledTasks of the open-source Spark JobData. |
| num_completed_indices | No | Integer | Number of completed task indices in a subjob, corresponding to numCompletedIndices of the open-source Spark JobData. |
| num_active_stages | No | Integer | Number of running stages in a subjob, corresponding to numActiveStages of the open-source Spark JobData. |
| num_completed_stages | No | Integer | Number of completed stages in a subjob, corresponding to numCompletedStages of the open-source Spark JobData. |
| num_skipped_stages | No | Integer | Number of skipped stages in a subjob, corresponding to numSkippedStages of the open-source Spark JobData. |
| num_failed_stages | No | Integer | Number of failed stages in a subjob, corresponding to numFailedStages of the open-source Spark JobData. |
| killed_tasks_summary | No | Map<String,Integer> | Summary of the killed tasks in a subjob, corresponding to killedTasksSummary of the open-source Spark JobData. |
None
+{ + "is_success": true, + "message": "", + "job_id": "85798b38-ae44-48eb-bb90-7cf0dcdafe7b", + "status": "RUNNING", + "sub_job_id": 0, + "progress": 0, + "sub_jobs": [ + { + "id": 0, + "name": "runJob at FileFormatWriter.scala:266", + "submission_time": "Mon Jul 27 17:24:03 CST 2020", + "stage_ids": [ + 0 + ], + "job_group": "85798b38-ae44-48eb-bb90-7cf0dcdafe7b", + "status": "RUNNING", + "num_tasks": 1, + "num_active_tasks": 1, + "num_completed_tasks": 0, + "num_skipped_tasks": 0, + "num_failed_tasks": 0, + "num_killed_tasks": 0, + "num_completed_indices": 0, + "num_active_stages": 1, + "num_completed_stages": 0, + "num_skipped_stages": 0, + "num_failed_stages": 0 + } + ] +}+
Table 4 describes the status code.
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but instead returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0999 | The queue backend version is too old or the queue is busy. |
This API is used to obtain the agency information of a DLI user.
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
None
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If the execution succeeds, this parameter may be left blank. |
| version | No | String | Agency version information. |
| current_roles | No | Array of Strings | Roles. Supported values: obs_adm (administrator permissions for accessing and using Object Storage Service), dis_adm (administrator permissions for using Data Ingestion Service data as a data source), ctable_adm (administrator permissions for accessing and using CloudTable), vpc_netadm (administrator permissions for using Virtual Private Cloud), smn_adm (administrator permissions for using Simple Message Notification), te_admin (Tenant Administrator permissions). |
None
+{ + "is_success": true, + "message": "", + "version": "v2", + "current_roles": [ + "ctable_adm", + "vpc_netadm", + "ief_adm", + "dis_adm", + "smn_adm", + "obs_adm" + ] +}+
Table 3 describes status codes.
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but instead returns an error code and an error message. For details, see Error Code.
| Error Code | Error Message |
|---|---|
| DLI.0002 | The object does not exist. |
| DLI.0999 | An internal error occurred. |
This API is used to create an agency for a DLI user.
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| roles | Yes | Array of Strings | Roles. Currently, only obs_adm, dis_adm, ctable_adm, vpc_netadm, smn_adm, and te_admin are supported: obs_adm (administrator permissions for accessing and using Object Storage Service), dis_adm (administrator permissions for using Data Ingestion Service data as a data source), ctable_adm (administrator permissions for accessing and using CloudTable), vpc_netadm (administrator permissions for using Virtual Private Cloud), smn_adm (administrator permissions for using Simple Message Notification), te_admin (Tenant Administrator permissions). |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If the execution succeeds, this parameter may be left blank. |
{ + "roles": [ + "ctable_adm", + "vpc_netadm", + "dis_adm", + "smn_adm", + "obs_adm" + ] +}+
{ + "is_success": true, + "message": "" +}+
Table 4 describes status codes.
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but instead returns an error code and an error message. For details, see Error Code.
+This section describes how to create and query a queue using APIs.
URI format: POST /v1.0/{project_id}/queues

Example request:

```json
{
  "queue_name": "queue1",
  "description": "test",
  "cu_count": 16,
  "resource_mode": 1,
  "queue_type": "sql"
}
```

Example response:

```json
{
  "is_success": true,
  "message": "",
  "queue_name": "queue1"
}
```
URI format: GET /v1.0/{project_id}/queues/{queue_name}

Example request:

```json
{}
```

Example response:

```json
{
  "is_success": true,
  "message": "",
  "owner": "testuser",
  "description": "",
  "queue_name": "queue1",
  "create_time": 1587613028851,
  "queue_type": "sql",
  "cu_count": 16,
  "resource_id": "03d51b88-db63-4611-b779-9a72ba0cf58b",
  "resource_mode": 0
}
```
This section describes how to create and query SQL jobs using APIs.
URI format: POST /v1.0/{project_id}/databases

Example request:

```json
{
  "database_name": "db1",
  "description": "this is for test"
}
```

Example response:

```json
{
  "is_success": true,
  "message": ""
}
```
URI format: POST /v1.0/{project_id}/databases/{database_name}/tables

Example request:

```json
{
  "table_name": "tb1",
  "data_location": "OBS",
  "description": "",
  "data_type": "csv",
  "data_path": "obs://obs/path1/test.csv",
  "columns": [
    {
      "column_name": "column1",
      "type": "string",
      "description": "",
      "is_partition_column": true
    },
    {
      "column_name": "column2",
      "type": "string",
      "description": "",
      "is_partition_column": false
    }
  ],
  "with_column_header": true,
  "delimiter": ",",
  "quote_char": "\"",
  "escape_char": "\\",
  "date_format": "yyyy-MM-dd",
  "timestamp_format": "yyyy-MM-dd HH:mm:ss"
}
```

Example response:

```json
{
  "is_success": true,
  "message": ""
}
```
URI format: POST /v1.0/{project_id}/jobs/submit-job

Example request:

```json
{
  "currentdb": "db1",
  "sql": "select * from tb1 limit 10",
  "queue_name": "queue1"
}
```

Example response:

```json
{
  "is_success": true,
  "message": "",
  "job_id": "95fcc908-9f1b-446c-8643-5653891d9fd9",
  "job_type": "QUERY",
  "job_mode": "async"
}
```
This section describes how to create and submit Spark jobs using APIs.
URI format: POST /v2.0/{project_id}/resources

Example request:

```json
{
  "paths": [
    "https://test.obs.xxx.com/txr_test/jars/spark-sdv-app.jar"
  ],
  "kind": "jar",
  "group": "gatk",
  "is_async": "true"
}
```

Example response:

```json
{
  "group_name": "gatk",
  "status": "READY",
  "resources": [
    "spark-sdv-app.jar",
    "wordcount",
    "wordcount.py"
  ],
  "details": [
    {
      "create_time": 0,
      "update_time": 0,
      "resource_type": "jar",
      "resource_name": "spark-sdv-app.jar",
      "status": "READY",
      "underlying_name": "987e208d-d46e-4475-a8c0-a62f0275750b_spark-sdv-app.jar"
    },
    {
      "create_time": 0,
      "update_time": 0,
      "resource_type": "jar",
      "resource_name": "wordcount",
      "status": "READY",
      "underlying_name": "987e208d-d46e-4475-a8c0-a62f0275750b_wordcount"
    },
    {
      "create_time": 0,
      "update_time": 0,
      "resource_type": "jar",
      "resource_name": "wordcount.py",
      "status": "READY",
      "underlying_name": "987e208d-d46e-4475-a8c0-a62f0275750b_wordcount.py"
    }
  ],
  "create_time": 1551334579654,
  "update_time": 1551345369070
}
```
URI format: GET /v2.0/{project_id}/resources/{resource_name}

Example request:

```json
{}
```

Example response:

```json
{
  "create_time": 1522055409139,
  "update_time": 1522228350501,
  "resource_type": "jar",
  "resource_name": "luxor-router-1.1.1.jar",
  "status": "uploading",
  "underlying_name": "7885d26e-c532-40f3-a755-c82c442f19b8_luxor-router-1.1.1.jar",
  "owner": "****"
}
```
URI format: POST /v2.0/{project_id}/batches

Example request:

```json
{
  "sc_type": "A",
  "jars": [
    "spark-examples_2.11-2.1.0.luxor.jar"
  ],
  "driverMemory": "1G",
  "driverCores": 1,
  "executorMemory": "1G",
  "executorCores": 1,
  "numExecutors": 1,
  "queue": "cce_general",
  "file": "spark-examples_2.11-2.1.0.luxor.jar",
  "className": "org.apache.spark.examples.SparkPi",
  "minRecoveryDelayTime": 10000,
  "maxRetryTimes": 20
}
```

Example response:

```json
{
  "id": "07a3e4e6-9a28-4e92-8d3f-9c538621a166",
  "appId": "",
  "name": "",
  "owner": "test1",
  "proxyUser": "",
  "state": "starting",
  "kind": "",
  "log": [],
  "sc_type": "CUSTOMIZED",
  "cluster_name": "aaa",
  "queue": "aaa",
  "create_time": 1607589874156,
  "update_time": 1607589874156
}
```
URI format: GET /v2.0/{project_id}/batches/{batch_id}/state

Example request:

```json
{}
```

Example response:

```json
{
  "id": "0a324461-d9d9-45da-a52a-3b3c7a3d809e",
  "state": "Success"
}
```
URI format: GET /v2.0/{project_id}/batches/{batch_id}/log

Example request:

```json
{}
```

Example response:

```json
{
  "id": "0a324461-d9d9-45da-a52a-3b3c7a3d809e",
  "from": 0,
  "total": 3,
  "log": [
    "Detailed information about job logs"
  ]
}
```
This section describes how to create and run a user-defined Flink job using APIs.
URI format: POST /v1.0/{project_id}/streaming/flink-jobs

Example request:

```json
{
  "name": "test",
  "desc": "job for test",
  "queue_name": "testQueue",
  "manager_cu_number": 1,
  "cu_number": 2,
  "parallel_number": 1,
  "tm_cus": 1,
  "tm_slot_num": 1,
  "log_enabled": true,
  "obs_bucket": "bucketName",
  "smn_topic": "topic",
  "main_class": "org.apache.flink.examples.streaming.JavaQueueStream",
  "restart_when_exception": false,
  "entrypoint": "javaQueueStream.jar",
  "entrypoint_args": "-windowSize 2000 -rate3",
  "dependency_jars": [
    "myGroup/test.jar",
    "myGroup/test1.jar"
  ],
  "dependency_files": [
    "myGroup/test.csv",
    "myGroup/test1.csv"
  ]
}
```

Example response:

```json
{
  "is_success": true,
  "message": "A Flink job is created successfully.",
  "job": {
    "job_id": 138,
    "status_name": "job_init",
    "status_desc": ""
  }
}
```
URI format: POST /v1.0/{project_id}/streaming/jobs/run

Example request:

```json
{
  "job_ids": [131, 130, 138, 137],
  "resume_savepoint": true
}
```

Example response:

```json
[
  {
    "is_success": "true",
    "message": "The request for submitting DLI jobs is delivered successfully."
  },
  {
    "is_success": "true",
    "message": "The request for submitting DLI jobs is delivered successfully."
  },
  {
    "is_success": "true",
    "message": "The request for submitting DLI jobs is delivered successfully."
  },
  {
    "is_success": "true",
    "message": "The request for submitting DLI jobs is delivered successfully."
  }
]
```
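A Python sketch that creates the Flink job and then starts it with the run API; identifiers are placeholders, and only a subset of the creation parameters shown above is passed.

```python
import requests

ENDPOINT = "https://dli.example-region.myhuaweicloud.com"  # placeholder
PROJECT_ID = "your_project_id"
HEADERS = {"X-Auth-Token": "your_iam_token"}
BASE = f"{ENDPOINT}/v1.0/{PROJECT_ID}/streaming"

# Create the user-defined Flink job from a previously uploaded jar.
created = requests.post(f"{BASE}/flink-jobs", headers=HEADERS, json={
    "name": "test",
    "queue_name": "testQueue",
    "manager_cu_number": 1,
    "cu_number": 2,
    "main_class": "org.apache.flink.examples.streaming.JavaQueueStream",
    "entrypoint": "javaQueueStream.jar",
}).json()
job_id = created["job"]["job_id"]

# Start it; job_ids accepts a batch, and resume_savepoint restores saved state.
run = requests.post(f"{BASE}/jobs/run", headers=HEADERS,
                    json={"job_ids": [job_id], "resume_savepoint": True})
print(run.json())
```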
This section describes how to create an enhanced datasource connection using an API.
URI format: POST /v2.0/{project_id}/datasource/enhanced-connections

Example request:

```json
{
  "name": "test1",
  "dest_vpc_id": "22094d8f-c310-4621-913d-4c4d655d8495",
  "dest_network_id": "78f2562a-36e4-4b39-95b9-f5aab22e1281",
  "queues": ["q1", "q2"],
  "hosts": [
    {
      "ip": "192.168.0.1",
      "name": "ecs-97f8-0001"
    },
    {
      "ip": "192.168.0.2",
      "name": "ecs-97f8-0002"
    }
  ]
}
```

Example response:

```json
{
  "is_success": true,
  "message": "",
  "connection_id": "2a620c33-5609-40c9-affd-2b6453071b0f"
}
```
URI format: GET /v2.0/{project_id}/datasource/enhanced-connections/{connection_id}

Example request:

```json
{}
```

Example response:

```json
{
  "is_success": true,
  "message": "",
  "name": "test1",
  "id": "2a620c33-5609-40c9-affd-2b6453071b0f",
  "available_queue_info": [
    {
      "status": "ACTIVE",
      "name": "queue1",
      "peer_id": "2a620c33-5609-40c9-affd-2b6453071b0f",
      "err_msg": "",
      "update_time": 1566889577861
    }
  ],
  "dest_vpc_id": "22094d8f-c310-4621-913d-4c4d655d8495",
  "dest_network_id": "78f2562a-36e4-4b39-95b9-f5aab22e1281",
  "isPrivis": true,
  "create_time": 1566888011125,
  "status": "ACTIVE",
  "hosts": [
    {
      "ip": "192.168.0.1",
      "name": "ecs-97f8-0001"
    },
    {
      "ip": "192.168.0.2",
      "name": "ecs-97f8-0002"
    }
  ]
}
```
This API is used to view the job execution result after a job is executed using SQL query statements. Currently, you can only query execution results of jobs of the QUERY type.
This API can be used to view only the first 1000 result records and does not support pagination. To view all query results, export them first. For details, see Exporting Query Results.
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| job_id | Yes | String | Job ID. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| page-size | No | Long | Number of result rows. The value ranges from 1 to 1000. The default value is 1000. |
| queue-name | No | String | Name of the execution queue used to obtain job results. If this parameter is not specified, the default system queue is used. |
The following is an example of a URL containing the query parameters:

GET /v1.0/{project_id}/jobs/{job_id}/preview?page-size={size}&queue-name={queue_name}
+None
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | No | Boolean | Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | No | String | System prompt. If the execution succeeds, this parameter may be left blank. |
| job_id | No | String | Job ID. You can get the value by calling Submitting a SQL Job (Recommended). |
| job_type | No | String | Job type, including DDL, DCL, IMPORT, EXPORT, QUERY, INSERT, DATA_MIGRATION, UPDATE, DELETE, RESTART_QUEUE, and SCALE_QUEUE. Currently, you can only query execution results of jobs of the QUERY type. |
| row_count | No | Integer | Total number of job results. |
| input_size | No | Long | Amount of data scanned during job execution. |
| schema | No | Array of Objects | Name and type of the job result columns. |
| rows | No | Array of Strings | Job result set. |
None
+{ + "is_success": true, + "message": "", + "job_id": "ead0b276-8ed4-4eb5-b520-58f1511e7033", + "job_type": "QUERY", + "row_count": 1, + "input_size": 74, + "schema": [ + { + "c1": "int" + }, + { + "c2": "string" + } + ], + "rows": [ + [ + 23, + "sda" + ] + ] +}+
Table 4 describes the status code.
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but instead returns an error code and an error message. For details, see Error Code.
+This API is used to generate a static stream graph for a Flink SQL job.
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| project_id | Yes | String | Project ID, which is used for resource isolation. For details about how to obtain its value, see Obtaining a Project ID. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| sql_body | Yes | String | SQL statement of the job. |
| cu_number | No | Integer | Total number of CUs. |
| manager_cu_number | No | Integer | Number of CUs of the management unit. |
| parallel_number | No | Integer | Maximum degree of parallelism. |
| tm_cus | No | Integer | Number of CUs in a TaskManager. |
| tm_slot_num | No | Integer | Number of slots in a TaskManager. |
| operator_config | No | String | Operator configurations. |
| static_estimator | No | Boolean | Whether to estimate static resources. |
| job_type | No | String | Job type. Only flink_opensource_sql_job is supported. |
| graph_type | No | String | Stream graph type. Two types of stream graphs are currently supported; the example below uses job_graph. |
| static_estimator_config | No | String | Traffic or hit ratio of each operator, which is a character string in JSON format. |
| Parameter | Mandatory | Type | Description |
|---|---|---|---|
| is_success | Yes | Boolean | Indicates whether the request is successfully executed. Value true indicates that the request is successfully executed. |
| message | Yes | String | System prompt. If the execution succeeds, this parameter may be left blank. |
| error_code | Yes | String | Error code. |
| stream_graph | Yes | String | Description of the static stream graph. |
{ + "cu_number": 4, + "manager_cu_number": 1, + "parallel_number": 4, + "tm_cus": 1, + "tm_slot_num": 1, + "sql_body": "", + "operator_config": "", + "static_estimator": true, + "job_type": "flink_opensource_sql_job", + "graph_type": "job_graph" + }+
{ + "is_success": true, + "message": "", + "error_code": "", + "stream_graph": "{\n \"nodes\" : [ {\n \"id\" : 1,\n \"operator_id\" : \"bc764cd8ddf7a0cff126f51c16239658\",\n \"type\" : \"Source\",\n + \"contents\" : \"kafkaSource\",\n \"parallelism\" : 1\n }, {\n \"id\" : 2,\n \"operator_id\" : \"0a448493b4782967b150582570326227\",\n \"type\" : \"select\",\n \"contents\" : \"car_id, car_owner, car_brand, car_speed\",\n \"parallelism\" : 1,\n \"predecessors\" : [ {\n \"id\" : 1\n } ]\n }, {\n \"id\" : 4,\n \"operator_id\" : \"6d2677a0ecc3fd8df0b72ec675edf8f4\",\n \"type\" : \"Sink\",\n \"contents\" : \"kafkaSink\",\n \"parallelism\" : 1,\n \"predecessors\" : [ {\n \"id\" : 2\n } ]\n } ]\n}" +}+
Table 4 describes status codes.
If an error occurs when this API is invoked, the system does not return a result similar to the preceding example, but instead returns an error code and an error message. For details, see Error Code.
Public cloud APIs comply with the RESTful API design principles. REST-based web services are organized into resources. Each resource is identified by one or more Uniform Resource Identifiers (URIs). An application accesses a resource based on the resource's Uniform Resource Locator (URL). A URL is usually in the format https://Endpoint/uri, where uri is the resource path, that is, the API access path.

Public cloud APIs use HTTPS as the transmission protocol. Requests and responses are transmitted as JSON messages, with the media type application/json.

For details about how to use APIs, see API Usage Guidelines.