diff --git a/docs/lts/umn/ALL_META.TXT.json b/docs/lts/umn/ALL_META.TXT.json
index c668ea15..8c8af207 100644
--- a/docs/lts/umn/ALL_META.TXT.json
+++ b/docs/lts/umn/ALL_META.TXT.json
@@ -39,10 +39,30 @@
"title":"Basic Functions",
"githuburl":""
},
+ {
+ "uri":"lts-0740.html",
+ "product_code":"",
+ "code":"5",
+ "des":"This section describes the restrictions on LTS log read/write.",
+ "doc_type":"",
+ "kw":"Usage Restrictions,Service Overview,User Guide",
+ "title":"Usage Restrictions",
+ "githuburl":""
+ },
+ {
+ "uri":"lts-03205.html",
+ "product_code":"lts",
+ "code":"6",
+ "des":"If you need to assign different permissions to employees in your enterprise to access your LTS resources, IAM is a good choice for fine-grained permissions management. IAM p",
+ "doc_type":"productdesc",
+ "kw":"Permissions Management,Service Overview,User Guide",
+ "title":"Permissions Management",
+ "githuburl":""
+ },
{
"uri":"lts_01_0005.html",
"product_code":"lts",
- "code":"5",
+ "code":"7",
"des":"LTS provides a platform to store and analyze log data for Virtual Private Cloud (VPC). After VPC is associated with a log group and log stream in LTS, Network Interface C",
"doc_type":"productdesc",
"kw":"Related Services,Service Overview,User Guide",
@@ -52,7 +72,7 @@
{
"uri":"lts_01_0008.html",
"product_code":"lts",
- "code":"6",
+ "code":"8",
"des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"doc_type":"usermanual",
"kw":"Getting Started",
@@ -62,7 +82,7 @@
{
"uri":"lts_01_0009.html",
"product_code":"lts",
- "code":"7",
+ "code":"9",
"des":"A Virtual Private Cloud (VPC) flow log captures information about the traffic going to and from your VPC. You can use flow logs to monitor network traffic, analyze networ",
"doc_type":"usermanual",
"kw":"Configuring the VPC Flow Log Function,Getting Started,User Guide",
@@ -72,7 +92,7 @@
{
"uri":"lts_01_0010.html",
"product_code":"lts",
- "code":"8",
+ "code":"10",
"des":"This section describes how to enable trace analysis on the Cloud Trace Service (CTS) console to report traces to LTS, so you can query the traces on the LTS console.Opera",
"doc_type":"usermanual",
"kw":"Configuring CTS to Upload Traces to LTS,Getting Started,User Guide",
@@ -80,9 +100,9 @@
"githuburl":""
},
{
- "uri":"lts_04_0002.html",
+ "uri":"lts_04_1053.html",
"product_code":"",
- "code":"9",
+ "code":"11",
"des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"doc_type":"",
"kw":"Log Management",
@@ -90,169 +110,369 @@
"githuburl":""
},
{
- "uri":"lts_04_0003.html",
+ "uri":"lts_04_1153.html",
"product_code":"",
- "code":"10",
- "des":"A log group is a group of log streams which share the same log retention settings. Up to 100 log groups can be created for a single account.Log in to the LTS console, cho",
+ "code":"12",
+ "des":"The LTS console provides resource statistics, your favorite log streams/favorite log streams (local cache), and recently visited log streams.This area shows the read/writ",
"doc_type":"",
+ "kw":"LTS Console,Log Management,User Guide",
+ "title":"LTS Console",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_04_1154.html",
+ "product_code":"",
+ "code":"13",
+ "des":"Log resource statistics are classified into read/write traffic, index traffic, and log volume. The statistics are for reference only. You can also visualize log resource ",
+ "doc_type":"",
+ "kw":"Resource Statistics,Log Management,User Guide",
+ "title":"Resource Statistics",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_04_0003.html",
+ "product_code":"lts",
+ "code":"14",
+ "des":"A log group is a group of log streams. Up to 100 log groups can be created for a single account.Log groups can be created in two ways. They are automatically created when",
+ "doc_type":"usermanual",
"kw":"Managing Log Groups,Log Management,User Guide",
"title":"Managing Log Groups",
"githuburl":""
},
{
"uri":"lts_04_0004.html",
- "product_code":"",
- "code":"11",
+ "product_code":"lts",
+ "code":"15",
"des":"A log stream is the basic unit for reading and writing logs. Sorting logs into different log streams makes it easier to find specific logs when you need them.Up to 100 lo",
- "doc_type":"",
+ "doc_type":"usermanual",
"kw":"Managing Log Streams,Log Management,User Guide",
"title":"Managing Log Streams",
"githuburl":""
},
{
- "uri":"lts_04_0005.html",
+ "uri":"lts_02_0030.html",
"product_code":"lts",
- "code":"12",
- "des":"ICAgent collects logs from hosts based on your specified collection rules, and packages and sends the collected log data to LTS on a log-stream basis. You can view logs o",
+ "code":"16",
+ "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"doc_type":"usermanual",
- "kw":"Configuring Log Collection Rules,Log Management,User Guide",
- "title":"Configuring Log Collection Rules",
+ "kw":"Log Ingestion",
+ "title":"Log Ingestion",
"githuburl":""
},
{
- "uri":"lts_04_0007.html",
+ "uri":"lts_04_0105.html",
+ "product_code":"lts",
+ "code":"17",
+ "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
+ "doc_type":"usermanual",
+ "kw":"Collecting Logs from Cloud Services",
+ "title":"Collecting Logs from Cloud Services",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_04_0511.html",
"product_code":"",
- "code":"13",
+ "code":"18",
+ "des":"LTS can collect logs from Cloud Container Engine (CCE).ICAgent has been installed and added to the host group.You have disabled Output to AOM.CCE cluster nodes whose cont",
+ "doc_type":"",
+ "kw":"Collecting Logs from CCE,Collecting Logs from Cloud Services,User Guide",
+ "title":"Collecting Logs from CCE",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_04_1031.html",
+ "product_code":"",
+ "code":"19",
+ "des":"ICAgent collects logs from hosts based on your specified collection rules, and packages and sends the collected log data to LTS on a log stream basis. You can view logs o",
+ "doc_type":"",
+ "kw":"Collecting Logs from ECS,Collecting Logs from Cloud Services,User Guide",
+ "title":"Collecting Logs from ECS",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_02_1032.html",
+ "product_code":"",
+ "code":"20",
"des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"doc_type":"",
- "kw":"Log View",
- "title":"Log View",
+ "kw":"Host Management",
+ "title":"Host Management",
"githuburl":""
},
{
- "uri":"lts_04_0008.html",
+ "uri":"lts_02_1033.html",
"product_code":"",
- "code":"14",
- "des":"You can view the logs reported to the LTS console in real time.You have created log groups and log streams.You have installed ICAgent.You have configured log collection r",
+ "code":"21",
+ "des":"Host groups allow you to configure host log ingestion efficiently. You can sort multiple hosts to a host group and associate the host group with log ingestion configurati",
"doc_type":"",
- "kw":"Viewing Real-Time Logs,Log View,User Guide",
- "title":"Viewing Real-Time Logs",
- "githuburl":""
- },
- {
- "uri":"lts_04_0009.html",
- "product_code":"",
- "code":"15",
- "des":"Follow the directions below to search logs by keyword and time range:On the LTS console, click Log Management.In the log group list, click the name of a log group.In the ",
- "doc_type":"",
- "kw":"Log Search,Log View,User Guide",
- "title":"Log Search",
- "githuburl":""
- },
- {
- "uri":"lts_04_0010.html",
- "product_code":"",
- "code":"16",
- "des":"To search for logs using a keyword repeatedly, perform the following operations to configure quick search.Log in to the LTS console and choose Log Management.In the log g",
- "doc_type":"",
- "kw":"Quick Search,Log View,User Guide",
- "title":"Quick Search",
- "githuburl":""
- },
- {
- "uri":"lts_04_0041.html",
- "product_code":"",
- "code":"17",
- "des":"You can transfer logs to OBS to keep logs for a long time.Local log files are cleared periodically, but the logs transferred to OBS will not be affected.To transfer logs,",
- "doc_type":"",
- "kw":"Log Transfer,User Guide",
- "title":"Log Transfer",
+ "kw":"Managing Host Groups,Host Management,User Guide",
+ "title":"Managing Host Groups",
"githuburl":""
},
{
"uri":"lts_04_0012.html",
- "product_code":"lts",
- "code":"18",
+ "product_code":"",
+ "code":"22",
"des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
- "doc_type":"usermanual",
- "kw":"Agent Management",
- "title":"Agent Management",
+ "doc_type":"",
+ "kw":"Managing Hosts",
+ "title":"Managing Hosts",
"githuburl":""
},
{
"uri":"lts_02_0013.html",
- "product_code":"",
- "code":"19",
- "des":"ICAgent is a log collection tool for LTS. If you use LTS to collect logs from a host running Linux OS, you need to install ICAgent on the host.Ensure that the time and ti",
- "doc_type":"",
- "kw":"Installing ICAgent (Linux),Agent Management,User Guide",
- "title":"Installing ICAgent (Linux)",
+ "product_code":"lts",
+ "code":"23",
+ "des":"ICAgent is a log collection tool for LTS. To use LTS to collect logs from hosts, you need to install ICAgent on the hosts.Ensure that the time and time zone of your local",
+ "doc_type":"usermanual",
+ "kw":"Installing ICAgent,Managing Hosts,User Guide",
+ "title":"Installing ICAgent",
"githuburl":""
},
{
"uri":"lts_02_0014.html",
- "product_code":"",
- "code":"20",
+ "product_code":"lts",
+ "code":"24",
"des":"To deliver a better collection experience, LTS regularly upgrades ICAgent. When LTS prompts you that a new ICAgent version is available, you can follow the directions her",
- "doc_type":"",
- "kw":"Upgrading ICAgent,Agent Management,User Guide",
+ "doc_type":"usermanual",
+ "kw":"Upgrading ICAgent,Managing Hosts,User Guide",
"title":"Upgrading ICAgent",
"githuburl":""
},
{
"uri":"lts_02_0015.html",
- "product_code":"",
- "code":"21",
- "des":"If ICAgent is uninstalled from a host, log collection will be affected. Exercise caution when performing this operation.Uninstalling ICAgent does not delete the installat",
- "doc_type":"",
- "kw":"Uninstalling ICAgent,Agent Management,User Guide",
+ "product_code":"lts",
+ "code":"25",
+ "des":"If ICAgent is uninstalled from a host, log collection will be affected. Exercise caution when performing this operation.Only ICAgent installed on Linux hosts can be unins",
+ "doc_type":"usermanual",
+ "kw":"Uninstalling ICAgent,Managing Hosts,User Guide",
"title":"Uninstalling ICAgent",
"githuburl":""
},
{
"uri":"lts_04_0013.html",
"product_code":"lts",
- "code":"22",
+ "code":"26",
"des":"The following table lists the ICAgent statuses.",
"doc_type":"usermanual",
- "kw":"ICAgent Statuses,Agent Management,User Guide",
+ "kw":"ICAgent Statuses,Managing Hosts,User Guide",
"title":"ICAgent Statuses",
"githuburl":""
},
{
- "uri":"lts_04_0017.html",
+ "uri":"lts_05_0004.html",
+ "product_code":"lts",
+ "code":"27",
+ "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
+ "doc_type":"usermanual",
+ "kw":"Log Search and View",
+ "title":"Log Search and View",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_05_0005.html",
"product_code":"",
- "code":"23",
+ "code":"28",
+ "des":"Follow the directions below to search logs by keyword and time range:On the LTS console, choose Log Management in the navigation pane on the left.In the log group list, c",
+ "doc_type":"",
+ "kw":"Log Search,Log Search and View,User Guide",
+ "title":"Log Search",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_05_0006.html",
+ "product_code":"lts",
+ "code":"29",
+ "des":"You can view reported logs on the LTS console in real time.You have created log groups and log streams.You have installed ICAgent.You have configured log collection rules",
+ "doc_type":"usermanual",
+ "kw":"Viewing Real-Time Logs,Log Search and View,User Guide",
+ "title":"Viewing Real-Time Logs",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_05_0009.html",
+ "product_code":"lts",
+ "code":"30",
+ "des":"To search for logs using a keyword repeatedly, perform the following operations to configure quick search.On the LTS console, choose Log Management in the navigation pane",
+ "doc_type":"usermanual",
+ "kw":"Quick Search,Log Search and View,User Guide",
+ "title":"Quick Search",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_05_0007.html",
+ "product_code":"lts",
+ "code":"31",
+ "des":"Monitoring keywords in logs helps you keep track of system performance and services. For example, the number of ERROR keywords indicates the system health, and the number",
+ "doc_type":"usermanual",
+ "kw":"Quick Analysis,Log Search and View,User Guide",
+ "title":"Quick Analysis",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_04_0002.html",
+ "product_code":"lts",
+ "code":"32",
+ "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
+ "doc_type":"usermanual",
+ "kw":"Log Analysis",
+ "title":"Log Analysis",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_0821.html",
+ "product_code":"lts",
+ "code":"33",
+ "des":"Log data can be structured or unstructured. Structured data is quantitative data or can be defined by unified data models. It has a fixed length and format. Unstructured ",
+ "doc_type":"usermanual",
+ "kw":"Log Structuring,Log Analysis,User Guide",
+ "title":"Log Structuring",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_04_0040.html",
+ "product_code":"lts",
+ "code":"34",
+ "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
+ "doc_type":"usermanual",
+ "kw":"Log Transfer",
+ "title":"Log Transfer",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_04_0011.html",
+ "product_code":"lts",
+ "code":"35",
+ "des":"Logs reported from hosts and cloud services are retained in LTS for seven days by default. You can set the retention period to be 1 to 30 days. Retained logs are deleted ",
+ "doc_type":"usermanual",
+ "kw":"Overview,Log Transfer,User Guide",
+ "title":"Overview",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_04_0041.html",
+ "product_code":"lts",
+ "code":"36",
+ "des":"You can transfer logs to OBS and download log files from the OBS console.To transfer logs, you must have the OBS Administrator permissions apart from the LTS permissions.",
+ "doc_type":"usermanual",
+ "kw":"Transferring Logs to OBS,Log Transfer,User Guide",
+ "title":"Transferring Logs to OBS",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_04_0043.html",
+ "product_code":"lts",
+ "code":"37",
+ "des":"You can use DMS APIs to retrieve logs in real time.Logs have been ingested to LTS.Before registering a DMS Kafka instance, configure an inbound rule to allow access from ",
+ "doc_type":"usermanual",
+ "kw":"Transferring Logs to DMS,Log Transfer,User Guide",
+ "title":"Transferring Logs to DMS",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_06_0003.html",
+ "product_code":"",
+ "code":"38",
"des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"doc_type":"",
- "kw":"Appendixes",
- "title":"Appendixes",
+ "kw":"Configuration Center",
+ "title":"Configuration Center",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_05_0003.html",
+ "product_code":"",
+ "code":"39",
+ "des":"To reduce the memory, database, and disk space usage, you can set log collection as required. The log collection switch is used to determine whether to collect log data.T",
+ "doc_type":"",
+ "kw":"Log Collection,Configuration Center,User Guide",
+ "title":"Log Collection",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_04_0015.html",
+ "product_code":"lts",
+ "code":"40",
+ "des":"This chapter describes how to use Identity and Access Management (IAM) for fine-grained permissions control for your LTS. With IAM, you can:Create IAM users for personnel",
+ "doc_type":"usermanual",
+ "kw":"Permissions Management,User Guide",
+ "title":"Permissions Management",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_04_0058.html",
+ "product_code":"",
+ "code":"41",
+ "des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
+ "doc_type":"",
+ "kw":"FAQs",
+ "title":"FAQs",
"githuburl":""
},
{
"uri":"lts_03_0015.html",
"product_code":"lts",
- "code":"24",
- "des":"Log in to the console, hover your cursor over your username in the upper right corner, and select My Credentials.Click the Access Keys tab.Click Create Access Key, and en",
+ "code":"42",
+ "des":"Obtain and use the AK/SK of a public account.Log in to the console, hover your cursor over your username in the upper right corner, and select My Credentials.Click the Ac",
"doc_type":"usermanual",
- "kw":"How Do I Obtain an AK/SK Pair?,Appendixes,User Guide",
+ "kw":"How Do I Obtain an AK/SK Pair?,FAQs,User Guide",
"title":"How Do I Obtain an AK/SK Pair?",
"githuburl":""
},
{
- "uri":"lts_03_0002.html",
+ "uri":"lts_faq_0044.html",
"product_code":"lts",
- "code":"25",
- "des":"When installing ICAgent, you can create an IAM agency, and ICAgent will automatically obtain an AK/SK pair and generate the ICAgent installation command.Log in to the con",
+ "code":"43",
+ "des":"No. During log transfer, logs are \"replicated\" to OBS buckets. To view transferred log files, click the name of the corresponding OBS bucket on the Log Transfer page of t",
"doc_type":"usermanual",
- "kw":"How Do I Install ICAgent by Creating an Agency?,Appendixes,User Guide",
- "title":"How Do I Install ICAgent by Creating an Agency?",
+ "kw":"Does LTS Delete Logs That Have Been Transferred to OBS Buckets?,FAQs,User Guide",
+ "title":"Does LTS Delete Logs That Have Been Transferred to OBS Buckets?",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_faq_0314.html",
+ "product_code":"lts",
+ "code":"44",
+ "des":"The OBS bucket used for log transfer has been deleted. Specify another bucket.Access control on the OBS bucket is incorrectly configured. Go to the OBS console to correct",
+ "doc_type":"usermanual",
+ "kw":"What Are the Common Causes of Abnormal Log Transfer?,FAQs,User Guide",
+ "title":"What Are the Common Causes of Abnormal Log Transfer?",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_faq_0610.html",
+ "product_code":"lts",
+ "code":"45",
+ "des":"When Cloud Trace Service (CTS) is connected to LTS, a log group and log stream are automatically created for CTS on the LTS console. To transfer CTS logs to OBS, do as fo",
+ "doc_type":"usermanual",
+ "kw":"How Do I Transfer CTS Logs to an OBS Bucket?,FAQs,User Guide",
+ "title":"How Do I Transfer CTS Logs to an OBS Bucket?",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_faq_0031.html",
+ "product_code":"lts",
+ "code":"46",
+ "des":"No log events are displayed on the Raw Logs tab in a log stream on the LTS console.ICAgent has not been installed.The collection path is incorrectly configured.The Log Co",
+ "doc_type":"usermanual",
+ "kw":"What Can I Do If I Cannot View Raw Logs on the LTS Console?,FAQs,User Guide",
+ "title":"What Can I Do If I Cannot View Raw Logs on the LTS Console?",
+ "githuburl":""
+ },
+ {
+ "uri":"lts_faq_0003.html",
+ "product_code":"lts",
+ "code":"47",
+ "des":"Host logs. ICAgent should be installed on the target hosts for log collection.Cloud service logs. To collect logs from cloud services, such as Elastic Load Balance (ELB) ",
+ "doc_type":"usermanual",
+ "kw":"What Kind of Logs and Files Can LTS Collect?,FAQs,User Guide",
+ "title":"What Kind of Logs and Files Can LTS Collect?",
"githuburl":""
},
{
"uri":"lts_01_0033.html",
"product_code":"lts",
- "code":"26",
+ "code":"48",
"des":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"doc_type":"usermanual",
"kw":"Change History,User Guide",
diff --git a/docs/lts/umn/CLASS.TXT.json b/docs/lts/umn/CLASS.TXT.json
index 6419aa4a..0f20c330 100644
--- a/docs/lts/umn/CLASS.TXT.json
+++ b/docs/lts/umn/CLASS.TXT.json
@@ -35,6 +35,24 @@
"p_code":"1",
"code":"4"
},
+ {
+ "desc":"This section describes the restrictions on LTS log read/write.",
+ "product_code":"lts",
+ "title":"Usage Restrictions",
+ "uri":"lts-0740.html",
+ "doc_type":"usermanual",
+ "p_code":"1",
+ "code":"5"
+ },
+ {
+ "desc":"If you need to assign different permissions to employees in your enterprise to access your LTS resources, IAM is a good choice for fine-grained permissions management. IAM p",
+ "product_code":"lts",
+ "title":"Permissions Management",
+ "uri":"lts-03205.html",
+ "doc_type":"usermanual",
+ "p_code":"1",
+ "code":"6"
+ },
{
"desc":"LTS provides a platform to store and analyze log data for Virtual Private Cloud (VPC). After VPC is associated with a log group and log stream in LTS, Network Interface C",
"product_code":"lts",
@@ -42,7 +60,7 @@
"uri":"lts_01_0005.html",
"doc_type":"usermanual",
"p_code":"1",
- "code":"5"
+ "code":"7"
},
{
"desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
@@ -51,7 +69,7 @@
"uri":"lts_01_0008.html",
"doc_type":"usermanual",
"p_code":"",
- "code":"6"
+ "code":"8"
},
{
"desc":"A Virtual Private Cloud (VPC) flow log captures information about the traffic going to and from your VPC. You can use flow logs to monitor network traffic, analyze networ",
@@ -59,8 +77,8 @@
"title":"Configuring the VPC Flow Log Function",
"uri":"lts_01_0009.html",
"doc_type":"usermanual",
- "p_code":"6",
- "code":"7"
+ "p_code":"8",
+ "code":"9"
},
{
"desc":"This section describes how to enable trace analysis on the Cloud Trace Service (CTS) console to report traces to LTS, so you can query the traces on the LTS console.Opera",
@@ -68,26 +86,44 @@
"title":"Configuring CTS to Upload Traces to LTS",
"uri":"lts_01_0010.html",
"doc_type":"usermanual",
- "p_code":"6",
- "code":"8"
+ "p_code":"8",
+ "code":"10"
},
{
"desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"product_code":"lts",
"title":"Log Management",
- "uri":"lts_04_0002.html",
+ "uri":"lts_04_1053.html",
"doc_type":"usermanual",
"p_code":"",
- "code":"9"
+ "code":"11"
},
{
- "desc":"A log group is a group of log streams which share the same log retention settings. Up to 100 log groups can be created for a single account.Log in to the LTS console, cho",
+ "desc":"The LTS console provides resource statistics, your favorite log streams/favorite log streams (local cache), and recently visited log streams.This area shows the read/writ",
+ "product_code":"lts",
+ "title":"LTS Console",
+ "uri":"lts_04_1153.html",
+ "doc_type":"usermanual",
+ "p_code":"11",
+ "code":"12"
+ },
+ {
+ "desc":"Log resource statistics are classified into read/write traffic, index traffic, and log volume. The statistics are for reference only. You can also visualize log resource ",
+ "product_code":"lts",
+ "title":"Resource Statistics",
+ "uri":"lts_04_1154.html",
+ "doc_type":"usermanual",
+ "p_code":"11",
+ "code":"13"
+ },
+ {
+ "desc":"A log group is a group of log streams. Up to 100 log groups can be created for a single account.Log groups can be created in two ways. They are automatically created when",
"product_code":"lts",
"title":"Managing Log Groups",
"uri":"lts_04_0003.html",
"doc_type":"usermanual",
- "p_code":"9",
- "code":"10"
+ "p_code":"11",
+ "code":"14"
},
{
"desc":"A log stream is the basic unit for reading and writing logs. Sorting logs into different log streams makes it easier to find specific logs when you need them.Up to 100 lo",
@@ -95,80 +131,80 @@
"title":"Managing Log Streams",
"uri":"lts_04_0004.html",
"doc_type":"usermanual",
- "p_code":"9",
- "code":"11"
- },
- {
- "desc":"ICAgent collects logs from hosts based on your specified collection rules, and packages and sends the collected log data to LTS on a log-stream basis. You can view logs o",
- "product_code":"lts",
- "title":"Configuring Log Collection Rules",
- "uri":"lts_04_0005.html",
- "doc_type":"usermanual",
- "p_code":"9",
- "code":"12"
- },
- {
- "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
- "product_code":"lts",
- "title":"Log View",
- "uri":"lts_04_0007.html",
- "doc_type":"usermanual",
- "p_code":"",
- "code":"13"
- },
- {
- "desc":"You can view the logs reported to the LTS console in real time.You have created log groups and log streams.You have installed ICAgent.You have configured log collection r",
- "product_code":"lts",
- "title":"Viewing Real-Time Logs",
- "uri":"lts_04_0008.html",
- "doc_type":"usermanual",
- "p_code":"13",
- "code":"14"
- },
- {
- "desc":"Follow the directions below to search logs by keyword and time range:On the LTS console, click Log Management.In the log group list, click the name of a log group.In the ",
- "product_code":"lts",
- "title":"Log Search",
- "uri":"lts_04_0009.html",
- "doc_type":"usermanual",
- "p_code":"13",
+ "p_code":"11",
"code":"15"
},
{
- "desc":"To search for logs using a keyword repeatedly, perform the following operations to configure quick search.Log in to the LTS console and choose Log Management.In the log g",
+ "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"product_code":"lts",
- "title":"Quick Search",
- "uri":"lts_04_0010.html",
- "doc_type":"usermanual",
- "p_code":"13",
- "code":"16"
- },
- {
- "desc":"You can transfer logs to OBS to keep logs for a long time.Local log files are cleared periodically, but the logs transferred to OBS will not be affected.To transfer logs,",
- "product_code":"lts",
- "title":"Log Transfer",
- "uri":"lts_04_0041.html",
+ "title":"Log Ingestion",
+ "uri":"lts_02_0030.html",
"doc_type":"usermanual",
"p_code":"",
- "code":"17"
+ "code":"16"
},
{
"desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"product_code":"lts",
- "title":"Agent Management",
- "uri":"lts_04_0012.html",
+ "title":"Collecting Logs from Cloud Services",
+ "uri":"lts_04_0105.html",
"doc_type":"usermanual",
- "p_code":"",
+ "p_code":"16",
+ "code":"17"
+ },
+ {
+ "desc":"LTS can collect logs from Cloud Container Engine (CCE).ICAgent has been installed and added to the host group.You have disabled Output to AOM.CCE cluster nodes whose cont",
+ "product_code":"lts",
+ "title":"Collecting Logs from CCE",
+ "uri":"lts_04_0511.html",
+ "doc_type":"usermanual",
+ "p_code":"17",
"code":"18"
},
{
- "desc":"ICAgent is a log collection tool for LTS. If you use LTS to collect logs from a host running Linux OS, you need to install ICAgent on the host.Ensure that the time and ti",
+ "desc":"ICAgent collects logs from hosts based on your specified collection rules, and packages and sends the collected log data to LTS on a log stream basis. You can view logs o",
"product_code":"lts",
- "title":"Installing ICAgent (Linux)",
+ "title":"Collecting Logs from ECS",
+ "uri":"lts_04_1031.html",
+ "doc_type":"usermanual",
+ "p_code":"17",
+ "code":"19"
+ },
+ {
+ "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
+ "product_code":"lts",
+ "title":"Host Management",
+ "uri":"lts_02_1032.html",
+ "doc_type":"usermanual",
+ "p_code":"",
+ "code":"20"
+ },
+ {
+ "desc":"Host groups allow you to configure host log ingestion efficiently. You can sort multiple hosts to a host group and associate the host group with log ingestion configurati",
+ "product_code":"lts",
+ "title":"Managing Host Groups",
+ "uri":"lts_02_1033.html",
+ "doc_type":"usermanual",
+ "p_code":"20",
+ "code":"21"
+ },
+ {
+ "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
+ "product_code":"lts",
+ "title":"Managing Hosts",
+ "uri":"lts_04_0012.html",
+ "doc_type":"usermanual",
+ "p_code":"20",
+ "code":"22"
+ },
+ {
+ "desc":"ICAgent is a log collection tool for LTS. To use LTS to collect logs from hosts, you need to install ICAgent on the hosts.Ensure that the time and time zone of your local",
+ "product_code":"lts",
+ "title":"Installing ICAgent",
"uri":"lts_02_0013.html",
"doc_type":"usermanual",
- "p_code":"18",
- "code":"19"
+ "p_code":"22",
+ "code":"23"
},
{
"desc":"To deliver a better collection experience, LTS regularly upgrades ICAgent. When LTS prompts you that a new ICAgent version is available, you can follow the directions her",
@@ -176,17 +212,17 @@
"title":"Upgrading ICAgent",
"uri":"lts_02_0014.html",
"doc_type":"usermanual",
- "p_code":"18",
- "code":"20"
+ "p_code":"22",
+ "code":"24"
},
{
- "desc":"If ICAgent is uninstalled from a host, log collection will be affected. Exercise caution when performing this operation.Uninstalling ICAgent does not delete the installat",
+ "desc":"If ICAgent is uninstalled from a host, log collection will be affected. Exercise caution when performing this operation.Only ICAgent installed on Linux hosts can be unins",
"product_code":"lts",
"title":"Uninstalling ICAgent",
"uri":"lts_02_0015.html",
"doc_type":"usermanual",
- "p_code":"18",
- "code":"21"
+ "p_code":"22",
+ "code":"25"
},
{
"desc":"The following table lists the ICAgent statuses.",
@@ -194,35 +230,197 @@
"title":"ICAgent Statuses",
"uri":"lts_04_0013.html",
"doc_type":"usermanual",
- "p_code":"18",
- "code":"22"
+ "p_code":"22",
+ "code":"26"
},
{
"desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
"product_code":"lts",
- "title":"Appendixes",
- "uri":"lts_04_0017.html",
+ "title":"Log Search and View",
+ "uri":"lts_05_0004.html",
"doc_type":"usermanual",
"p_code":"",
- "code":"23"
+ "code":"27"
},
{
- "desc":"Log in to the console, hover your cursor over your username in the upper right corner, and select My Credentials.Click the Access Keys tab.Click Create Access Key, and en",
+ "desc":"Follow the directions below to search logs by keyword and time range:On the LTS console, choose Log Management in the navigation pane on the left.In the log group list, c",
+ "product_code":"lts",
+ "title":"Log Search",
+ "uri":"lts_05_0005.html",
+ "doc_type":"usermanual",
+ "p_code":"27",
+ "code":"28"
+ },
+ {
+ "desc":"You can view reported logs on the LTS console in real time.You have created log groups and log streams.You have installed ICAgent.You have configured log collection rules",
+ "product_code":"lts",
+ "title":"Viewing Real-Time Logs",
+ "uri":"lts_05_0006.html",
+ "doc_type":"usermanual",
+ "p_code":"27",
+ "code":"29"
+ },
+ {
+ "desc":"To search for logs using a keyword repeatedly, perform the following operations to configure quick search.On the LTS console, choose Log Management in the navigation pane",
+ "product_code":"lts",
+ "title":"Quick Search",
+ "uri":"lts_05_0009.html",
+ "doc_type":"usermanual",
+ "p_code":"27",
+ "code":"30"
+ },
+ {
+ "desc":"Monitoring keywords in logs helps you keep track of system performance and services. For example, the number of ERROR keywords indicates the system health, and the number",
+ "product_code":"lts",
+ "title":"Quick Analysis",
+ "uri":"lts_05_0007.html",
+ "doc_type":"usermanual",
+ "p_code":"27",
+ "code":"31"
+ },
+ {
+ "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
+ "product_code":"lts",
+ "title":"Log Analysis",
+ "uri":"lts_04_0002.html",
+ "doc_type":"usermanual",
+ "p_code":"",
+ "code":"32"
+ },
+ {
+ "desc":"Log data can be structured or unstructured. Structured data is quantitative data or can be defined by unified data models. It has a fixed length and format. Unstructured ",
+ "product_code":"lts",
+ "title":"Log Structuring",
+ "uri":"lts_0821.html",
+ "doc_type":"usermanual",
+ "p_code":"32",
+ "code":"33"
+ },
+ {
+ "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
+ "product_code":"lts",
+ "title":"Log Transfer",
+ "uri":"lts_04_0040.html",
+ "doc_type":"usermanual",
+ "p_code":"",
+ "code":"34"
+ },
+ {
+ "desc":"Logs reported from hosts and cloud services are retained in LTS for seven days by default. You can set the retention period to be 1 to 30 days. Retained logs are deleted ",
+ "product_code":"lts",
+ "title":"Overview",
+ "uri":"lts_04_0011.html",
+ "doc_type":"usermanual",
+ "p_code":"34",
+ "code":"35"
+ },
+ {
+ "desc":"You can transfer logs to OBS and download log files from the OBS console.To transfer logs, you must have the OBS Administrator permissions apart from the LTS permissions.",
+ "product_code":"lts",
+ "title":"Transferring Logs to OBS",
+ "uri":"lts_04_0041.html",
+ "doc_type":"usermanual",
+ "p_code":"34",
+ "code":"36"
+ },
+ {
+ "desc":"You can use DMS APIs to retrieve logs in real time.Logs have been ingested to LTS.Before registering a DMS Kafka instance, configure an inbound rule to allow access from ",
+ "product_code":"lts",
+ "title":"Transferring Logs to DMS",
+ "uri":"lts_04_0043.html",
+ "doc_type":"usermanual",
+ "p_code":"34",
+ "code":"37"
+ },
+ {
+ "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
+ "product_code":"lts",
+ "title":"Configuration Center",
+ "uri":"lts_06_0003.html",
+ "doc_type":"usermanual",
+ "p_code":"",
+ "code":"38"
+ },
+ {
+ "desc":"To reduce the memory, database, and disk space usage, you can set log collection as required. The log collection switch is used to determine whether to collect log data.T",
+ "product_code":"lts",
+ "title":"Log Collection",
+ "uri":"lts_05_0003.html",
+ "doc_type":"usermanual",
+ "p_code":"38",
+ "code":"39"
+ },
+ {
+ "desc":"This chapter describes how to use Identity and Access Management (IAM) for fine-grained permissions control for your LTS. With IAM, you can:Create IAM users for personnel",
+ "product_code":"lts",
+ "title":"Permissions Management",
+ "uri":"lts_04_0015.html",
+ "doc_type":"usermanual",
+ "p_code":"",
+ "code":"40"
+ },
+ {
+ "desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
+ "product_code":"lts",
+ "title":"FAQs",
+ "uri":"lts_04_0058.html",
+ "doc_type":"usermanual",
+ "p_code":"",
+ "code":"41"
+ },
+ {
+ "desc":"Obtain and use the AK/SK of a public account.Log in to the console, hover your cursor over your username in the upper right corner, and select My Credentials.Click the Ac",
"product_code":"lts",
"title":"How Do I Obtain an AK/SK Pair?",
"uri":"lts_03_0015.html",
"doc_type":"usermanual",
- "p_code":"23",
- "code":"24"
+ "p_code":"41",
+ "code":"42"
},
{
- "desc":"When installing ICAgent, you can create an IAM agency, and ICAgent will automatically obtain an AK/SK pair and generate the ICAgent installation command.Log in to the con",
+ "desc":"No. During log transfer, logs are \"replicated\" to OBS buckets. To view transferred log files, click the name of the corresponding OBS bucket on the Log Transfer page of t",
"product_code":"lts",
- "title":"How Do I Install ICAgent by Creating an Agency?",
- "uri":"lts_03_0002.html",
+ "title":"Does LTS Delete Logs That Have Been Transferred to OBS Buckets?",
+ "uri":"lts_faq_0044.html",
"doc_type":"usermanual",
- "p_code":"23",
- "code":"25"
+ "p_code":"41",
+ "code":"43"
+ },
+ {
+ "desc":"The OBS bucket used for log transfer has been deleted. Specify another bucket.Access control on the OBS bucket is incorrectly configured. Go to the OBS console to correct",
+ "product_code":"lts",
+ "title":"What Are the Common Causes of Abnormal Log Transfer?",
+ "uri":"lts_faq_0314.html",
+ "doc_type":"usermanual",
+ "p_code":"41",
+ "code":"44"
+ },
+ {
+ "desc":"When Cloud Trace Service (CTS) is connected to LTS, a log group and log stream are automatically created for CTS on the LTS console. To transfer CTS logs to OBS, do as fo",
+ "product_code":"lts",
+ "title":"How Do I Transfer CTS Logs to an OBS Bucket?",
+ "uri":"lts_faq_0610.html",
+ "doc_type":"usermanual",
+ "p_code":"41",
+ "code":"45"
+ },
+ {
+ "desc":"No log events are displayed on the Raw Logs tab in a log stream on the LTS console.ICAgent has not been installed.The collection path is incorrectly configured.The Log Co",
+ "product_code":"lts",
+ "title":"What Can I Do If I Cannot View Raw Logs on the LTS Console?",
+ "uri":"lts_faq_0031.html",
+ "doc_type":"usermanual",
+ "p_code":"41",
+ "code":"46"
+ },
+ {
+ "desc":"Host logs. ICAgent should be installed on the target hosts for log collection.Cloud service logs. To collect logs from cloud services, such as Elastic Load Balance (ELB) ",
+ "product_code":"lts",
+ "title":"What Kind of Logs and Files Can LTS Collect?",
+ "uri":"lts_faq_0003.html",
+ "doc_type":"usermanual",
+ "p_code":"41",
+ "code":"47"
},
{
"desc":"HUAWEI CLOUD Help Center presents technical documents to help you quickly get started with HUAWEI CLOUD services. The technical documents include Service Overview, Price Details, Purchase Guide, User Guide, API Reference, Best Practices, FAQs, and Videos.",
@@ -231,6 +429,6 @@
"uri":"lts_01_0033.html",
"doc_type":"usermanual",
"p_code":"",
- "code":"26"
+ "code":"48"
}
]
\ No newline at end of file
diff --git a/docs/lts/umn/en-us_image_0000001119722454.png b/docs/lts/umn/en-us_image_0000001119722454.png
new file mode 100644
index 00000000..dce0ff9b
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001119722454.png differ
diff --git a/docs/lts/umn/en-us_image_0000001119882370.png b/docs/lts/umn/en-us_image_0000001119882370.png
new file mode 100644
index 00000000..71d02b43
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001119882370.png differ
diff --git a/docs/lts/umn/en-us_image_0000001123555094.png b/docs/lts/umn/en-us_image_0000001123555094.png
new file mode 100644
index 00000000..b4eed81e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001123555094.png differ
diff --git a/docs/lts/umn/en-us_image_0000001123715432.png b/docs/lts/umn/en-us_image_0000001123715432.png
new file mode 100644
index 00000000..88bc7990
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001123715432.png differ
diff --git a/docs/lts/umn/en-us_image_0000001165708405.png b/docs/lts/umn/en-us_image_0000001165708405.png
new file mode 100644
index 00000000..ee48eccc
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001165708405.png differ
diff --git a/docs/lts/umn/en-us_image_0000001165793419.png b/docs/lts/umn/en-us_image_0000001165793419.png
new file mode 100644
index 00000000..88bc7990
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001165793419.png differ
diff --git a/docs/lts/umn/en-us_image_0000001166602143.png b/docs/lts/umn/en-us_image_0000001166602143.png
new file mode 100644
index 00000000..ee48eccc
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001166602143.png differ
diff --git a/docs/lts/umn/en-us_image_0000001166682183.png b/docs/lts/umn/en-us_image_0000001166682183.png
new file mode 100644
index 00000000..71d02b43
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001166682183.png differ
diff --git a/docs/lts/umn/en-us_image_0000001217747176.png b/docs/lts/umn/en-us_image_0000001217747176.png
new file mode 100644
index 00000000..dd00276e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001217747176.png differ
diff --git a/docs/lts/umn/en-us_image_0000001217754022.png b/docs/lts/umn/en-us_image_0000001217754022.png
new file mode 100644
index 00000000..2d8d1ffe
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001217754022.png differ
diff --git a/docs/lts/umn/en-us_image_0000001217758588.png b/docs/lts/umn/en-us_image_0000001217758588.png
new file mode 100644
index 00000000..dd00276e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001217758588.png differ
diff --git a/docs/lts/umn/en-us_image_0000001252099070.png b/docs/lts/umn/en-us_image_0000001252099070.png
new file mode 100644
index 00000000..97cad98b
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001252099070.png differ
diff --git a/docs/lts/umn/en-us_image_0000001252258790.png b/docs/lts/umn/en-us_image_0000001252258790.png
new file mode 100644
index 00000000..c574fda5
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001252258790.png differ
diff --git a/docs/lts/umn/en-us_image_0000001259071431.png b/docs/lts/umn/en-us_image_0000001259071431.png
new file mode 100644
index 00000000..509394a5
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001259071431.png differ
diff --git a/docs/lts/umn/en-us_image_0000001259310901.png b/docs/lts/umn/en-us_image_0000001259310901.png
new file mode 100644
index 00000000..de71ab7e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001259310901.png differ
diff --git a/docs/lts/umn/en-us_image_0000001261967201.png b/docs/lts/umn/en-us_image_0000001261967201.png
new file mode 100644
index 00000000..00354274
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001261967201.png differ
diff --git a/docs/lts/umn/en-us_image_0000001262546024.png b/docs/lts/umn/en-us_image_0000001262546024.png
new file mode 100644
index 00000000..be53803d
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001262546024.png differ
diff --git a/docs/lts/umn/en-us_image_0000001262546228.png b/docs/lts/umn/en-us_image_0000001262546228.png
new file mode 100644
index 00000000..35b51a80
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001262546228.png differ
diff --git a/docs/lts/umn/en-us_image_0000001262550041.png b/docs/lts/umn/en-us_image_0000001262550041.png
new file mode 100644
index 00000000..2d8d1ffe
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001262550041.png differ
diff --git a/docs/lts/umn/en-us_image_0000001262557500.png b/docs/lts/umn/en-us_image_0000001262557500.png
new file mode 100644
index 00000000..16f256b0
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001262557500.png differ
diff --git a/docs/lts/umn/en-us_image_0000001262713829.png b/docs/lts/umn/en-us_image_0000001262713829.png
new file mode 100644
index 00000000..f72e1e35
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001262713829.png differ
diff --git a/docs/lts/umn/en-us_image_0000001262717056.png b/docs/lts/umn/en-us_image_0000001262717056.png
new file mode 100644
index 00000000..dd00276e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001262717056.png differ
diff --git a/docs/lts/umn/en-us_image_0000001298698089.png b/docs/lts/umn/en-us_image_0000001298698089.png
new file mode 100644
index 00000000..cdb95281
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001298698089.png differ
diff --git a/docs/lts/umn/en-us_image_0000001309911389.png b/docs/lts/umn/en-us_image_0000001309911389.png
new file mode 100644
index 00000000..f31159ba
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001309911389.png differ
diff --git a/docs/lts/umn/en-us_image_0000001310281889.png b/docs/lts/umn/en-us_image_0000001310281889.png
new file mode 100644
index 00000000..9f4508e5
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001310281889.png differ
diff --git a/docs/lts/umn/en-us_image_0000001316788136.png b/docs/lts/umn/en-us_image_0000001316788136.png
new file mode 100644
index 00000000..2dfc7710
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001316788136.png differ
diff --git a/docs/lts/umn/en-us_image_0000001320576858.png b/docs/lts/umn/en-us_image_0000001320576858.png
new file mode 100644
index 00000000..b63fe404
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001320576858.png differ
diff --git a/docs/lts/umn/en-us_image_0000001368010788.png b/docs/lts/umn/en-us_image_0000001368010788.png
deleted file mode 100644
index 796e7400..00000000
Binary files a/docs/lts/umn/en-us_image_0000001368010788.png and /dev/null differ
diff --git a/docs/lts/umn/en-us_image_0000001368172808.png b/docs/lts/umn/en-us_image_0000001368172808.png
deleted file mode 100644
index 69c4544a..00000000
Binary files a/docs/lts/umn/en-us_image_0000001368172808.png and /dev/null differ
diff --git a/docs/lts/umn/en-us_image_0000001380340913.png b/docs/lts/umn/en-us_image_0000001380340913.png
new file mode 100644
index 00000000..b7cb3079
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001380340913.png differ
diff --git a/docs/lts/umn/en-us_image_0000001408943272.png b/docs/lts/umn/en-us_image_0000001408943272.png
new file mode 100644
index 00000000..02efcbce
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001408943272.png differ
diff --git a/docs/lts/umn/en-us_image_0000001409026272.png b/docs/lts/umn/en-us_image_0000001409026272.png
new file mode 100644
index 00000000..03c68b81
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001409026272.png differ
diff --git a/docs/lts/umn/en-us_image_0000001409428384.png b/docs/lts/umn/en-us_image_0000001409428384.png
new file mode 100644
index 00000000..c569018a
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001409428384.png differ
diff --git a/docs/lts/umn/en-us_image_0000001409812490.png b/docs/lts/umn/en-us_image_0000001409812490.png
new file mode 100644
index 00000000..fa3b559f
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001409812490.png differ
diff --git a/docs/lts/umn/en-us_image_0000001409813242.png b/docs/lts/umn/en-us_image_0000001409813242.png
new file mode 100644
index 00000000..d86961cc
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001409813242.png differ
diff --git a/docs/lts/umn/en-us_image_0000001409870506.png b/docs/lts/umn/en-us_image_0000001409870506.png
new file mode 100644
index 00000000..9868734e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001409870506.png differ
diff --git a/docs/lts/umn/en-us_image_0000001410024590.png b/docs/lts/umn/en-us_image_0000001410024590.png
new file mode 100644
index 00000000..85f45715
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001410024590.png differ
diff --git a/docs/lts/umn/en-us_image_0000001410029766.png b/docs/lts/umn/en-us_image_0000001410029766.png
new file mode 100644
index 00000000..6393cade
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001410029766.png differ
diff --git a/docs/lts/umn/en-us_image_0000001410131678.png b/docs/lts/umn/en-us_image_0000001410131678.png
new file mode 100644
index 00000000..5e676426
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001410131678.png differ
diff --git a/docs/lts/umn/en-us_image_0000001410398388.png b/docs/lts/umn/en-us_image_0000001410398388.png
new file mode 100644
index 00000000..5fd20a4e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001410398388.png differ
diff --git a/docs/lts/umn/en-us_image_0000001410422998.png b/docs/lts/umn/en-us_image_0000001410422998.png
new file mode 100644
index 00000000..642987f4
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001410422998.png differ
diff --git a/docs/lts/umn/en-us_image_0000001413544114.png b/docs/lts/umn/en-us_image_0000001413544114.png
new file mode 100644
index 00000000..dd00276e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001413544114.png differ
diff --git a/docs/lts/umn/en-us_image_0000001419010601.png b/docs/lts/umn/en-us_image_0000001419010601.png
deleted file mode 100644
index 3099d744..00000000
Binary files a/docs/lts/umn/en-us_image_0000001419010601.png and /dev/null differ
diff --git a/docs/lts/umn/en-us_image_0000001419012101.png b/docs/lts/umn/en-us_image_0000001419012101.png
deleted file mode 100644
index 528d7b2d..00000000
Binary files a/docs/lts/umn/en-us_image_0000001419012101.png and /dev/null differ
diff --git a/docs/lts/umn/en-us_image_0000001421609924.png b/docs/lts/umn/en-us_image_0000001421609924.png
new file mode 100644
index 00000000..85781b47
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001421609924.png differ
diff --git a/docs/lts/umn/en-us_image_0000001424310404.png b/docs/lts/umn/en-us_image_0000001424310404.png
new file mode 100644
index 00000000..b0db4e32
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001424310404.png differ
diff --git a/docs/lts/umn/en-us_image_0000001424418870.png b/docs/lts/umn/en-us_image_0000001424418870.png
new file mode 100644
index 00000000..8a2ad03e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001424418870.png differ
diff --git a/docs/lts/umn/en-us_image_0000001424467768.png b/docs/lts/umn/en-us_image_0000001424467768.png
new file mode 100644
index 00000000..5fb3b98f
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001424467768.png differ
diff --git a/docs/lts/umn/en-us_image_0000001424573742.png b/docs/lts/umn/en-us_image_0000001424573742.png
new file mode 100644
index 00000000..c124a272
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001424573742.png differ
diff --git a/docs/lts/umn/en-us_image_0000001424625748.png b/docs/lts/umn/en-us_image_0000001424625748.png
new file mode 100644
index 00000000..8a2ad03e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001424625748.png differ
diff --git a/docs/lts/umn/en-us_image_0000001424736642.png b/docs/lts/umn/en-us_image_0000001424736642.png
new file mode 100644
index 00000000..8b48bcbf
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001424736642.png differ
diff --git a/docs/lts/umn/en-us_image_0000001424778604.png b/docs/lts/umn/en-us_image_0000001424778604.png
new file mode 100644
index 00000000..a2359b99
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001424778604.png differ
diff --git a/docs/lts/umn/en-us_image_0000001424794336.png b/docs/lts/umn/en-us_image_0000001424794336.png
new file mode 100644
index 00000000..0c0cccaf
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001424794336.png differ
diff --git a/docs/lts/umn/en-us_image_0000001427369668.png b/docs/lts/umn/en-us_image_0000001427369668.png
new file mode 100644
index 00000000..bef994c4
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001427369668.png differ
diff --git a/docs/lts/umn/en-us_image_0000001459223293.png b/docs/lts/umn/en-us_image_0000001459223293.png
new file mode 100644
index 00000000..1af4d8c9
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001459223293.png differ
diff --git a/docs/lts/umn/en-us_image_0000001459223925.png b/docs/lts/umn/en-us_image_0000001459223925.png
new file mode 100644
index 00000000..647d79eb
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001459223925.png differ
diff --git a/docs/lts/umn/en-us_image_0000001459383609.png b/docs/lts/umn/en-us_image_0000001459383609.png
new file mode 100644
index 00000000..33d834b2
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001459383609.png differ
diff --git a/docs/lts/umn/en-us_image_0000001459387853.png b/docs/lts/umn/en-us_image_0000001459387853.png
new file mode 100644
index 00000000..33204f60
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001459387853.png differ
diff --git a/docs/lts/umn/en-us_image_0000001459614217.png b/docs/lts/umn/en-us_image_0000001459614217.png
new file mode 100644
index 00000000..81ea8619
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001459614217.png differ
diff --git a/docs/lts/umn/en-us_image_0000001459633297.png b/docs/lts/umn/en-us_image_0000001459633297.png
new file mode 100644
index 00000000..9f76b8ee
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001459633297.png differ
diff --git a/docs/lts/umn/en-us_image_0000001459871273.png b/docs/lts/umn/en-us_image_0000001459871273.png
new file mode 100644
index 00000000..58f396c3
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001459871273.png differ
diff --git a/docs/lts/umn/en-us_image_0000001459892873.png b/docs/lts/umn/en-us_image_0000001459892873.png
new file mode 100644
index 00000000..fd4ce154
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001459892873.png differ
diff --git a/docs/lts/umn/en-us_image_0000001459933005.png b/docs/lts/umn/en-us_image_0000001459933005.png
new file mode 100644
index 00000000..04334fee
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001459933005.png differ
diff --git a/docs/lts/umn/en-us_image_0000001460010097.png b/docs/lts/umn/en-us_image_0000001460010097.png
new file mode 100644
index 00000000..2b15d3fa
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001460010097.png differ
diff --git a/docs/lts/umn/en-us_image_0000001460235561.png b/docs/lts/umn/en-us_image_0000001460235561.png
new file mode 100644
index 00000000..18116816
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001460235561.png differ
diff --git a/docs/lts/umn/en-us_image_0000001460330173.png b/docs/lts/umn/en-us_image_0000001460330173.png
new file mode 100644
index 00000000..41413064
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001460330173.png differ
diff --git a/docs/lts/umn/en-us_image_0000001460332517.png b/docs/lts/umn/en-us_image_0000001460332517.png
new file mode 100644
index 00000000..d522b4d4
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001460332517.png differ
diff --git a/docs/lts/umn/en-us_image_0000001460346829.png b/docs/lts/umn/en-us_image_0000001460346829.png
new file mode 100644
index 00000000..af78b5c9
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001460346829.png differ
diff --git a/docs/lts/umn/en-us_image_0000001460349521.png b/docs/lts/umn/en-us_image_0000001460349521.png
new file mode 100644
index 00000000..8665bf2e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001460349521.png differ
diff --git a/docs/lts/umn/en-us_image_0000001460358861.png b/docs/lts/umn/en-us_image_0000001460358861.png
new file mode 100644
index 00000000..64cb8dc0
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001460358861.png differ
diff --git a/docs/lts/umn/en-us_image_0000001460416749.png b/docs/lts/umn/en-us_image_0000001460416749.png
new file mode 100644
index 00000000..459a0046
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001460416749.png differ
diff --git a/docs/lts/umn/en-us_image_0000001460454221.png b/docs/lts/umn/en-us_image_0000001460454221.png
new file mode 100644
index 00000000..9bd9a998
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001460454221.png differ
diff --git a/docs/lts/umn/en-us_image_0000001460456897.png b/docs/lts/umn/en-us_image_0000001460456897.png
new file mode 100644
index 00000000..b0eed24b
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001460456897.png differ
diff --git a/docs/lts/umn/en-us_image_0000001463823649.png b/docs/lts/umn/en-us_image_0000001463823649.png
new file mode 100644
index 00000000..dd00276e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001463823649.png differ
diff --git a/docs/lts/umn/en-us_image_0000001463903813.png b/docs/lts/umn/en-us_image_0000001463903813.png
new file mode 100644
index 00000000..dd00276e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001463903813.png differ
diff --git a/docs/lts/umn/en-us_image_0000001474096881.png b/docs/lts/umn/en-us_image_0000001474096881.png
new file mode 100644
index 00000000..d91746c8
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001474096881.png differ
diff --git a/docs/lts/umn/en-us_image_0000001474342201.png b/docs/lts/umn/en-us_image_0000001474342201.png
new file mode 100644
index 00000000..2af5afe7
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001474342201.png differ
diff --git a/docs/lts/umn/en-us_image_0000001474530441.png b/docs/lts/umn/en-us_image_0000001474530441.png
new file mode 100644
index 00000000..10841356
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001474530441.png differ
diff --git a/docs/lts/umn/en-us_image_0000001474668261.png b/docs/lts/umn/en-us_image_0000001474668261.png
new file mode 100644
index 00000000..71160afb
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001474668261.png differ
diff --git a/docs/lts/umn/en-us_image_0000001476964177.png b/docs/lts/umn/en-us_image_0000001476964177.png
new file mode 100644
index 00000000..5fd20a4e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001476964177.png differ
diff --git a/docs/lts/umn/en-us_image_0000001477332293.png b/docs/lts/umn/en-us_image_0000001477332293.png
new file mode 100644
index 00000000..8a2dd43e
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001477332293.png differ
diff --git a/docs/lts/umn/en-us_image_0000001481236306.png b/docs/lts/umn/en-us_image_0000001481236306.png
new file mode 100644
index 00000000..57971e3a
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001481236306.png differ
diff --git a/docs/lts/umn/en-us_image_0000001500031545.png b/docs/lts/umn/en-us_image_0000001500031545.png
new file mode 100644
index 00000000..f26f3424
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001500031545.png differ
diff --git a/docs/lts/umn/en-us_image_0000001536874781.png b/docs/lts/umn/en-us_image_0000001536874781.png
new file mode 100644
index 00000000..be2c9ea1
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001536874781.png differ
diff --git a/docs/lts/umn/en-us_image_0000001543219709.png b/docs/lts/umn/en-us_image_0000001543219709.png
new file mode 100644
index 00000000..f5d013a2
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001543219709.png differ
diff --git a/docs/lts/umn/en-us_image_0000001561940610.png b/docs/lts/umn/en-us_image_0000001561940610.png
new file mode 100644
index 00000000..ca90ce63
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001561940610.png differ
diff --git a/docs/lts/umn/en-us_image_0000001565005278.png b/docs/lts/umn/en-us_image_0000001565005278.png
new file mode 100644
index 00000000..02efcbce
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001565005278.png differ
diff --git a/docs/lts/umn/en-us_image_0000001576175974.png b/docs/lts/umn/en-us_image_0000001576175974.png
new file mode 100644
index 00000000..c9d020ac
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001576175974.png differ
diff --git a/docs/lts/umn/en-us_image_0000001576328724.png b/docs/lts/umn/en-us_image_0000001576328724.png
new file mode 100644
index 00000000..04bc317c
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001576328724.png differ
diff --git a/docs/lts/umn/en-us_image_0000001576333032.png b/docs/lts/umn/en-us_image_0000001576333032.png
new file mode 100644
index 00000000..513fef15
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001576333032.png differ
diff --git a/docs/lts/umn/en-us_image_0000001588482889.png b/docs/lts/umn/en-us_image_0000001588482889.png
new file mode 100644
index 00000000..fc686da4
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001588482889.png differ
diff --git a/docs/lts/umn/en-us_image_0000001611750029.png b/docs/lts/umn/en-us_image_0000001611750029.png
new file mode 100644
index 00000000..e0ae09a6
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001611750029.png differ
diff --git a/docs/lts/umn/en-us_image_0000001612861593.png b/docs/lts/umn/en-us_image_0000001612861593.png
new file mode 100644
index 00000000..ca90ce63
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001612861593.png differ
diff --git a/docs/lts/umn/en-us_image_0000001614981805.png b/docs/lts/umn/en-us_image_0000001614981805.png
new file mode 100644
index 00000000..814fbce3
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001614981805.png differ
diff --git a/docs/lts/umn/en-us_image_0000001615509865.png b/docs/lts/umn/en-us_image_0000001615509865.png
new file mode 100644
index 00000000..02efcbce
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001615509865.png differ
diff --git a/docs/lts/umn/en-us_image_0000001626564893.png b/docs/lts/umn/en-us_image_0000001626564893.png
new file mode 100644
index 00000000..3c9fba2b
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001626564893.png differ
diff --git a/docs/lts/umn/en-us_image_0000001626584937.png b/docs/lts/umn/en-us_image_0000001626584937.png
new file mode 100644
index 00000000..0d894863
Binary files /dev/null and b/docs/lts/umn/en-us_image_0000001626584937.png differ
diff --git a/docs/lts/umn/en-us_image_0178020286.png b/docs/lts/umn/en-us_image_0178020286.png
deleted file mode 100644
index 2df583a1..00000000
Binary files a/docs/lts/umn/en-us_image_0178020286.png and /dev/null differ
diff --git a/docs/lts/umn/en-us_image_0224007615.png b/docs/lts/umn/en-us_image_0224007615.png
deleted file mode 100644
index 9c2cc1b3..00000000
Binary files a/docs/lts/umn/en-us_image_0224007615.png and /dev/null differ
diff --git a/docs/lts/umn/en-us_image_0224007618.png b/docs/lts/umn/en-us_image_0224007618.png
deleted file mode 100644
index f3ec3896..00000000
Binary files a/docs/lts/umn/en-us_image_0224007618.png and /dev/null differ
diff --git a/docs/lts/umn/en-us_image_0224007629.png b/docs/lts/umn/en-us_image_0224007629.png
deleted file mode 100644
index add263ef..00000000
Binary files a/docs/lts/umn/en-us_image_0224007629.png and /dev/null differ
diff --git a/docs/lts/umn/en-us_image_0224007642.png b/docs/lts/umn/en-us_image_0224007642.png
deleted file mode 100644
index a82a1885..00000000
Binary files a/docs/lts/umn/en-us_image_0224007642.png and /dev/null differ
diff --git a/docs/lts/umn/en-us_image_0224007643.png b/docs/lts/umn/en-us_image_0224007643.png
deleted file mode 100644
index 96735ebf..00000000
Binary files a/docs/lts/umn/en-us_image_0224007643.png and /dev/null differ
diff --git a/docs/lts/umn/en-us_image_0224007663.png b/docs/lts/umn/en-us_image_0224007663.png
deleted file mode 100644
index b62af39d..00000000
Binary files a/docs/lts/umn/en-us_image_0224007663.png and /dev/null differ
diff --git a/docs/lts/umn/en-us_image_0224007683.png b/docs/lts/umn/en-us_image_0224007683.png
deleted file mode 100644
index addc5197..00000000
Binary files a/docs/lts/umn/en-us_image_0224007683.png and /dev/null differ
diff --git a/docs/lts/umn/en-us_image_0224007684.png b/docs/lts/umn/en-us_image_0224007684.png
deleted file mode 100644
index 1e55d0de..00000000
Binary files a/docs/lts/umn/en-us_image_0224007684.png and /dev/null differ
diff --git a/docs/lts/umn/en-us_image_0224007690.png b/docs/lts/umn/en-us_image_0224007690.png
deleted file mode 100644
index f9d5799a..00000000
Binary files a/docs/lts/umn/en-us_image_0224007690.png and /dev/null differ
diff --git a/docs/lts/umn/en-us_image_0231061605.png b/docs/lts/umn/en-us_image_0231061605.png
new file mode 100644
index 00000000..7ab00e59
Binary files /dev/null and b/docs/lts/umn/en-us_image_0231061605.png differ
diff --git a/docs/lts/umn/lts-03205.html b/docs/lts/umn/lts-03205.html
new file mode 100644
index 00000000..bac45c40
--- /dev/null
+++ b/docs/lts/umn/lts-03205.html
@@ -0,0 +1,717 @@
+
+
+
Permissions Management
+Description
If you need to assign different permissions to employees in your enterprise to access your LTS resources, IAM is a good choice for fine-grained permissions management. IAM provides identity authentication, permissions management, and access control, helping you secure access to your LTS resources.
+
With IAM, you can use your account to create IAM users for your employees, and assign permissions to the users to control their access to LTS resources. For example, some software developers in your enterprise need to use LTS resources but should not delete them or perform other high-risk operations. In this case, you can create IAM users for the software developers and grant them only the permissions required.
+
If your account does not need individual IAM users for permissions management, you may skip over this section.
+
IAM can be used for free. You pay only for the resources in your account. For more information about IAM, see IAM Service Overview.
+
+
LTS Permissions
By default, new IAM users do not have permissions assigned. You need to add users to one or more groups, and attach permissions policies or roles to these groups. Users inherit permissions from the groups to which they are added and can perform specified operations on cloud services based on the permissions.
+
LTS is a project-level service deployed and accessed in specific physical regions. To assign LTS permissions to a user group, specify the scope as region-specific projects and select projects for the permissions to take effect. If All projects is selected, the permissions will take effect for the user group in all region-specific projects. When accessing LTS, the users need to switch to a region where they have been authorized to use LTS.
+
Policies: A type of fine-grained authorization mechanism that defines permissions required to perform operations on specific cloud resources under certain conditions. This mechanism allows for more flexible policy-based authorization, meeting requirements for secure access control. For example, you can grant Elastic Cloud Server (ECS) users only the permissions for managing a certain type of ECSs. Most policies define permissions based on APIs.
+
The system permissions supported by LTS are listed in Table 1.
+
+
Table 1 LTS system permissionsName
+ |
+Description
+ |
+Type
+ |
+Dependency
+ |
+
+
+LTS FullAccess
+ |
+Full permissions for LTS. Users with these permissions can perform operations on LTS.
+ |
+System-defined policy
+ |
+CCE Administrator, OBS Administrator, and AOM FullAccess
+ |
+
+LTS ReadOnlyAccess
+ |
+Read-only permissions for LTS. Users with these permissions can only view LTS data.
+ |
+System-defined policy
+ |
+CCE Administrator, OBS Administrator, and AOM FullAccess
+ |
+
+LTS Administrator
+ |
+Administrator permissions for LTS.
+ |
+System-defined role
+ |
+This role is dependent on the Tenant Guest and Tenant Administrator roles.
+ |
+
+
+
+
+
Table 2 lists the common operations supported by each system-defined policy and role of LTS. Choose the appropriate policies and roles according to this table.
+
+
Table 2 Common operations supported by each LTS system policy or roleOperation
+ |
+LTS FullAccess
+ |
+LTS ReadOnlyAccess
+ |
+LTS Administrator
+ |
+
+
+Querying a log group
+ |
+√
+ |
+√
+ |
+√
+ |
+
+Creating a log group
+ |
+√
+ |
+×
+ |
+√
+ |
+
+Modifying a log group
+ |
+√
+ |
+×
+ |
+√
+ |
+
+Deleting a log group
+ |
+√
+ |
+×
+ |
+√
+ |
+
+Querying a log stream
+ |
+√
+ |
+√
+ |
+√
+ |
+
+Creating a log stream
+ |
+√
+ |
+×
+ |
+√
+ |
+
+Modifying a log stream
+ |
+√
+ |
+×
+ |
+√
+ |
+
+Deleting a log stream
+ |
+√
+ |
+×
+ |
+√
+ |
+
+Configuring log collection from hosts
+ |
+√
+ |
+×
+ |
+√
+ |
+
+Viewing a log transfer task
+ |
+√
+ |
+√
+ |
+√
+ |
+
+Creating a log transfer task
+ |
+√
+ |
+×
+ |
+√
+ |
+
+Modifying a log transfer task
+ |
+√
+ |
+×
+ |
+√
+ |
+
+Deleting a log transfer task
+ |
+√
+ |
+×
+ |
+√
+ |
+
+Enabling a log transfer task
+ |
+√
+ |
+×
+ |
+√
+ |
+
+Disabling a log transfer task
+ |
+√
+ |
+×
+ |
+√
+ |
+
+Installing ICAgent
+ |
+√
+ |
+×
+ |
+√
+ |
+
+Upgrading ICAgent
+ |
+√
+ |
+×
+ |
+√
+ |
+
+Uninstalling ICAgent
+ |
+√
+ |
+×
+ |
+√
+ |
+
+
+
+
+
To use a custom fine-grained policy, log in to IAM as the administrator and select fine-grained permissions of LTS as required.
+
Table 3 describes fine-grained permission dependencies of LTS.
+
+
Table 3 Fine-grained permission dependencies of LTSPermission
+ |
+Description
+ |
+Dependency
+ |
+
+
+lts:agents:list
+ |
+List agents
+ |
+None
+ |
+
+lts:buckets:get
+ |
+Get bucket
+ |
+None
+ |
+
+lts:groups:put
+ |
+Put log group
+ |
+None
+ |
+
+lts:transfers:create
+ |
+Create transfer
+ |
+obs:bucket:PutBucketAcl
+obs:bucket:GetBucketAcl
+obs:bucket:GetEncryptionConfiguration
+obs:bucket:HeadBucket
+dis:streams:list
+dis:streamPolicies:list
+ |
+
+lts:groups:get
+ |
+Get log group
+ |
+None
+ |
+
+lts:transfers:put
+ |
+Put transfer
+ |
+obs:bucket:PutBucketAcl
+obs:bucket:GetBucketAcl
+obs:bucket:GetEncryptionConfiguration
+obs:bucket:HeadBucket
+dis:streams:list
+dis:streamPolicies:list
+ |
+
+lts:resourceTags:delete
+ |
+Delete resource tag
+ |
+None
+ |
+
+lts:ecsOsLogPaths:list
+ |
+List ecs os logs paths
+ |
+None
+ |
+
+lts:structConfig:create
+ |
+Create struct config
+ |
+None
+ |
+
+lts:agentsConf:get
+ |
+Get agent conf
+ |
+None
+ |
+
+lts:logIndex:list
+ |
+Get log index
+ |
+None
+ |
+
+lts:transfers:delete
+ |
+Delete transfer
+ |
+None
+ |
+
+lts:regex:create
+ |
+Create struct regex
+ |
+None
+ |
+
+lts:subscriptions:delete
+ |
+Delete subscription
+ |
+None
+ |
+
+lts:overviewLogsLast:list
+ |
+List overview last logs
+ |
+None
+ |
+
+lts:logIndex:get
+ |
+Get log index
+ |
+None
+ |
+
+lts:sqlalarmrules:create
+ |
+Create alarm options
+ |
+None
+ |
+
+lts:agentsConf:create
+ |
+Create agent conf
+ |
+None
+ |
+
+lts:sqlalarmrules:get
+ |
+Get alarm options
+ |
+None
+ |
+
+lts:datasources:batchdelete
+ |
+Batch delete datasource
+ |
+None
+ |
+
+lts:structConfig:put
+ |
+Update struct config
+ |
+None
+ |
+
+lts:groups:list
+ |
+List log groups
+ |
+None
+ |
+
+lts:sqlalarmrules:delete
+ |
+Delete alarm options
+ |
+None
+ |
+
+lts:transfers:action
+ |
+Enable transfer
+ |
+None
+ |
+
+lts:datasources:post
+ |
+Post datasource
+ |
+None
+ |
+
+lts:topics:create
+ |
+Create log topic
+ |
+None
+ |
+
+lts:resourceTags:get
+ |
+Query resource tags
+ |
+None
+ |
+
+lts:logs:list
+ |
+List logs
+ |
+None
+ |
+
+lts:subscriptions:create
+ |
+Create subscription
+ |
+None
+ |
+
+lts:overviewLogsTopTopic:get
+ |
+List overview top logs
+ |
+None
+ |
+
+lts:datasources:put
+ |
+Put datasource
+ |
+None
+ |
+
+lts:structConfig:delete
+ |
+Delete struct config
+ |
+None
+ |
+
+lts:logIndex:delete
+ |
+Deleting a specified log index
+ |
+None
+ |
+
+lts:topics:delete
+ |
+Delete log topics
+ |
+None
+ |
+
+lts:agentSupportedOsLogPaths:list
+ |
+List agent supported os logs paths
+ |
+None
+ |
+
+lts:topics:put
+ |
+Put log topic
+ |
+None
+ |
+
+lts:agentHeartbeat:post
+ |
+Post agent heartbeat
+ |
+None
+ |
+
+lts:logsByName:upload
+ |
+Upload logs by name
+ |
+None
+ |
+
+lts:buckets:list
+ |
+List buckets
+ |
+None
+ |
+
+lts:logIndex:post
+ |
+Create log index
+ |
+None
+ |
+
+lts:logContext:list
+ |
+List logs context
+ |
+None
+ |
+
+lts:groups:delete
+ |
+Delete log group
+ |
+None
+ |
+
+lts:resourceTags:put
+ |
+Update resource tags
+ |
+None
+ |
+
+lts:structConfig:get
+ |
+Get struct config
+ |
+None
+ |
+
+lts:overviewLogTotal:get
+ |
+Get overview logs total
+ |
+None
+ |
+
+lts:subscriptions:put
+ |
+Put subscription
+ |
+None
+ |
+
+lts:subscriptions:list
+ |
+List subscription
+ |
+None
+ |
+
+lts:datasources:delete
+ |
+Delete datasource
+ |
+None
+ |
+
+lts:transfersStatus:get
+ |
+List transfer status
+ |
+None
+ |
+
+lts:logIndex:put
+ |
+Put log index
+ |
+None
+ |
+
+lts:sqlalarmrules:put
+ |
+Modify alarm options
+ |
+None
+ |
+
+lts:logs:upload
+ |
+Upload logs
+ |
+None
+ |
+
+lts:agentDetails:list
+ |
+List agent diagnostic log
+ |
+None
+ |
+
+lts:agentsConf:put
+ |
+Put agent conf
+ |
+None
+ |
+
+lts:logstreams:list
+ |
+Check logstream resources
+ |
+None
+ |
+
+lts:subscriptions:get
+ |
+Get subscription
+ |
+None
+ |
+
+lts:disStreams:list
+ |
+Query DIS pipe
+ |
+None
+ |
+
+lts:groupTopics:put
+ |
+Create log group and log topic
+ |
+None
+ |
+
+lts:resourceInstance:list
+ |
+Query resource instance
+ |
+None
+ |
+
+lts:transfers:list
+ |
+List transfers
+ |
+None
+ |
+
+lts:topics:get
+ |
+Get log topic
+ |
+None
+ |
+
+lts:agentsConf:delete
+ |
+Delete agent conf
+ |
+None
+ |
+
+lts:agentEcs:list
+ |
+List agent ecs
+ |
+None
+ |
+
+lts:indiceLogs:list
+ |
+Search indiceLogs
+ |
+None
+ |
+
+lts:topics:list
+ |
+List log topic
+ |
+None
+ |
+
+
+
+
+
+
+
+
diff --git a/docs/lts/umn/lts-0740.html b/docs/lts/umn/lts-0740.html
new file mode 100644
index 00000000..17c65a08
--- /dev/null
+++ b/docs/lts/umn/lts-0740.html
@@ -0,0 +1,125 @@
+
+
+Usage Restrictions
+This section describes the restrictions on LTS log read/write.
+
+
Table 1 Log read/write restrictionsScope
+ |
+Item
+ |
+Description
+ |
+Remarks
+ |
+
+
+Account
+ |
+Log write traffic
+ |
+Logs can be written at up to 5 MB/s in an account.
+ |
+To increase the upper limit, contact technical support engineers.
+ |
+
+Log writes
+ |
+Logs can be written up to 1000 times per second in an account.
+ |
+To increase the upper limit, contact technical support engineers.
+ |
+
+Log query
+ |
+Up to 1 MB of logs can be returned in a single API query for an account.
+ |
+To increase the upper limit, contact technical support engineers.
+ |
+
+Log reads
+ |
+Logs can be read up to 100 times per minute in an account.
+ |
+To increase the upper limit, contact technical support engineers.
+ |
+
+
+ Log group
+ |
+Log write traffic
+ |
+Logs can be written at up to 5 MB/s in a log group.
+ |
+Not mandatory. Service quality cannot be ensured if this limit is exceeded.
+ |
+
+Log writes
+ |
+Logs can be written up to 100 times per second in a log group.
+ |
+Not mandatory. Service quality cannot be ensured if this limit is exceeded.
+ |
+
+Log query traffic
+ |
+Up to 10 MB of logs can be returned in a single API query for a log group.
+ |
+N/A
+
+ |
+
+Log reads
+ |
+Logs can be read up to 50 times per minute in a log group.
+ |
+Not mandatory. Service quality cannot be ensured if this limit is exceeded.
+
+ |
+
+Log stream
+ |
+Log write traffic
+ |
+Logs can be written at up to 5 MB/s in a log stream.
+ |
+Not mandatory. Service quality cannot be ensured if this limit is exceeded.
+ |
+
+Log writes
+ |
+Logs can be written up to 50 times per second in a log stream.
+ |
+Not mandatory. Service quality cannot be ensured if this limit is exceeded.
+ |
+
+Log query traffic
+ |
+Up to 10 MB of logs can be returned in a single API query for a log stream.
+ |
+N/A
+ |
+
+Log reads
+ |
+Logs can be read up to 10 times per minute in a log stream.
+ |
+Not mandatory. Service quality cannot be ensured if this limit is exceeded.
+ |
+
+Log time
+ |
+Logs in a period of 24 hours can be collected. Logs generated 24 hours before or after the current time cannot be collected.
+ |
+N/A
+ |
+
+
+
+
+
+
+
diff --git a/docs/lts/umn/lts_01_0001.html b/docs/lts/umn/lts_01_0001.html
index dcccf665..c943b618 100644
--- a/docs/lts/umn/lts_01_0001.html
+++ b/docs/lts/umn/lts_01_0001.html
@@ -10,6 +10,10 @@
Basic Functions
+Usage Restrictions
+
+Permissions Management
+
Related Services
diff --git a/docs/lts/umn/lts_01_0005.html b/docs/lts/umn/lts_01_0005.html
index 7706657f..7c78f6a3 100644
--- a/docs/lts/umn/lts_01_0005.html
+++ b/docs/lts/umn/lts_01_0005.html
@@ -11,7 +11,7 @@
WAF
You can record attack logs and access logs of Web Application Firewall (WAF) in LTS, and use the logs for real-time decision-making, device O&M, and service trend analysis.
-ELB
You can ingest access logs of Elastic Load Balance (ELB) to a log stream of a log group in LTS. Then you can check the access logs for details on HTTP and HTTPS requests sent to your load balancer and perform log analysis.
+
ELB
You can ingest access logs of Elastic Load Balance (ELB) to a log stream of a log group in LTS. Then you can check the access logs to view details about HTTP and HTTPS requests sent to layer 7 load balancers and perform log analysis.
diff --git a/docs/lts/umn/lts_01_0009.html b/docs/lts/umn/lts_01_0009.html
index 2266d75e..0005ef8b 100644
--- a/docs/lts/umn/lts_01_0009.html
+++ b/docs/lts/umn/lts_01_0009.html
@@ -2,39 +2,39 @@
Configuring the VPC Flow Log Function
Scenarios
A Virtual Private Cloud (VPC) flow log captures information about the traffic going to and from your VPC. You can use flow logs to monitor network traffic, analyze network attacks, and determine whether security groups and firewall rules need to be modified.
-
To obtain traffic details of VPC Network Interface Cards (NICs), you can enable Log Tank Service (LTS) and view logs about the NICs on the LTS console.
-
This section describes how to create a VPC flow log to report logs to LTS.
+
You can obtain traffic details of VPC Network Interface Cards (NICs) from their logs on the LTS console.
+
This section describes how to configure VPC flow log reporting.
-
Constraints
- A VPC is available.
- Currently, only C3, CC3, and P2 ECSs are supported.
+
Constraints
- A VPC is available.
- Currently, only C3, M3, and S2 ECSs are supported.
-
Operation Process
Figure 1 Flowchart
+
Operation Process
Figure 1 Flowchart
Operations in Figure 1 are performed on different consoles:
-
- LTS console: Creating a log group and creating a log topic.
- VPC console: Creating a VPC flow log and viewing the flow log.
+
- Create log groups and log streams on the LTS console.
- Create and view VPC flow logs on the VPC console.
-
Creating a Log Group
- Log in to the management console.
- In the upper left corner of the management console, select the target region and project.
- Click Service List and choose Management & Deployment > Log Tank Service.
Figure 2 Log management
- - On the Log Management page, click Create Log Group.
Figure 3 Creating a log group
+Creating a Log Group
- Log in to the management console.
- Click Service List and choose Management & Deployment > Log Tank Service.
Figure 2 Log management
+ - On the Log Management page, click Create Log Group.
Figure 3 Creating a log group
- On the displayed page, enter a log group name.
-
Table 1 Parameter descriptionParameter
+Table 1 Parameter descriptionParameter
|
-Description
+ | Description
|
-Example Value
+ | Example Value
|
-Log Group Name
+ | Log Group Name
|
-Specifies the log group name which must be globally unique. The configuration rules are as follows:
-- Must be a string of 1 to 64 characters.
- Only allows uppercase and lowercase letters, digits, underscores (_), hyphens (-), and periods (.). The name cannot start or end with a period.
+ | Specifies the log group name which must be globally unique. The configuration rules are as follows:
+- Must be a string of 1 to 64 characters.
- Only allows uppercase and lowercase letters, digits, underscores (_), hyphens (-), and periods (.). The name cannot start with a period or underscore, or end with a period.
|
-lts-group-wule
+ | lts-group-7hjg
|
-Log Retention Duration
+ | Log Retention Duration
|
-Specifies the time period, in the unit of days, of storing logs in the LTS database. The default retention period for logs is seven days. Any logs stored longer than the retention period will be deleted.
+ | Specifies the time period, in the unit of days, of storing logs in the LTS database. The default retention period for logs is seven days. Any logs stored longer than the retention period will be deleted.
|
-7
+ | 7
|
@@ -42,36 +42,36 @@
- Click OK.
-Creating a Log TopicTo create a log topic in the log group, perform the following operations:
- - Log in to the management console.
- In the upper left corner of the management console, select the target region and project.
- Click Service List and choose Management & Deployment > Log Tank Service.
- In the log group list, click the name of the target log group.
Figure 4 Log topic list
- - On the displayed page, click Create Log Topic.
Figure 5 Creating a log topic
- - On the displayed page, enter a name.
-
Table 2 Parameter descriptionParameter
+Creating a Log StreamTo create a log stream in the log group, perform the following operations:
+ - Log in to the management console.
- Click Service List and choose Management & Deployment > Log Tank Service.
- Click
next to the log group name to expand the log stream list.Figure 4 Log stream list
+ - Click Create Log Stream.
Figure 5 Creating a log stream
+ - On the displayed page, enter a log stream name.
+
Table 2 Log stream parametersParameter
|
-Description
+ | Description
|
Example Value
|
-Log Topic Name
+ | Log Stream Name
|
-Specifies the log topic name. The name must be unique in a log group. The configuration rules are as follows:
-- Must be a string of 1 to 64 characters.
- Only allows uppercase and lowercase letters, digits, underscores (_), hyphens (-), and periods (.). The name cannot start or end with a period.
+ | The log stream name must be unique in a log group. The configuration rules are as follows:
+- Must be a string of 1 to 64 characters.
- Cannot start with a period (.) or underscore (_) or end with a period (.).
|
-LogTopic1
+ | lts-topic-ncwm
|
- - Click OK.
+ - Click OK. The log stream is created.
-Creating a VPC Flow Log- Log in to the management console.
- In the upper left corner of the management console, select the target region and project.
- Choose Service List > Network > Virtual Private Cloud.
- In the navigation pane on the left, choose VPC Flow Logs.
- In the upper right corner, click Create VPC Flow Log. On the displayed page, configure parameters as prompted.
Figure 6 Creating a VPC flow log
+Creating a VPC Flow Log- Log in to the management console.
- In the upper left corner of the management console, select the target region and project.
- Choose Service List > Network > Virtual Private Cloud.
- In the navigation pane on the left, choose VPC Flow Logs.
- In the upper right corner, click Create VPC Flow Log. On the displayed page, configure parameters as prompted.
Figure 6 Creating a VPC flow log
Table 3 Parameter descriptionParameter
|
-Description
+ | Description
|
Example Value
|
@@ -87,7 +87,7 @@
Resource Type
|
-Specifies the type of resources whose traffic is to be logged. Currently, Resource Type can only be NIC.
+ | Specifies the resource type whose traffic is to be logged. You can select NIC, Subnet, or VPC.
|
NIC
|
@@ -115,7 +115,7 @@
Log Topic
|
-Specifies the log topic created in LTS.
+ | Specifies the log stream created in LTS.
|
LogTopic1
|
@@ -138,7 +138,7 @@
- Log in to the management console.
- In the upper left corner of the management console, select the target region and project.
- Choose Service List > Network > Virtual Private Cloud.
- In the navigation pane on the left, choose VPC Flow Logs.
- Locate the target VPC flow log and click View Log Record in the Operation column to view information about the flow log record in LTS.
Figure 7 Viewing a log record
Figure 8 Viewing details of a log record
The flow log record is in the following format:
-<version> <project-id> <interface-id> <srcaddr> <dstaddr> <srcport> <dstport> <protocol> <packets> <bytes> <start> <end> <action> <log-status>
+<version> <project-id> <interface-id> <srcaddr> <dstaddr> <srcport> <dstport> <protocol> <packets> <bytes> <start> <end> <action> <log-status>
Example 1: The following is an example of a flow log record in which traffic was allowed during the capture window:
1 5f67944957444bd6bb4fe3b367de8f3d 1d515d18-1b36-47dc-a983-bd6512aed4bd 192.168.0.154 192.168.3.25 38929 53 17 1 96 1548752136 1548752736 ACCEPT OK
Value 1 indicates the VPC flow log version. Traffic with a size of 96 bytes to NIC 1d515d18-1b36-47dc-a983-bd6512aed4bd during the past 10 minutes (from 16:55:36 to 17:05:36 on January 29, 2019) was allowed. A data packet was transmitted over the UDP protocol from source IP address 192.168.0.154 and port 38929 to destination IP address 192.168.3.25 and port 53.
@@ -259,7 +259,7 @@
- You can enter a keyword on the log topic details page on the LTS console to search for flow log records.
+ You can search for flow log records by keyword on the log stream details page on the LTS console.
diff --git a/docs/lts/umn/lts_01_0010.html b/docs/lts/umn/lts_01_0010.html
index 753e7ebe..b58fcb3c 100644
--- a/docs/lts/umn/lts_01_0010.html
+++ b/docs/lts/umn/lts_01_0010.html
@@ -13,19 +13,18 @@
Currently, only one tracker can be created for each account.
-Configuring the TrackerYou can perform the following operations to enable the trace analysis function, thus reporting CTS traces to LTS.
+ Configuring the TrackerFollow the directions below to enable transfer to LTS so traces can be reported to LTS:
- On the Tracker page, click Configure in the Operation column.
The Configure Tracker page is displayed.
- - On the displayed page, enable Trace Analysis.
Figure 3 Configuring a tracker
- During CTS trace reporting, the system automatically creates a log group and a log topic on the LTS console.
+ - Enable Transfer to LTS.
Figure 3 Configuring a tracker
+ A log group and a log stream will be created automatically for the reported traces on the LTS console.
- - Click OK.
+ - Click Next to preview the created tracker.
- Click Configure.
Viewing Logs in Real TimeYou can perform the following operations to view logs reported by CTS:
- - Click Service List and choose Management & Deployment > Log Tank Service.
- In the log group list, click the name of the target log group.
- In the log topic list, locate the target log topic and click View in the Operation column.
Figure 4 Viewing logs in real time
-Logs are reported to LTS every 10 minutes. In the log display area, you may wait for at most 10 minutes to view the logs.
-In addition, you can customize log display by clicking Clear, Pause, or Close in the upper right corner. Figure 5 Log display area
-
+ - Click Service List and choose Management & Deployment > Log Tank Service.
- In the log group list, click
on the left of a log group name. The log stream list is displayed. - Click Log Stream to go to the log details and view real-time logs.
Figure 4 Viewing logs in real time
+Logs are reported to LTS once every five seconds. You may wait for at most five seconds before the logs are displayed.
+You can control log display by clicking Clear or Pause in the upper right corner.
diff --git a/docs/lts/umn/lts_01_0033.html b/docs/lts/umn/lts_01_0033.html
index 5ea5c49e..1a1b07ce 100644
--- a/docs/lts/umn/lts_01_0033.html
+++ b/docs/lts/umn/lts_01_0033.html
@@ -8,7 +8,41 @@
|
-2022-09-02
+ | 2023-06-30
+ |
+Updated figures in Collecting Logs from CCE.
+ |
+
+2023-05-30
+ |
+- Added the fine-grained permission dependency table in section "Permission Management."
- Deleted descriptions of filters.
+ |
+
+2023-04-30
+ |
+Optimized FAQs.
+ |
+
+2023-03-30
+ |
+The modification is as follows:
+- Added the procedure in section "Collecting Logs from Hosts".
- Modified parameter descriptions in section "Reporting Flow Logs from VPC to LTS".
- Modified the deletion icon in section "Managing Log Streams".
+ |
+
+2022-11-30
+ |
+- Added the following sections:
+
- Optimized the following sections:
+
+ |
+
+2022-10-18
+ |
+The modification is as follows:
+- Deleted descriptions about the OBS Administrator permission in section "Log Transfer".
- Added rules for setting a custom path in section "Log Transfer".
- Deleted descriptions about Web Application Firewall (WAF) and Elastic Load Balance (ELB) in section "Related Services".
+ |
+
+2022-09-02
|
The modification is as follows:
- Modified description of restrictions in section "Managing Log Groups."
- Modified description of restrictions in section "Managing Log Streams."
- Modified description of restrictions in section "Quick Search."
diff --git a/docs/lts/umn/lts_02_0013.html b/docs/lts/umn/lts_02_0013.html
index 3cb95e7b..48da007a 100644
--- a/docs/lts/umn/lts_02_0013.html
+++ b/docs/lts/umn/lts_02_0013.html
@@ -1,61 +1,71 @@
-Installing ICAgent (Linux)
-ICAgent is a log collection tool for LTS. If you use LTS to collect logs from a host running Linux OS, you need to install ICAgent on the host.
- PrerequisitesEnsure that the time and time zone of your local browser are consistent with those of the host to install ICAgent. If they are inconsistent, errors may occur during log reporting.
-
+ Installing ICAgent
+ ICAgent is a log collection tool for LTS. To use LTS to collect logs from hosts, you need to install ICAgent on the hosts.
+ PrerequisitesEnsure that the time and time zone of your local browser are consistent with those of the host to install ICAgent. If they are inconsistent, errors may occur during log reporting.
- Installation MethodsThere are two methods to install ICAgent.
+ Installation MethodsThere are two methods to install ICAgent.
- Table 1 Installation methodsMethod
+Table 1 Installation methodsMethod
|
-Scenario
+ | Scenario
|
-Initial installation
+ | Initial installation
|
-You can use this method to install ICAgent on a host that has no ICAgent installed.
+ | You can use this method to install ICAgent on a host that has no ICAgent installed.
|
-Inherited installation
+ | Inherited installation (supported only for Linux hosts)
|
-When ICAgent has already been installed on one host but needs to be installed on multiple hosts, you can use this method.
+ | When ICAgent has already been installed on one host but needs to be installed on multiple hosts, you can use this method.
|
-Initial Installation- Log in to the LTS console and choose Agent Management in the navigation pane on the left.
- Click Install ICAgent.

- - Select an installation mode:
- Obtain AK/SK. For details, see How Do I Obtain an AK/SK Pair?
If the AK/SK pair expires or is deleted, the ICAgent status may become abnormal. In this case, create an AK/SK pair and generate a new installation command. Log in to the host and run the command to reinstall ICAgent.
+ Initial Installation (Linux)- Log in to the LTS console and choose Host Management in the navigation pane on the left.
- Click Install ICAgent in the upper right corner.
Figure 1 Installing ICAgent
+ - Set OS to Linux.
- Select an installation mode:
- Obtain AK/SK. For details, see How Do I Obtain an AK/SK Pair?
Obtain and use the AK/SK of a public account. Ensure that the public account and AK/SK will not be deleted or disabled. If the AK/SK is deleted, the ICAgent cannot report data to LTS.
- - Create an agency. For details, see How Do I Install ICAgent by Creating an Agency?
- - Click Copy Command to copy the ICAgent installation command.
- Log in as user root to the host which is deployed in the region same as that you are logged in to (for example, by using a remote login tool such as PuTTY) and run the copied command. If you have chosen Obtain AK/SK as the installation mode, enter the AK/SK pair as prompted.
- When the message ICAgent install success is displayed, ICAgent has been installed in the /opt/oss/servicemgr/ directory of the host. You can then view the ICAgent status on the Agent Management page of the LTS console.
- If the installation fails, uninstall ICAgent and reinstall it. If reinstallation fails, contact technical support.
+
+
+ - Click Copy Command to copy the ICAgent installation command.
- Log in as user root to the host which is deployed in the region same as that you are logged in to (for example, by using a remote login tool such as PuTTY) and run the copied command. If you have chosen Obtain AK/SK as the installation mode, enter the AK/SK pair as prompted.
- When the message ICAgent install success is displayed, ICAgent has been installed in the /opt/oss/servicemgr/ directory of the host. You can then view the ICAgent status on the Host Management > Hosts page of the LTS console.
- If the installation fails, uninstall ICAgent and reinstall it. If the reinstallation fails, contact technical support.
-Inherited InstallationLet's assume that you need to install ICAgent on multiple hosts, and one of the hosts already has ICAgent installed. The ICAgent installation package, ICProbeAgent.tar.gz, is in the /opt/ICAgent/ directory. You can follow the directions below to install ICAgent on other hosts one by one.
- - Run the following command on the host where ICAgent has been installed, where x.x.x.x is the IP address of the host you want to install ICAgent on.
bash /opt/oss/servicemgr/ICAgent/bin/remoteInstall/remote_install.sh -ip x.x.x.x
- - Enter the password for user root of the host when prompted.
- If the Expect tool is installed on the host that has ICAgent installed, the ICAgent installation should be able to complete without prompting you for a password.
- Ensure that user root can run SSH or SCP commands on the host where ICAgent has been installed to remotely communicate with the host to install ICAgent.
- When the message ICAgent install success is displayed, ICAgent has been installed in the /opt/oss/servicemgr/ directory of the host. You can then view the ICAgent status on the Agent Management page of the LTS console.
- If the installation fails, uninstall ICAgent and reinstall it. If reinstallation fails, contact technical support.
+ Initial Installation (Windows)- Log in to the LTS console and choose Host Management in the navigation pane on the left.
- Click Install ICAgent in the upper right corner.
- Set OS to Windows.

+ - Download the ICAgent installation package to the host.
You can download it by clicking the name of the package or copying the download URL to the address bar of your browser.
+ - Save the ICAgent installation package to a directory, for example, C:\ICAgent, and decompress the package.
- Enter the AK/SK pair to generate the ICAgent installation command. For details about how to obtain an AK/SK pair, see How Do I Obtain an AK/SK Pair?
If the AK/SK pair expires or is deleted, the ICAgent status may become abnormal. In this case, create an AK/SK pair and generate a new installation command. Log in to the host and run the command to reinstall ICAgent.
+
+ - Open the Command Prompt, go to the directory where the ICAgent installation package is decompressed, and run the copied command.
If the message Service icagent installed successfully is displayed, the installation is successful.
+
+
+
+ Inherited Installation (Linux)Let's assume that you need to install ICAgent on multiple hosts, and one of the hosts already has ICAgent installed. The ICAgent installation package, ICProbeAgent.tar.gz, is in the /opt/ICAgent/ directory. You can follow the directions below to install ICAgent on other hosts one by one.
+ - Run the following command on the host where ICAgent has been installed, where x.x.x.x is the IP address of the host you want to install ICAgent on.
bash /opt/oss/servicemgr/ICAgent/bin/remoteInstall/remote_install.sh -ip x.x.x.x
+ - Enter the password for user root of the host when prompted.
- If the Expect tool is installed on the host that has ICAgent installed, the ICAgent installation should be able to complete without prompting you for a password. Otherwise, enter the password as prompted.
- Ensure that user root can run SSH or SCP commands on the host where ICAgent has been installed to remotely communicate with the remote host to install ICAgent.
- When the message ICAgent install success is displayed, ICAgent has been installed in the /opt/oss/servicemgr/ directory of the host. You can then view the ICAgent status on the Host Management > Hosts page of the LTS console.
- If the installation fails, uninstall ICAgent and reinstall it. If reinstallation fails, contact technical support.
- Batch Inherited InstallationLet's assume that you need to install ICAgent on multiple hosts, and one of the hosts already has ICAgent installed. The ICAgent installation package, ICProbeAgent.tar.gz, is in the /opt/ICAgent/ directory. You can follow the directions below to install ICAgent on other hosts in batches.
- - The hosts must all belong to the same Virtual Private Cloud (VPC) and be on the same subnet.
+ Batch Inherited Installation (Linux)Let's assume that you need to install ICAgent on multiple hosts, and one of the hosts already has ICAgent installed. The ICAgent installation package, ICProbeAgent.tar.gz, is in the /opt/ICAgent/ directory. You can follow the directions below to install ICAgent on other hosts in batches.
+ - The hosts must all belong to the same Virtual Private Cloud (VPC) and be on the same subnet.
- Python 3.* is required for batch installation. If you are prompted that Python cannot be found during ICAgent installation, install Python of a proper version and try again.
- Prerequisites
- The IP addresses and passwords of all hosts to install ICAgent have been collected, sorted in the iplist.cfg file, and uploaded to the /opt/ICAgent/ directory on the host that has ICAgent installed. Each IP address and password in the iplist.cfg file must be separated by a space, as shown in the following:
- 192.168.2.109 Password of the host where ICAgent is to be installed
- 192.168.0.139 Password of the host where ICAgent is to be installed
- - The iplist.cfg file contains sensitive information. You are advised to clear it after using it.
- - If all hosts share a password, list only IP addresses in the iplist.cfg file and enter the password manually during execution. If one of the hosts uses a different password, type the password behind its IP address.
+ Prerequisites
+ The IP addresses and passwords of all hosts to install ICAgent have been collected, sorted in the iplist.cfg file, and uploaded to the /opt/ICAgent/ directory on the host that has ICAgent installed. Each IP address and password in the iplist.cfg file must be separated by a space, as shown in the following example:
+ 192.168.0.109 Password (Replace the IP address and password with the actual ones)
+ 192.168.0.39 Password (Replace the IP address and password with the actual ones)
+ - Because the iplist.cfg file contains sensitive information, you are advised to clear it after using it.
+ - If all hosts share a password, list only IP addresses in the iplist.cfg file and enter the password manually during execution. If one of the hosts uses a different password, type the password behind its IP address.
- Procedure
- - Run the following command on the host that has ICAgent installed:
bash /opt/oss/servicemgr/ICAgent/bin/remoteInstall/remote_install.sh -batchModeConfig /opt/ICAgent/iplist.cfg
-Enter the password for user root of the hosts to install ICAgent. If the passwords of all hosts have been configured in the iplist.cfg file, press Enter to skip this step.
-batch install begin
+Procedure
+- Run the following command on the host that has ICAgent installed:
bash /opt/oss/servicemgr/ICAgent/bin/remoteInstall/remote_install.sh -batchModeConfig /opt/ICAgent/iplist.cfg
+Enter the default password for user root of the hosts to install ICAgent. If the passwords of all hosts have been configured in the iplist.cfg file, press Enter to skip this step.
+batch install begin
Please input default passwd:
send cmd to 192.168.0.109
send cmd to 192.168.0.39
@@ -65,12 +75,12 @@ send cmd to 192.168.0.39
End of install agent: 192.168.0.39
End of install agent: 192.168.0.109
All hosts install icagent finish.
-If the message All hosts install icagent finish. is displayed, ICAgent has been installed on all the hosts listed in the configuration file.
- - Choose Agent Management in the LTS navigation pane to view the ICAgent status. For details, see ICAgent Status.
+If the message All hosts install icagent finish. is displayed, ICAgent has been installed on all the hosts listed in the configuration file.
+ - You can then view the ICAgent status on the Host Management > Hosts page of the LTS console.
diff --git a/docs/lts/umn/lts_02_0014.html b/docs/lts/umn/lts_02_0014.html
index 0482503e..c46038b9 100644
--- a/docs/lts/umn/lts_02_0014.html
+++ b/docs/lts/umn/lts_02_0014.html
@@ -1,16 +1,23 @@
Upgrading ICAgent
- To deliver a better collection experience, LTS regularly upgrades ICAgent. When LTS prompts you that a new ICAgent version is available, you can follow the directions here to obtain the latest version.
- Procedure- Log in to the LTS console and choose Agent Management in the navigation pane on the left.
- Select one or more hosts where ICAgent is to be upgraded, and click Upgrade ICAgent.
- In the Upgrade ICAgent dialog box, click Yes.
The upgrade begins. This process takes about a minute. When the ICAgent status changes from Upgrading to Running, the ICAgent upgrade has completed.
- If the ICAgent status is abnormal after the upgrade or if the upgrade fails, log in to the host and run the installation command. ICAgent can be re-installed on top of itself.
+ To deliver a better collection experience, LTS regularly upgrades ICAgent. When LTS prompts you that a new ICAgent version is available, you can follow the directions here to obtain the latest version.
+ Linux hosts support ICAgent upgrade on the Host Management page of the LTS console.
+
+ Procedure- Log in to the LTS console and choose Host Management in the navigation pane on the left.
- On the Host Management page, click the Hosts tab.
- Select Hosts. Select one or more hosts where ICAgent is to be upgraded, and click Upgrade ICAgent.
Select CCE Cluster. In the drop-down list on the right, select the cluster whose ICAgent is to be upgraded, and click Upgrade ICAgent.
+Figure 1 Upgrading ICAgent
+ - If you create a CCE cluster for the first time, ICAgents will be installed on hosts in the cluster by default, and logs will be reported to AOM. Output to AOM is enabled by default. To report logs to LTS, disable Output to AOM before upgrading ICAgents. You are advised to choose Log Ingestion > Cloud Service > Cloud Container Engine (CCE) to collect container data and output it to LTS instead of AOM.
- CCE cluster ID (ClusterID): Each cluster has a fixed ID.
- When ICAgent is upgraded, LTS creates log groups and host groups for your CCE cluster. The name of the log group and host group is k8s-log-{ClusterID}. You can create an ingestion configuration (Cloud Services > Cloud Container Engine (CCE)) to add logs of the current CCE cluster to the log group.
- If the ICAgent is not installed on hosts in a cluster or the ICAgent version is too early, click Upgrade ICAgent to install the ICAgent on all hosts in the cluster.
+
+
+ - In the displayed dialog box, click OK.
The upgrade begins. This process takes about a minute. When the ICAgent status changes from Upgrading to Running, the ICAgent upgrade has completed.
+ If the ICAgent is abnormal after the upgrade or if the upgrade fails, log in to the host and run the installation command. ICAgent can be re-installed on top of itself.
diff --git a/docs/lts/umn/lts_02_0015.html b/docs/lts/umn/lts_02_0015.html
index 9565c622..a478bf19 100644
--- a/docs/lts/umn/lts_02_0015.html
+++ b/docs/lts/umn/lts_02_0015.html
@@ -1,42 +1,42 @@
Uninstalling ICAgent
- If ICAgent is uninstalled from a host, log collection will be affected. Exercise caution when performing this operation.
- Uninstalling ICAgent does not delete the installation files. You need to delete them manually if necessary.
+ If ICAgent is uninstalled from a host, log collection will be affected. Exercise caution when performing this operation.
+ Only ICAgent installed on Linux hosts can be uninstalled from the Host Management page of the LTS console. To uninstall ICAgent from a Windows host, go to \ICProbeAgent\bin\manual\win in the directory where the ICAgent installation package was decompressed, and double-click the script named uninstall.bat. If the message ICAgent uninstall success is displayed, the uninstallation was successful.
+ Uninstalling ICAgent does not delete the installation files. You need to delete them manually if necessary.
- There are a number of ways to uninstall ICAgent:
-
- Uninstalling ICAgent on the Console- Log in to the LTS console and choose Agent Management in the navigation pane on the left.
- Select one or more hosts where ICAgent is to be uninstalled and click Uninstall ICAgent.
- In the Uninstall ICAgent dialog box, click Yes.
The uninstallation begins. This process takes about a minute.
-When the ICAgent status changes from Uninstalling to Uninstalled, the ICAgent uninstallation has completed.
- To reinstall ICAgent, wait for 5 minutes after the uninstallation completes, or the reinstalled ICAgent may be unintentionally uninstalled again.
+ There are a number of ways to uninstall ICAgent:
+
+ Uninstalling ICAgent on the Console- Log in to the LTS console and choose Host Management in the navigation pane on the left.
- Click the Hosts tab.
- Select one or more hosts where ICAgent is to be uninstalled and click Uninstall ICAgent.
- In the displayed dialog box, click OK.
The uninstallation begins. This process takes about a minute.
+The ICAgent uninstalled will be removed from the host list.
+ To reinstall ICAgent, wait for 5 minutes after the uninstallation completes, or the reinstalled ICAgent may be unintentionally uninstalled again.
- Uninstalling ICAgent on a Host- Log in to a host where ICAgent is to be uninstalled as user root.
- Run the following command:
bash /opt/oss/servicemgr/ICAgent/bin/manual/uninstall.sh;
-If the message ICAgent uninstall success is displayed, the uninstallation has completed.
+Uninstalling ICAgent on a Host- Log in to a host where ICAgent is to be uninstalled as user root.
- Run the following command:
bash /opt/oss/servicemgr/ICAgent/bin/manual/uninstall.sh;
+If the message ICAgent uninstall success is displayed, the uninstallation has completed.
-Remotely Uninstalling ICAgentYou can uninstall ICAgent on one host remotely from another host.
- - On a host that has ICAgent installed, run the following command. x.x.x.x indicates the IP address of the host you wish to uninstall ICAgent from.
bash /opt/oss/servicemgr/ICAgent/bin/remoteUninstall/remote_uninstall.sh -ip x.x.x.x
- - Enter the password for user root for the remote host.
- If the Expect tool has been installed on the host you are running the command from, ICAgent will be uninstalled from the remote host after the command is executed. If the Expect tool has not been installed, you will need to enter the password of user root for the remote host as prompted.
- Ensure that user root can run SSH or SCP commands on the host where ICAgent has been installed to communicate with the remote host.
- If the message ICAgent uninstall success is displayed, the uninstallation has completed.
+ Remotely Uninstalling ICAgentYou can uninstall ICAgent on one host remotely from another host.
+ - Run the following command on the host where ICAgent has been installed. x.x.x.x indicates the IP address of the host you want to uninstall ICAgent from.
bash /opt/oss/servicemgr/ICAgent/bin/remoteUninstall/remote_uninstall.sh -ip x.x.x.x
+ - Enter the password for user root of the host when prompted.
- If the Expect tool is installed on the host that has ICAgent installed, the ICAgent uninstallation should be able to complete without prompting you for a password. Otherwise, enter the password as prompted.
- Ensure that user root can run SSH or SCP commands on the host where ICAgent has been installed to communicate with the remote host to uninstall ICAgent.
- If the message ICAgent uninstall success is displayed, the uninstallation has completed.
- Batch Uninstalling ICAgentIf ICAgent has been installed on a host and the ICAgent installation package ICProbeAgent.tar.gz is in the /opt/ICAgent/ directory of the host, you can use this method to uninstall ICAgent from multiple hosts at once.
- The hosts must all belong to the same Virtual Private Cloud (VPC) and be on the same subnet.
+ Batch Uninstalling ICAgentIf ICAgent has been installed on a host and the ICAgent installation package ICProbeAgent.tar.gz is in the /opt/ICAgent/ directory of the host, you can use this method to uninstall ICAgent from multiple hosts at once.
+ The hosts must all belong to the same Virtual Private Cloud (VPC) and be on the same subnet.
- Prerequisites
-
- The IP addresses and passwords of all hosts to uninstall ICAgent have been collected, sorted in the iplist.cfg file, and uploaded to the /opt/ICAgent/ directory on the host that has ICAgent installed. Each IP address and password in the iplist.cfg file must be separated by a space, as shown in the following example:
- 192.168.0.109 Password (Replace the IP address and password with the actual ones)
- 192.168.0.39 Password (Replace the IP address and password with the actual ones)
- - The iplist.cfg file contains sensitive information. You are advised to clear it after using it.
- - If all hosts share a password, list only IP addresses in the iplist.cfg file and enter the password manually during execution. If one of the hosts uses a different password, type the password behind its IP address.
+ Prerequisites
+ The IP addresses and passwords of all hosts to uninstall ICAgent have been collected, sorted in the iplist.cfg file, and uploaded to the /opt/ICAgent/ directory on the host that has ICAgent installed. Each IP address and password in the iplist.cfg file must be separated by a space, as shown in the following example:
+ 192.168.0.109 Password (Replace the IP address and password with the actual ones)
+ 192.168.0.39 Password (Replace the IP address and password with the actual ones)
+ - Because the iplist.cfg file contains sensitive information, you are advised to clear it after using it.
+ - If all hosts share a password, list only IP addresses in the iplist.cfg file and enter the password manually during execution. If one of the hosts uses a different password, type the password behind its IP address.
- Procedure
- - Run the following command on the host that has ICAgent installed:
bash /opt/oss/servicemgr/ICAgent/bin/remoteUninstall/remote_uninstall.sh -batchModeConfig /opt/ICAgent/iplist.cfg
-Enter the default password for user root of the hosts to uninstall ICAgent. If the passwords of all hosts have been configured in the iplist.cfg file, press Enter to skip this step.
-batch uninstall begin
+Procedure
+- Run the following command on the host that has ICAgent installed:
bash /opt/oss/servicemgr/ICAgent/bin/remoteUninstall/remote_uninstall.sh -batchModeConfig /opt/ICAgent/iplist.cfg
+Enter the default password for user root of the hosts to uninstall ICAgent. If the passwords of all hosts have been configured in the iplist.cfg file, press Enter to skip this step.
+batch uninstall begin
Please input default passwd:
send cmd to 192.168.0.109
send cmd to 192.168.0.39
@@ -44,13 +44,13 @@ send cmd to 192.168.0.39
End of uninstall agent: 192.168.0.109
End of uninstall agent: 192.168.0.39
All hosts uninstall icagent finish.
-If the message All hosts uninstall icagent finish. is displayed, the batch uninstallation has completed.
- - Choose Agent Management in the LTS navigation pane to view the ICAgent status of the host.
+If the message All hosts uninstall icagent finish. is displayed, the batch uninstallation has completed.
+ - Choose Host Management > Hosts on the LTS console to view the ICAgent status.
diff --git a/docs/lts/umn/lts_02_0030.html b/docs/lts/umn/lts_02_0030.html
new file mode 100644
index 00000000..50dda95d
--- /dev/null
+++ b/docs/lts/umn/lts_02_0030.html
@@ -0,0 +1,11 @@
+
+
+ Log Ingestion
+
+
+
diff --git a/docs/lts/umn/lts_02_1032.html b/docs/lts/umn/lts_02_1032.html
new file mode 100644
index 00000000..ab5af2ff
--- /dev/null
+++ b/docs/lts/umn/lts_02_1032.html
@@ -0,0 +1,13 @@
+
+
+ Host Management
+
+
+
diff --git a/docs/lts/umn/lts_02_1033.html b/docs/lts/umn/lts_02_1033.html
new file mode 100644
index 00000000..f65548ae
--- /dev/null
+++ b/docs/lts/umn/lts_02_1033.html
@@ -0,0 +1,102 @@
+
+
+ Managing Host Groups
+ Host groups allow you to configure host log ingestion efficiently. You can sort multiple hosts to a host group and associate the host group with log ingestion configurations. The ingestion configurations will be applied to all the hosts in the host group, saving you the trouble of configuring the hosts individually.
+ - When there is a new host, simply add it to a host group and the host will automatically inherit the log ingestion configurations associated with the host group.
- You can also use host groups to modify the log collection paths for multiple hosts at one go.
+ Creating a Host Group (IP Address)- Log in to the LTS console, and choose Host Management in the navigation pane on the left. On the displayed page, click Create Host Group in the upper right corner.
- In the displayed slide-out panel, enter a host group name and select a host OS (Linux).
Figure 1 Creating an IP address host group
+
+ - In the host list, select one or more hosts to add to the group and click OK.
- You can filter hosts by host name or host IP address. You can also click
and enter multiple host IP addresses in the displayed search box to search for matches. - If your desired hosts are not in the list, click Install ICAgent. On the displayed page, install ICAgent on the hosts as prompted. For details, see Installing ICAgent.
+
+
+ Creating a Host Group (Custom Identifier)- Log in to the LTS console, and choose Host Management in the navigation pane on the left. On the displayed page, click Create Host Group in the upper right corner.
- On the displayed Create Host Group page, enter a host group name in the Host Group field and set Host Group OS to Custom Identifier.
Figure 2 Creating a custom identifier host group
+
+ - A host group with a custom ID supports only Linux hosts.
- You can click Learn about the rules for filling in the collection path to learn how to configure paths.
+
+
+ - Click Add to add a custom identifier.
Up to 10 custom identifiers can be added.
+
+ - Click OK.
- Run the following commands to create the custom_tag file:
- Run the cd /opt/cloud command. In the cloud directory, run the mkdir lts command to create the lts directory.
- Run the chmod 750 lts command to modify the permission on the lts directory.
- Run the touch custom_tag command in the lts directory to create the custom_tag file.
- Run the chmod 640 custom_tag;vi custom_tag command to modify the custom_tag permission and open the file.
- Press i to enter the insert mode, enter a custom identifier, press Esc, and then enter :wq! to save the modification and exit.
+ After step 5, you can use either of the following methods to add hosts to a custom host group:
+ Method 1 (recommended):
+ Linux
+ View the host identifier in the custom_tag file of the /opt/cloud/lts directory on the host, and add that identifier to the custom identifiers of the host group to add the host to the group. For example, if the identifier in the custom_tag file of the /opt/cloud/lts directory on the host is test1 and a custom identifier of the host group is also test1, the host is added to the host group.
+ Method 2:
+ Linux
+ - To add a host to a host group, add the custom host group identifier to the custom_tag file in the /opt/cloud/lts directory on the host. For example, if the custom identifier of the host group is test, enter test in the custom_tag file to add the host to the host group.
- If multiple custom identifiers are added, enter any custom identifier in the custom_tag file of the /opt/cloud/lts directory on the host to add the host to the host group.
+
+
+
+
+ Modifying a Host GroupYou can change the name of a host group, add hosts to or remove hosts from a host group, or associate a host group with log ingestion configurations.
+
+ Table 1 Operations on host groupsOperation
+ |
+Procedure
+ |
+
+
+Changing a host group name
+ |
+- Log in to the LTS console. In the navigation pane on the left, choose Host Management.
- On the Host Groups tab, click
in the Operation column of the row containing the target host group. - On the displayed dialog box, change the host group name and customized identifier.
- Click OK.
+ |
+
+Adding hosts to a host group
+ |
+Method 1:
+- On the Host Management page, click the Host Groups tab, and click
in the row containing the target host group. - Click Add Host.
- In the displayed slide-out panel, all hosts that are not in the host group and run the selected OS type are displayed. Select the hosts to be added to the host group.
- You can filter hosts by host name or host IP address. You can also click
and enter multiple host IP addresses in the displayed search box to search for matches. - If your desired hosts are not in the list, click Install ICAgent. On the displayed page, install ICAgent on the hosts as prompted. For details, see Installing ICAgent.
+ - Click OK.
+Method 2:
+- On the Host Management page, click the Hosts tab.
- In the host list, select the target hosts and click Add to Host Group.
- In the displayed slide-out panel, select the target host group.
- Click OK.
+ |
+
+Removing a host from a host group
+ |
+- On the Host Management page, click the Host Groups tab, and click
in the row containing the target host group. - In the host list, click Remove in the Operation column of the row containing the host to be removed.
- In the displayed dialog box, click OK.
+ NOTE: This operation is not supported for hosts in the custom identifier host group.
+
+ |
+
+Uninstalling ICAgent from a host
+ |
+- On the Host Management page, click the Host Groups tab, and click
in the row containing the target host group. - In the host list, click Uninstall ICAgent in the Operation column of the row containing the target host.
- In the displayed dialog box, click OK to uninstall ICAgent from the host and remove the host from the host group.
NOTE: - This operation is not supported for hosts in the custom identifier host group.
- If the host has also been added to other host groups, it will be removed from those groups as well.
+
+
+ |
+
+Removing hosts from a host group
+ |
+- On the Host Management page, click the Host Groups tab, and click
in the row containing the target host group. - In the host list, select the target hosts and click the Remove button above the list.
- Click OK.
+ |
+
+Associating a host group with an ingestion configuration
+ |
+- On the Host Management page, click the Host Groups tab, and click
in the row containing the target host group. - Click the Associated Ingestion Configuration tab.
- Click Associate.
- In the displayed slide-out panel, select the target ingestion configuration.
- Click OK. The associated ingestion configuration is displayed in the list.
+ |
+
+Disassociating a host group from an ingestion configuration
+ |
+- On the Associated Ingestion Configuration tab, click Disassociate in the Operation column of the row containing the target ingestion configuration.
- Click OK.
+ |
+
+Disassociating a host group from multiple ingestion configurations
+ |
+- On the Associated Ingestion Configuration tab, select the target ingestion configurations and click the Disassociate button above the list.
- Click OK.
+ |
+
+
+
+
+
+ Deleting Host GroupsDeleting a single host group
+ - Log in to the LTS console. In the navigation pane on the left, choose Host Management.
- On the Host Groups tab, click
in the Operation column of the row containing the target host group.Figure 3 Deleting a host group
+ - In the displayed dialog box, click OK.
+ Deleting host groups in batches
+ - On the Host Groups tab, select multiple host groups to be deleted and click Delete above the list.
- In the displayed dialog box, click OK.
+
+
+
+
diff --git a/docs/lts/umn/lts_03_0002.html b/docs/lts/umn/lts_03_0002.html
deleted file mode 100644
index 2f7af697..00000000
--- a/docs/lts/umn/lts_03_0002.html
+++ /dev/null
@@ -1,55 +0,0 @@
-
-
- How Do I Install ICAgent by Creating an Agency?
- When installing ICAgent, you can create an IAM agency, and ICAgent will automatically obtain an AK/SK pair and generate the ICAgent installation command.
- Procedure- Log in to the console and choose Service List > Management & Deployment > Identity and Access Management.
- Choose Agencies in the navigation pane on the left.
- Click Create Agency in the upper right corner and set parameters as follows:
-
Table 1 Agency parametersParameter
- |
-Description
- |
-
-
-Agency Name
- |
-Set the agency name. For example, lts_ecm_trust.
- |
-
-Agency Type
- |
-Select Cloud service.
- |
-
-Cloud Service
- |
-Select Elastic Cloud Server (ECS) and Bare Metal Server (BMS).
- |
-
-Validity Period
- |
-Select Unlimited.
- |
-
-Description
- |
-(Optional) Provide details about the agency.
- |
-
-Permissions
- |
-- In the Permissions area, click Assign Permissions.
- Search for LTS Admin and APM Administrator and select them.
- Click OK on the Assign Permissions page.
- |
-
-
-
-
- - Click OK on the Create Agency page.
-
- Making an Agency Effective- Choose Service List > Computing > Elastic Cloud Server.
- Click the ECS where ICAgent is installed. The ECS details page is displayed.
- Select the created agency and confirm the configuration to make the agency effective.
- (Optional) If you want to set an agency when you are purchasing an ECS, do as follows: Click Buy ECS on the ECS console. In the Configure Advanced Settings step, set Advanced Options to Configure now and select an agency from the Agency drop-down list. Set the other parameters and click Next.
-
-
-
-
diff --git a/docs/lts/umn/lts_03_0015.html b/docs/lts/umn/lts_03_0015.html
index 0b6ab625..0b69504f 100644
--- a/docs/lts/umn/lts_03_0015.html
+++ b/docs/lts/umn/lts_03_0015.html
@@ -1,14 +1,15 @@
How Do I Obtain an AK/SK Pair?
- Procedure- Log in to the console, hover your cursor over your username in the upper right corner, and select My Credentials.
- Click the Access Keys tab.
- Click Create Access Key, and enter the login password.
- Click OK. The credentials file is automatically downloaded.
- Obtain the AK/SK pair from the credentials file.
+ Obtain and use the AK/SK of a public account.
+ Procedure- Log in to the console, hover your cursor over your username in the upper right corner, and select My Credentials.
- Click the Access Keys tab.
- Click Create Access Key, and enter the login password.
- Click OK. The credentials file is automatically downloaded.
- Obtain the AK/SK pair from the credentials file.
Up to two access keys can be created for each user. An access key can be downloaded only right after it is created. Keep it secure after it is generated. If the Create Access Key button is grayed out, delete an access key first before creating one.
diff --git a/docs/lts/umn/lts_04_0002.html b/docs/lts/umn/lts_04_0002.html
index 39b735ea..eac9eb30 100644
--- a/docs/lts/umn/lts_04_0002.html
+++ b/docs/lts/umn/lts_04_0002.html
@@ -1,14 +1,10 @@
- Log Management
-
+ Log Analysis
+
diff --git a/docs/lts/umn/lts_04_0003.html b/docs/lts/umn/lts_04_0003.html
index dac79d90..3f241359 100644
--- a/docs/lts/umn/lts_04_0003.html
+++ b/docs/lts/umn/lts_04_0003.html
@@ -1,25 +1,31 @@
Managing Log Groups
- A log group is a group of log streams which share the same log retention settings. Up to 100 log groups can be created for a single account.
- Creating a Log Group- Log in to the LTS console, choose Log Management in the navigation pane on the left, and click Create Log Group in the upper right corner.

- - In the dialog box displayed, enter a log group name. After a log group is created, its name cannot be changed. A log group name:
- Can contain only letters, digits, hyphens (-), underscores (_), and periods (.).
- Cannot start with a period (.) or underscore (_), and cannot end with a period (.).
- Can contain 1 to 64 characters.
+A log group is a group of log streams. Up to 100 log groups can be created for a single account.
+ Creating a Log GroupLog groups can be created in two ways. They are automatically created when other services are connected to LTS, or you can create one manually by following the steps described here.
+ - Log in to the LTS console, choose Log Management in the navigation pane on the left, and click Create Log Group in the upper right corner.
Figure 1 Creating a log group
+
+ - In the dialog box displayed, enter a log group name. After a log group is created, its name cannot be changed. A log group name:
- Can contain only letters, digits, hyphens (-), underscores (_), and periods (.).
- Cannot start with a period (.) or underscore (_) or end with a period (.).
- Can contain 1 to 64 characters.
Collected logs are sent to the log streams in the log group. If there are too many logs to collect, you are advised to separate logs into different log groups based on log types, and name log groups in an easily identifiable way.
- - Log retention duration is 7 days by default and cannot be modified.

-
- - Click OK.
- In the log group list on the Log Management page, you can view details of the log group, including log group name, log retention duration, creation time, and creation type.
+ - Set Log Retention Duration. You can set it to 1 to 30 days. If this parameter is not specified, logs are retained for 7 days by default.
Figure 2 Creating a log group
+ - Click OK.
- In the log group list, you can view details of log groups, including log group name, log retention duration (days), creation type, creation time, and number of log streams.
- Click the log group name, the details page of one of its log streams is displayed.
- If multiple log groups are created concurrently, an error may be reported indicating that the upper limit has been exceeded.
+
Deleting a Log GroupYou can delete a log group that is no longer needed. Deleting a log group will also delete the log streams and log data in the log group. Deleted log groups cannot be recovered. Exercise caution when performing the deletion.
If you want to delete a log group that is associated with a log transfer task, delete the task first.
- - In the log group list on the Log Management page, locate the target log group and click Delete in the Operation column.
- Enter DELETE and click Yes.
+
- In the log group list on the Log Management page, locate the target log group and click Delete in the Operation column.
- Enter DELETE and click OK.
Figure 3 Deleting a log group
+ Searching Log Groups/StreamsIn the log group list, click the search box and set the following filter criteria:
+ - Log group/stream
- Log group name/ID
- Log stream name/ID
+ Figure 4 Searching log groups/streams
+
diff --git a/docs/lts/umn/lts_04_0004.html b/docs/lts/umn/lts_04_0004.html
index 8ae53698..99a0f520 100644
--- a/docs/lts/umn/lts_04_0004.html
+++ b/docs/lts/umn/lts_04_0004.html
@@ -5,13 +5,14 @@
Up to 100 log streams can be created in a log group. The upper limit cannot be increased. If you cannot create a log stream because the upper limit is reached, you are advised to delete log streams that are no longer needed and try again, or create log streams in a new log group.
PrerequisitesYou have created a log group.
-Creating a Log Stream- On the LTS console, click the name of the created log group.
- Click Create Log Stream in the upper left corner, and enter a log stream name. After a log stream is created, its name cannot be changed. A log stream name:
- Can contain only letters, digits, hyphens (-), underscores (_), and periods (.).
- Cannot start with a period (.) or underscore (_), and cannot end with a period (.).
- Can contain 1 to 64 characters.
-
+Creating a Log StreamLog streams can be created in two ways. They are automatically created when other services are connected to LTS, or you can create one manually by following the steps described here.
+ - On the LTS console, click
on the left of a log group name. - Click Create Log Stream in the upper left corner of the displayed page, and enter a log stream name. After a log stream is created, its name cannot be changed. A log stream name:
- Can contain only letters, digits, hyphens (-), underscores (_), and periods (.).
- Cannot start with a period (.) or underscore (_) or end with a period (.).
- Can contain 1 to 64 characters.
+Figure 1 Creating a log stream
Collected logs are sent to the created log stream. If there are too many logs to collect, you are advised to separate logs into different log streams based on log types, and name log streams in an easily identifiable way.
- - Click OK.
On the log stream page, you can view details of the log stream, including log stream name, creation time, and creation type.
-
- Logs cannot be downloaded from log streams on the LTS console. You need to transfer logs to Object Storage Service (OBS) and download them from the OBS console.
+ - Click OK.
In the log stream list, you can view details of the list, including the log stream name, creation time, and creation type.
+Figure 2 A created log stream
+ Logs cannot be downloaded from log streams on the LTS console. You need to transfer logs to Object Storage Service (OBS) and download them from the OBS console.
@@ -19,13 +20,16 @@
Deleting a Log StreamYou can delete a log stream that is no longer needed. Deleting a log stream will also delete the log data in the log stream. Deleted log streams cannot be recovered. Exercise caution when performing the deletion.
- Before deleting a log stream, check whether any log collection task is configured for it. If there is a log collection task, deleting the log stream may affect log reporting.
- If you want to delete a log stream that is associated with a log transfer task, delete the task first.
- - In the log stream list, locate the target log stream and click Delete in the Operation column.
- Enter DELETE and click Yes.
+
- In the log stream list, locate the target log stream and click
in the Operation column. - Enter DELETE and click OK.
Figure 3 Deleting a log stream
+
diff --git a/docs/lts/umn/lts_04_0005.html b/docs/lts/umn/lts_04_0005.html
deleted file mode 100644
index 9f80f558..00000000
--- a/docs/lts/umn/lts_04_0005.html
+++ /dev/null
@@ -1,92 +0,0 @@
-
-
- Configuring Log Collection Rules
- ICAgent collects logs from hosts based on your specified collection rules, and packages and sends the collected log data to LTS on a log-stream basis. You can view logs on the LTS console in real time.
- Prerequisites- You have created a log group.
- You have created a log stream.
- You have installed ICAgent.
-
- Procedure- Log in to the LTS console, choose Log Management in the navigation pane on the left, and click the name of a log group.
- Click the name of a log stream. Logs will be collected to this log stream.
- Click Collection Configuration in the navigation pane. The collection configuration details page is displayed.
- Click Add Path.
- In the Add Host step, select the host whose logs you want to collect and click Next.
- In the Configure Collection Path step, add the path of logs you want to collect.
- Logs can be collected recursively. A double asterisk (**) can represent up to 5 directory levels in a path.
For example, /var/logs/**/a.log will match the following logs:
-/var/logs/1/a.log
-/var/logs/1/2/a.log
-/var/logs/1/2/3/a.log
-/var/logs/1/2/3/4/a.log
-/var/logs/1/2/3/4/5/a.log
- - /1/2/3/4/5/ indicates the 5 levels of directories under the /var/logs directory. All the a.log files found in all these levels of directories will be collected.
- Only one double asterisk (**) can be contained in a collection path. For example, /var/logs/**/a.log is acceptable but /opt/test/**/log/** is not.
- A collection path cannot begin with a double asterisk (**), such as /**/test to avoid collecting system files.
-
- - You can use an asterisk (*) as a wildcard for fuzzy match. The wildcard (*) can represent one or more characters of a directory or file name.
-
- If the collection path is set to a directory (such as /var/logs/), only .log, .trace, and .out files in the directory are collected.
If the collection path is set to a file name, the corresponding file is collected. Only text files can be collected.
-
- - Ensure that sensitive information is not collected.
- System logs are not collected by LTS by default. If you need to collect system logs, configure a corresponding collection path.
- LTS cannot collect logs of PostgreSQL (database) instances. It only collects logs of ECS (host) instances.
- A collection path can be configured only once. It means that a path of a host cannot be added for different log streams. Otherwise, log collection may be abnormal.
- If a collection path of a host has been configured in Application Operations Management (AOM), do not configure the path in LTS. If a path is configured in both AOM and LTS, only the path that is configured later takes effect.
-
- - Click Next: Configure Collection.
-
Table 1 Log collection settingsParameter
- |
-Description
- |
-
-
-Log Format
- |
-- Single-line: Each log line is displayed as a single log event.
- Multi-line: Multiple lines of exception log events can be displayed as a single log event. This is helpful when you check logs to locate problems.
- |
-
-Log Time
- |
-System time: log collection time by default. It is displayed at the beginning of each log event.
- NOTE: Log printing time is the time when logs are printed. Log collection time is the time when logs are collected and sent by ICAgent to LTS.
- ICAgent collects and sends logs to LTS with an interval of 1 second.
-
- |
-
-Time wildcard: You can set a time wildcard so that ICAgent will look for the log printing time as the beginning of a log event.
-- If the time format in a log event is 2019-01-01 23:59:59, the time wildcard should be set to YYYY-MM-DD hh:mm:ss.
- If the time format in a log event is 19-1-1 23:59:59, the time wildcard should be set to YY-M-D hh:mm:ss.
- NOTE: If a log event does not contain year information, ICAgent regards it as printed in the current year.
-
-Example:
-YY - year (19)
-YYYY - year (2019)
-M - month (1)
-MM - month (01)
-D - day (1)
-DD - day (01)
-hh - hours (23)
-mm - minutes (59)
-ss - seconds (59)
-hpm - hours (03PM)
-h:mmpm - hours:minutes (03:04PM)
-h:mm:sspm - hours:minutes:seconds (03:04:05PM)
-hh:mm:ss ZZZZ (16:05:06 +0100)
-hh:mm:ss ZZZ (16:05:06 CET)
-hh:mm:ss ZZ (16:05:06 +01:00)
- |
-
-Log Segmentation
- |
-This parameter needs to be specified if the Log Format is set to Multi-line. By generation time indicates that a time wildcard is used to detect log boundaries, whereas By regular expression indicates that a regular expression is used.
- |
-
-Regular Expression
- |
-You can set a regular expression to look for a specific pattern to indicate the beginning of a log event. This parameter needs to be specified when you select Multi-line for Log Format and By regular expression for Log Segmentation.
- |
-
-
-
-
- The time wildcard and regular expression will look for the specified pattern right from the beginning of each log line. If no match is found, the system time, which may be different from the time in the log event, is used. For example, if the time wildcard is set to YYYY-MM-DD hh:mm:ss but the time in the log event is [2019-01-01 23:59:59], they cannot be matched. In general cases, you are advised to select Single-line for Log Format and System time for Log Time.
-
- - Click OK. LTS will collect logs based on your specified collection rules.
The configurations of the log collection path can still be changed after the path is added.
-
-
-
-
-
diff --git a/docs/lts/umn/lts_04_0007.html b/docs/lts/umn/lts_04_0007.html
deleted file mode 100644
index e567e29d..00000000
--- a/docs/lts/umn/lts_04_0007.html
+++ /dev/null
@@ -1,15 +0,0 @@
-
-
- Log View
-
-
-
diff --git a/docs/lts/umn/lts_04_0008.html b/docs/lts/umn/lts_04_0008.html
deleted file mode 100644
index 99869529..00000000
--- a/docs/lts/umn/lts_04_0008.html
+++ /dev/null
@@ -1,22 +0,0 @@
-
-
- Viewing Real-Time Logs
- You can view the logs reported to the LTS console in real time.
- Prerequisites- You have created log groups and log streams.
- You have installed ICAgent.
- You have configured log collection rules.
-
- Procedure- Log in to the LTS console and choose Log Management.
- In the log group list, click the name of a log group.
- In the log stream list, click the name of a log stream.
- On the log stream details page, click the Real-Time Logs tab to view logs in real time.
-
- Logs are reported to LTS once every five seconds. You may wait for at most five seconds before the logs are displayed.
- You can control log display by clicking Clear or Pause in the upper right corner.
- Stay on the Real-Time Logs tab to keep updating them in real time. If you leave the Real-Time Logs tab page, logs will stop being loaded in real time. The next time you access the tab, the logs that were shown before you left the tab will not be displayed.
-
-
-
-
-
diff --git a/docs/lts/umn/lts_04_0009.html b/docs/lts/umn/lts_04_0009.html
deleted file mode 100644
index 77146429..00000000
--- a/docs/lts/umn/lts_04_0009.html
+++ /dev/null
@@ -1,91 +0,0 @@
-
-
- Log Search
- Follow the directions below to search logs by keyword and time range:
- - On the LTS console, click Log Management.
- In the log group list, click the name of a log group.
- In the log stream list, click the name of a log stream.
Alternatively, click Search in the Operation column of the row containing the target log stream.
- - In the upper right corner, select a time range.
- On the displayed page, enter a keyword in the search box.
- Click
to start searching.Logs that contain the keyword are displayed.
-
- Search Syntax and ExamplesSearch syntax:
-
- Table 1 Search syntaxFilter
- |
-Description
- |
-
-
-Exact search by keyword
- |
-LTS searches for logs containing the exact keyword (case-sensitive) that you specify. A keyword is the word between two adjacent delimiters.
-You can add an asterisk (*) after a keyword, for example, error*, if you are not familiar with delimiters.
- |
-
-Exact search by phrase
- |
-LTS searches for logs containing the exact phrase (case-sensitive) that you specify.
- |
-
-&&
- |
-Intersection of search results.
- |
-
-||
- |
-Union of search results.
- |
-
-AND
- |
-Intersection of search results.
- |
-
-NOT
- |
-Logs that contain the keyword after NOT are excluded.
- |
-
-*
- |
-Fuzzy search. The asterisk (*) can only be after a keyword to replace an unspecified number of characters.
- |
-
-?
- |
-Fuzzy search. The question mark (?) can be put in the middle or at the end of a keyword to replace a character.
- |
-
-
-
-
- Operators (such as &&, ||, AND, NOT, *, ?, and :) contained in raw logs cannot be used to search for logs.
-
- Search rules:
-
- - Searching using multiple key-value pairs is supported. The format is key1:value1 key2:value2.
For example, if you enter casInstanceID:in* hostIP:x.x.x.x kkfa, the following is displayed:
-{
- "casApplicationName": "app",
- "casEnvironmentName": "env",
- "hostName": "ecs-zc-xxxx",
- "collectTime": "15838xxxxx",
- "hostIP": "x.x.x.x",
- "logContent": "4312341341341314 kkfa dfa\\n", (Keyword)
- "appName": "testhshroma",
- "casInstanceID": "ins-111",
- "hostId": "b32dcfc0-615b-4de6-a796-dfccaxxxxxx",
- "casComponentID": "com-111"
-}
-
- Search examples:
- - Search for logs containing start: Enter start.
- Search for logs containing start to refresh: Enter start to refresh.
- Search for logs containing both start and unexpected: Enter start && unexpected or start AND unexpected.
- Search for logs containing start or unexpected: Enter start || unexpected.
- - Search for the logs containing keyword start but not unexpected: Enter start NOT unexpected.
- error*: logs that contain error.
- er?or: logs that start with er, is followed by any single character, and end with or.
- query1 AND query2 NOT query3: logs that contain both query1 and query2 but not query3.
- - Keywords are case-sensitive.
- The asterisk (*) and question mark (?) do not match special characters such as hyphens (-) and spaces.
-
-
-
-
-
diff --git a/docs/lts/umn/lts_04_0010.html b/docs/lts/umn/lts_04_0010.html
deleted file mode 100644
index 78c7005e..00000000
--- a/docs/lts/umn/lts_04_0010.html
+++ /dev/null
@@ -1,24 +0,0 @@
-
-
- Quick Search
- To search for logs using a keyword repeatedly, perform the following operations to configure quick search.
- Procedure- Log in to the LTS console and choose Log Management.
- In the log group list, click the name of a log group.
- In the log stream list, click the name of a log stream.
Alternatively, click Search in the Operation column of the row containing the target log stream.
- - On the log stream details page, click Add Quick Search and configure the Name and Keyword parameters.
- Set Name. A quick search name:
- Can contain only letters, digits, hyphens (-), underscores (_), and periods (.).
- Cannot start with a period (.) or underscore (_), and cannot end with a period (.).
- Can contain 1 to 64 characters.
- - Set Keyword to a keyword that will be frequently used, for example, error*.
-
- - Click OK.
Added query search filters are displayed next to Add Quick Search. Click the name of a quick search filter to view the search results.
-
-
- Viewing Context of a LogYou can check the log events generated before and after a log event for quick fault locating.
- - Log in to the LTS console and choose Log Management.
- In the log group list, click the name of a log group.
- In the log stream list, click the name of a log stream.
Alternatively, click Search in the Operation column of the row containing the target log stream.
- - On the Original Logs tab page, click View Context in the row of a log event.
The context of the log is displayed.
-
-
-
-
-
-
diff --git a/docs/lts/umn/lts_04_0011.html b/docs/lts/umn/lts_04_0011.html
new file mode 100644
index 00000000..b9a5776c
--- /dev/null
+++ b/docs/lts/umn/lts_04_0011.html
@@ -0,0 +1,14 @@
+
+
+ Overview
+ Logs reported from hosts and cloud services are retained in LTS for seven days by default. You can set the retention period to be 1 to 30 days. Retained logs are deleted once the retention period is over. For long-term storage, you can transfer logs to Object Storage Service (OBS).
+ Log transfer refers to when logs are replicated to other cloud services. Retained logs are deleted once the retention period is over, but the logs that have been transferred to other services are not affected.
+
+
+
+
+
diff --git a/docs/lts/umn/lts_04_0012.html b/docs/lts/umn/lts_04_0012.html
index 04ce3bf4..8fae72d5 100644
--- a/docs/lts/umn/lts_04_0012.html
+++ b/docs/lts/umn/lts_04_0012.html
@@ -1,10 +1,10 @@
- Agent Management
-
+ Managing Hosts
+
diff --git a/docs/lts/umn/lts_04_0013.html b/docs/lts/umn/lts_04_0013.html
index 4392b734..5e6db4ec 100644
--- a/docs/lts/umn/lts_04_0013.html
+++ b/docs/lts/umn/lts_04_0013.html
@@ -1,52 +1,52 @@
ICAgent Statuses
- The following table lists the ICAgent statuses.
+ The following table lists the ICAgent statuses.
- Table 1 ICAgent statusesStatus
+Table 1 ICAgent statusesStatus
|
-Description
+ | Description
|
-Running
+ | Running
|
-ICAgent is running properly.
+ | ICAgent is running properly.
|
-Uninstalled
+ | Uninstalled
|
-ICAgent is not installed.
+ | ICAgent is not installed.
|
-Installing
+ | Installing
|
-ICAgent is being installed. This process takes about a minute.
+ | ICAgent is being installed. This process takes about a minute.
|
-Installation failed
+ | Installation failed
|
-ICAgent installation failed.
+ | ICAgent installation failed.
|
-Upgrading
+ | Upgrading
|
-ICAgent is being upgraded. This process takes about a minute.
+ | ICAgent is being upgraded. This process takes about a minute.
|
-Upgrade failed
+ | Upgrade failed
|
-ICAgent upgrade failed.
+ | ICAgent upgrade failed.
|
-Offline
+ | Offline
|
-Check for network faults and rectify such faults.
+ | ICAgent is abnormal because the Access Key ID/Secret Access Key (AK/SK) pair is incorrect. Obtain the correct AK/SK pair and install ICAgent again.
|
-Faulty
+ | Faulty
|
-ICAgent is faulty. Contact technical support.
+ | ICAgent is faulty. Contact technical support.
|
@@ -55,7 +55,7 @@
diff --git a/docs/lts/umn/lts_04_0015.html b/docs/lts/umn/lts_04_0015.html
new file mode 100644
index 00000000..44271a1d
--- /dev/null
+++ b/docs/lts/umn/lts_04_0015.html
@@ -0,0 +1,16 @@
+
+
+Permissions Management
+This chapter describes how to use Identity and Access Management (IAM) for fine-grained permissions control for your LTS. With IAM, you can:
 - Create IAM users for personnel based on your enterprise's organizational structure. Each IAM user has their own identity credentials for accessing LTS resources.
- Grant only the permissions required for users to perform a specific task.
- Entrust an account or a cloud service to perform professional and efficient O&M on your LTS resources.
+ If your account meets your permissions requirements, skip this section.
+ This section describes the procedure for granting user permissions. Figure 1 shows the process flow.
+ PrerequisitesBefore granting permissions to user groups, learn about "Permissions Management" in the section Service Overview for LTS and select the permissions as required. For system permissions of other cloud services, see Permissions supported by IAM.
+
+ Process FlowFigure 1 Process of granting permissions to a user
+ - Log in to the IAM console. Create a user group on the IAM console and grant the LTS FullAccess permission to the user group. For details, see Create a user group and grant it permissions.
If you select the LTS FullAccess permissions, the Tenant Guest policy that the permission depends on is automatically selected. You also need to grant the Tenant Administrator policy for the global service project to the user group.
+
+ - Create a user on the IAM console and add the user to the user group created in 1. For details, see Create an IAM user and add it to the created user group.
- Log in to the console by using the created user and verify permissions in the authorized region. For details, see Log in as the IAM user and verify permissions.
+
+
+
diff --git a/docs/lts/umn/lts_04_0017.html b/docs/lts/umn/lts_04_0017.html
deleted file mode 100644
index 5aa0ddd3..00000000
--- a/docs/lts/umn/lts_04_0017.html
+++ /dev/null
@@ -1,13 +0,0 @@
-
-
-Appendixes
-
-
-
diff --git a/docs/lts/umn/lts_04_0040.html b/docs/lts/umn/lts_04_0040.html
new file mode 100644
index 00000000..4b6b38bf
--- /dev/null
+++ b/docs/lts/umn/lts_04_0040.html
@@ -0,0 +1,15 @@
+
+
+Log Transfer
+
+
+
diff --git a/docs/lts/umn/lts_04_0041.html b/docs/lts/umn/lts_04_0041.html
index ad1a30a3..e01e1484 100644
--- a/docs/lts/umn/lts_04_0041.html
+++ b/docs/lts/umn/lts_04_0041.html
@@ -1,15 +1,12 @@
-
- Log Transfer
-
- You can transfer logs to OBS to keep logs for a long time.
- Local log files are cleared periodically, but the logs transferred to OBS will not be affected.
- To transfer logs, you must have the OBS Administrator permissions apart from the LTS permissions.
+ Transferring Logs to OBS
+ You can transfer logs to OBS and download log files from the OBS console.
+ To transfer logs, you must have the OBS Administrator permissions apart from the LTS permissions.
- Prerequisites- Logs have been ingested to LTS.
- You have purchased an OBS bucket.
+ Prerequisites- Logs have been ingested to LTS.
- You have created an OBS bucket.
- Creating a Log Transfer Task- Log in to the LTS console and choose Log Transfer in the navigation pane on the left.
- Click Create Log Transfer in the upper right corner.
- On the displayed page, configure the log transfer parameters.
After a transfer task is created, you can modify parameters except the log group name, and log stream name.
+ Creating a Log Transfer Task- Log in to the LTS console and choose Log Transfer in the navigation pane on the left.
- Click Configure Log Transfer in the upper right corner.
- On the displayed page, configure the log transfer parameters.
After a transfer task is created, you can modify parameters except the log group name, transfer destination, and log stream name.
Table 1 Transfer parametersParameter
@@ -20,14 +17,35 @@
|
-Log Group Name
+ | Enable Transfer
+ |
+Enabled by default.
+ |
+Enabled
+ |
+
+Transfer Log to OBS
+ |
+Enable log transfer to OBS.
+ |
+OBS
+ |
+
+Transfer Destination
+ |
+Select a cloud service for log transfer.
+ |
+OBS
+ |
+
+Log Group Name
|
Select a log group.
|
N/A
|
-Log Stream Name
+ | Log Stream Name
|
Select a log stream.
|
@@ -36,22 +54,31 @@
OBS Bucket
|
-- Select an OBS bucket.
- If no OBS buckets are available, click View OBS Bucket to access the OBS console and create an OBS bucket.
+- Select an OBS bucket.
- If no OBS buckets are available, click View OBS Bucket to access the OBS console and create an OBS bucket.
- If encryption has been enabled for the selected OBS bucket, select a key name and select I agree to grant permissions on Key Management Service (KMS) to LTS so LTS can create and use keys to encrypt and decrypt transferred logs.
- Only Standard OBS buckets are supported in LTS.
|
N/A
|
|
-Custom Log Transfer Path
+ | Key Name
|
-- Enabled: Logs will be transferred to a custom path to separate transferred log files of different log streams.
The format is /LogTanks/Region name/Custom path. A custom path must meet the following requirements: - Must start with /LogTanks/Region name.
- Can contain 1–64 characters.
-
- - Disabled: Logs will be transferred to the default path. The default path is LogTanks/Region name/2019/01/01/Log group/Log stream/Log file name.
+ | Select a key name for an OBS bucket for which encryption has been enabled. If no keys are available, click Create Key and Authorize to go to the Data Encryption Workshop (DEW) console and create a key.
|
-LTS-test
+ | N/A
|
-Log File Prefix
+ | Custom Log Transfer Path
+ |
+- Enabled: Logs will be transferred to a custom path to separate transferred log files of different log streams.
The format is /LogTanks/Region name/Custom path. The default custom path is lts/%Y/%m/%d, where %Y indicates the year, %m indicates the month, and %d indicates the day. A custom path must meet the following requirements: - Must start with /LogTanks/Region name.
 - Can contain only letters, digits, and the following special characters: &$@;:,=+?-._/ %. The character % can only be followed by Y (year), m (month), d (day), H (hour), and M (minute). Any number of characters can be added before and after %Y, %m, %d, %H, and %M, and the sequence of these variables can be changed.
- Can contain 1–128 characters.
+
+Example:
+- If you enter LTS-test/%Y/%m/%done/%H/%m, the path is LogTanks/Region name/LTS-test/Y/m/done/H/m/Log file name.
- If you enter LTS-test/%d/%H/%m/%Y, the path is LogTanks/Region name/LTS-test/d/H/m/Y/Log file name.
+ - Disabled: Logs will be transferred to the default path. The default path is LogTanks/Region name/2019/01/01/Log group/Log stream/Log file name.
+ |
+LTS-test/%Y/%m/%done/%H/%m
+ |
+
+Log Prefix
|
The file name prefix of the log files transferred to an OBS bucket
The prefix must meet the following requirements:
@@ -72,36 +99,46 @@
| Json
|
-Transfer Log to OBS
- |
-Enable log transfer to OBS.
- |
-Enabled
- |
-
-Transfer Period
+ | Log Transfer Interval
|
The interval for automatically transferring logs to OBS buckets. The value can be 2, 5, or 30 minutes, or 1, 3, 6, or 12 hours.
|
3 hours
|
+Time Zone
+ |
+When logs are transferred to OBS buckets, the time in the transfer directory and file name will use the specified UTC time zone.
+ |
+(UTC) Coordinated Universal Time
+ |
+
- - Click OK. When the log transfer status changes to Normal, the transfer task has been created.
- Click the OBS bucket name in the OBS Bucket column to access the OBS console and view the transferred log files.
Transferred logs can be downloaded from OBS to your local computer for viewing.
-
+ - Click OK. When the log transfer status changes to Normal, the transfer task has been created.
- Click the OBS bucket name in the Transfer Destination column to access the OBS console and view the transferred log files.
Transferred logs can be downloaded from OBS to your local computer for viewing.
+Figure 1 Transferring logs to OBS
+ Logs stored in OBS are in raw or JSON format.
+
+
- Viewing Transfer StatusThe status of a transfer task can be Normal, Abnormal, or Disabled.
- - Normal: The log transfer task works properly.
- Abnormal: An error occurred in the log transfer task. The possible causes are as follows:
- The OBS bucket has been deleted. Specify another OBS bucket.
- Access control on the OBS bucket is configured incorrectly. Access the OBS console to correct the settings.
- The key for the encrypted OBS bucket has been deleted or the authorization has been canceled. Ensure that the key is valid.
- - Disabled: The log transfer task is stopped.
+ Modifying a Log Transfer Task- Log in to the LTS console and choose Log Transfer in the navigation pane on the left.
- Locate the row that contains the target transfer task and click Modify in the Operation column.
- Click OK.
- Deleting a Log Transfer TaskIf logs do not need to be transferred, you can delete the transfer task.
- - After a transfer task is deleted, log transfer will be stopped. Exercise caution when performing the deletion.
- After a transfer task is deleted, the logs that have been transferred remain in OBS.
+ Deleting a Log Transfer TaskIf logs do not need to be transferred, you can delete the transfer task.
+ - After a transfer task is deleted, log transfer will be stopped. Exercise caution when performing the deletion.
- After a transfer task is deleted, the logs that have been transferred remain in OBS.
- When you create a transfer task, OBS will grant read and write permissions to LTS for the selected bucket. If one OBS bucket is used by multiple transfer tasks, perform the following operations to delete the transfer task:
- If only one transfer task is created using this OBS bucket, delete the bucket access permission granted to specific users on the Access Control > Bucket ACLs tab page on the OBS console when you delete the transfer task.
- If multiple transfer tasks are created using this OBS bucket, do not delete the bucket access permission. Otherwise, data transfer will fail.
+
- - Log in to the LTS console and choose Log Transfer in the navigation pane on the left.
- Locate the row of the target transfer task and choose More > Delete in the Operation column.
- Click OK.
+ - Log in to the LTS console and choose Log Transfer in the navigation pane on the left.
- Locate the row of the target transfer task and choose Delete in the Operation column.
- Click OK.
+
+ Viewing Transfer StatusThe status of a transfer task can be Normal, Abnormal, or Disabled.
+ - Normal: The log transfer task works properly.
- Abnormal: An error occurred in the log transfer task. The possible causes are as follows:
- The OBS bucket has been deleted. Specify another OBS bucket.
- Access control on the OBS bucket is configured incorrectly. Access the OBS console to correct the settings.
- The key for the encrypted OBS bucket has been deleted or the authorization has been canceled. Ensure that the key is valid.
+ - Disabled: The log transfer task is stopped.
+
+
+
-
diff --git a/docs/lts/umn/lts_04_0043.html b/docs/lts/umn/lts_04_0043.html
new file mode 100644
index 00000000..b04bfbd8
--- /dev/null
+++ b/docs/lts/umn/lts_04_0043.html
@@ -0,0 +1,138 @@
+
+
+ Transferring Logs to DMS
+ You can use DMS APIs to retrieve logs in real time.
+ Prerequisites- Logs have been ingested to LTS.
- Before registering a DMS Kafka instance, configure an inbound rule to allow access from 198.19.128.0/17 over port 9011.
+
+ Procedure- Log in to the LTS console and choose Log Transfer in the navigation pane on the left.
- Click Create Log Transfer in the upper right corner.
- On the displayed page, configure the log transfer parameters.
After a transfer task is created, you can modify parameters except the log group name and transfer mode.
+
+
+Table 1 Transfer parametersParameter
+ |
+Description
+ |
+Example Value
+ |
+
+
+Enable Transfer
+ |
+Enabled by default.
+ |
+Enabled
+ |
+
+Transfer Destination
+ |
+Select a cloud service for log transfer.
+ |
+DMS
+ |
+
+Log Group Name
+ |
+Select a log group.
+ |
+N/A
+ |
+
+Log Stream Name
+ |
+Select a log stream.
+ |
+N/A
+ |
+
+Kafka Instance
+ |
+Select a Kafka instance. If no instances are available, click View Kafka Instances to access the DMS console and create a Kafka premium instance.
+If a Kafka instance has been registered, you can modify it. For details about how to register a Kafka instance, see Registering a Kafka Instance.
+ |
+N/A
+ |
+
+Topic
+ |
+Select a topic for the Kafka instance. If no topics are available, access the DMS console and create a topic for the Kafka premium instance.
+ |
+topic-01
+ |
+
+Format
+ |
+Only the raw log format is supported. The following is an example:
+(Logs displayed on the LTS console are in the raw format.) Sep 30 07:30:01 ecs-bd70 CRON[3459]: (root) CMD (/opt/oss/servicemgr/ICAgent/bin/manual/mstart.sh > /dev/null 2>&1)
+
+ |
+Raw Log Format
+ |
+
+Log Transfer Interval
+ |
+Logs are transferred to the Kafka instance in real time.
+ |
+Real time
+ |
+
+Filter by Tag Fields
+ |
+During transfer, logs will be filtered by tag fields collected by ICAgent.
+- Disabled: Logs will not be filtered by tag fields.
- Enabled: Default tag fields include those for hosts (hostIP, hostId, hostName, pathFile, and collectTime) and for Kubernetes (clusterName, clusterId, nameSpace, podName, and appName). Optional common tag fields are regionName and projectId.
+ |
+Enabled
+ |
+
+
+
+
+ - Click OK. When the log transfer status changes to Normal, the transfer task has been created. If you transfer logs of another account, the log group and stream belong to the delegator. When you click the name of the delegator's log group or stream on the Log Transfer page, you will be directed to the log group or stream through the agency.
- Click the Kafka premium instance in the Transfer Destination column to access its basic information page.
+
+ Registering a Kafka Instance- If you select a Kafka instance that is not registered, access the page for registering the Kafka instance.
- Configure the parameters for registering a Kafka instance.
+
Parameter
+ |
+Description
+ |
+Example Value
+ |
+
+
+Kafka Instance
+ |
+DMS instance name.
+ |
+Kafka-01
+ |
+
+Create DMS Network
+ |
+Connect the Kafka instance to LTS so that LTS can send data through this network.
+ |
+-
+ |
+
+Username
+ |
+If SASL authentication is enabled for the Kafka instance, enter the username for SASL authentication.
+ |
+DMS
+ |
+
+Password
+ |
+If SASL authentication is enabled for the Kafka instance, enter the password for SASL authentication.
+ |
+-
+ |
+
+
+
+
+ - Click OK.
+
+
+
+
diff --git a/docs/lts/umn/lts_04_0058.html b/docs/lts/umn/lts_04_0058.html
new file mode 100644
index 00000000..a36b20e3
--- /dev/null
+++ b/docs/lts/umn/lts_04_0058.html
@@ -0,0 +1,21 @@
+
+
+ FAQs
+
+
+
diff --git a/docs/lts/umn/lts_04_0105.html b/docs/lts/umn/lts_04_0105.html
new file mode 100644
index 00000000..ccf41256
--- /dev/null
+++ b/docs/lts/umn/lts_04_0105.html
@@ -0,0 +1,17 @@
+
+
+ Collecting Logs from Cloud Services
+
+
+
diff --git a/docs/lts/umn/lts_04_0511.html b/docs/lts/umn/lts_04_0511.html
new file mode 100644
index 00000000..654b8d23
--- /dev/null
+++ b/docs/lts/umn/lts_04_0511.html
@@ -0,0 +1,230 @@
+
+
+ Collecting Logs from CCE
+ LTS can collect logs from Cloud Container Engine (CCE).
+
+
+ ProcedurePerform the following operations to configure CCE log ingestion:
+ - Log in to the LTS console.
- In the navigation pane on the left, choose Log Ingestion and click CCE (Cloud Container Engine).
- Select Log Stream
Choose between Custom log stream and Fixed log stream to suit your requirements.
+Custom log stream
+- Select a log group from the Log Group drop-down list. If there are no desired log groups, click Create Log Group to create one.
- Select a log stream from the Log Stream drop-down list. If there are no desired log streams, click Create Log Stream to create one.
- Click Next: Install Log Collection Component.
+
+

+
+
+
+
+Fixed log stream
+Logs will be collected to a fixed log stream. By default, a CCE cluster has four types of log streams. Three of them are supported currently, including standard output/error (stdout-{ClusterID}), node file (hostfile-{ClusterID}), and container file (containerfile-{ClusterID}). Log streams are automatically named with a cluster ID. For example, if the cluster ID is Cluster01, the standard output/error log stream is stdout-Cluster01.
+Four log streams can be created in a CCE cluster, including standard output/error (stdout-{ClusterID}), node file (hostfile-{ClusterID}), container file (containerfile-{ClusterID}), and Kubernetes event (event-{ClusterID}) (coming soon). If one of them has been created in a log group, the log stream will no longer be created in the same log group or other log groups.
+- Select a cluster from the CCE Cluster drop-down list.
- Select a log group from the Log Group drop-down list. If there are no desired log groups, click Create Log Group to create one.
- Click Next: Install Log Collection Component.
+
+

+
+
+ - Install Log Collection Component
To install the CCE log collection component, perform the following steps:
+1. Log in to the LTS console.
+2. In the navigation pane on the left, choose Host Management.
+3. On the displayed page, choose Hosts > CCE clusters and select a CCE cluster.
+4. Click Upgrade ICAgent.
+5. In the displayed dialog box, click OK.
+ - To ingest logs from CCE, the log collection component must be installed on hosts in the CCE cluster.
- If the ICAgent component has been installed in your CCE cluster, click ICAgent Already Installed.
+

+
+
+
+ - Select Host Group
- In the host group list, select one or more host groups to collect logs. If there are no desired host groups, click Create in the upper left corner of the list. On the displayed Create Host Group page, create a host group. For details, see Creating a Host Group (Custom Identifier).
- The host group to which the cluster belongs is selected by default. You can select another created host group as required.
- You can skip this step and configure host groups after the ingestion configuration is complete. There are two options to do this:
- On the LTS console, choose Host Management > Host Groups and associate host groups with ingestion configurations.
- On the LTS console, choose Log Ingestion in the navigation pane on the left and click an ingestion configuration. On the displayed page, add one or more host groups for association.
+
+
+
+
+
+ - Click Next: Configure Collection.
+ - Configurations
Specify collection rules. For details, see Configurations.
+ - Finish.
Click Submit.
+
+
+ ConfigurationsWhen CCE is used to ingest logs, the configuration details are as follows:
+
+
+
+
+
+ - Basic Information: Enter a name containing 1 to 64 characters. Only letters, digits, hyphens (-), underscores (_), and periods (.) are allowed. The name cannot start with a period or underscore, or end with a period.
- Data Source: Select a data source type and configure it.
- Container standard output: Collects stderr and stdout logs of a specified container in the cluster.
- The standard output of the matched container is collected to the specified log stream. Standard output to AOM stops.
- The container standard output must be unique to a host.
+
+ - Container file: Collects file logs of a specified container in the cluster.
- Node file: Collects files of a specified node in the cluster.
The collection path must be unique to a host.
+
+
+
+Table 1 Configuration parametersParameter
+ |
+Description
+ |
+
+
+Container standard output
+ |
+Collects container standard output to AOM, and collects stderr and stdout logs of a specified container in the cluster.
+Collecting container standard output to AOM: ICAgent is installed on hosts in the cluster by default, and logs are collected to AOM. The function of collecting container standard output to AOM is enabled. Disable this function to collect stdout streams to LTS.
+Either stdout or stderr must be enabled.
+ |
+
+Container file
+ |
+- Collection Paths: LTS collects logs from the specified paths.
NOTE: - If a container mount path has been configured for the CCE cluster workload, the paths added for this field are invalid. The collection paths take effect only after the mount path is deleted.
- The collection path must be unique to a host.
+
+ - Set Collection Filters: Blacklisted directories or files will not be collected. If you specify a directory, all files in the directory are filtered out.
+ |
+
+Node file
+ |
+- Collection Paths: LTS collects logs from the paths you added for this field.
NOTE: The collection path must be unique to a host.
+
+ - Set Collection Filters: Blacklisted directories or files will not be collected. If you specify a directory, all files in the directory are filtered out.
+ |
+
+
+
+
+ - Kubernetes Matching Rules: Set this parameter only when the data source type is set to Container standard output or Container file path.
+
Table 2 Kubernetes matching rulesParameter
+ |
+Description
+ |
+
+
+Namespace Name Regular Expression
+ |
+Specifies the container whose logs are to be collected based on the namespace name. Regular expression matching is supported. NOTE: LTS will collect logs of the namespaces with names matching this expression. To collect logs of all namespaces, leave this field empty.
+
+
+ |
+
+Pod Name Regular Expression
+ |
+Specifies the container whose logs are to be collected based on the Pod name. Regular expression matching is supported.
+ NOTE: LTS will collect logs of the Pods with names matching this expression. To collect logs of all Pods, leave this field empty.
+
+ |
+
+Container Name Regular Expression
+ |
+Specifies the container whose logs are to be collected based on the container name (the Kubernetes container name is defined in spec.containers). Regular expression matching is supported. NOTE: LTS will collect logs of the containers with names matching this expression. To collect logs of all containers, leave this field empty.
+
+
+ |
+
+Container Label Whitelist
+ |
+Specifies the containers whose logs are to be collected. If you want to set a container label whitelist, Label Key is mandatory and Label Value is optional. NOTE: LTS will match all containers with a container label containing either a Label Key with an empty corresponding Label Value, or a Label Key with its corresponding Label Value.
+
+
+ |
+
+Container Label Blacklist
+ |
+Specifies the containers whose logs are not to be collected. If you want to set a container label blacklist, Label Key is mandatory and Label Value is optional. NOTE: LTS will exclude all containers with a container label containing either a Label Key with an empty corresponding Label Value, or a Label Key with its corresponding Label Value.
+
+
+ |
+
+Container Label
+ |
+After the Container Label is set, LTS adds related fields to logs.
+ NOTE: LTS adds the specified fields to the log when each Label Key has a corresponding Label Value. For example, if you enter "app" as the key and "app_alias" as the value, when the container label contains "app=lts", "{app_alias: lts}" will be added to the log.
+
+ |
+
+Environment Variable Whitelist
+ |
+Specifies the containers whose logs are to be collected. If you want to set an environment variable whitelist, Label Key is mandatory and Label Value is optional. NOTE: LTS will match all containers with environment variables containing either an Environment Variable Key with an empty corresponding Environment Variable Value, or an Environment Variable Key with its corresponding Environment Variable Value.
+
+
+ |
+
+Environment Variable Blacklist
+ |
+Specifies the containers whose logs are not to be collected. If you want to set an environment variable blacklist, Label Key is mandatory and Label Value is optional. NOTE: LTS will exclude all containers with environment variables containing either an Environment Variable Key with an empty corresponding Environment Variable Value, or an Environment Variable Key with its corresponding Environment Variable Value.
+
+
+ |
+
+Environment Variable Label
+ |
+After the environment variable label is set, LTS adds related fields to the log. NOTE: LTS adds the specified fields to the log when each Environment Variable Key has a corresponding Environment Variable Value. For example, if you enter "app" as the key and "app_alias" as the value, when the Kubernetes environment variable contains "app=lts", "{app_alias: lts}" will be added to the log.
+
+
+ |
+
+
+
+
+ - Advanced Settings: Configure the log format and log time.
+
Table 3 Log collection settingsParameter
+ |
+Description
+ |
+
+
+Log Format
+ |
+- Single-line: Each log line is displayed as a single log event.
- Multi-line: Multiple lines of exception log events can be displayed as a single log event. This is helpful when you check logs to locate problems.
+ |
+
+Log Time
+ |
+System time: log collection time by default. It is displayed at the beginning of each log event.
+ NOTE: - Log collection time is the time when logs are collected and sent by ICAgent to LTS.
- Log printing time is the time when logs are printed. ICAgent collects and sends logs to LTS with an interval of 1 second.
- Restriction on log collection time: Logs are collected within 24 hours before and after the system time.
+
+ |
+
+Time wildcard: You can set a time wildcard so that ICAgent will look for the log printing time as the beginning of a log event.
+- If the time format in a log event is 2019-01-01 23:59:59.011, the time wildcard should be set to YYYY-MM-DD hh:mm:ss.SSS.
- If the time format in a log event is 19-1-1 23:59:59.011, the time wildcard should be set to YY-M-D hh:mm:ss.SSS.
+ NOTE: If a log event does not contain year information, ICAgent regards it as printed in the current year.
+
+Example:
+YY - year (19)
+YYYY - year (2019)
+M - month (1)
+MM - month (01)
+D - day (1)
+DD - day (01)
+hh - hours (23)
+mm - minutes (59)
+ss - seconds (59)
+SSS - millisecond (999)
+hpm - hours (03PM)
+h:mmpm - hours:minutes (03:04PM)
+h:mm:sspm - hours:minutes:seconds (03:04:05PM)
+hh:mm:ss ZZZZ (16:05:06 +0100)
+hh:mm:ss ZZZ (16:05:06 CET)
+hh:mm:ss ZZ (16:05:06 +01:00)
+ |
+
+Log Segmentation
+ |
+This parameter needs to be specified if the Log Format is set to Multi-line. By generation time indicates that a time wildcard is used to detect log boundaries, whereas By regular expression indicates that a regular expression is used.
+ |
+
+Regular Expression
+ |
+You can set a regular expression to look for a specific pattern to indicate the beginning of a log event. This parameter needs to be specified when you select Multi-line for Log Format and By regular expression for Log Segmentation.
+ |
+
+
+
+
+ The time wildcard and regular expression will look for the specified pattern right from the beginning of each log line. If no match is found, the system time, which may be different from the time in the log event, is used. In general cases, you are advised to select Single-line for Log Format and System time for Log Time.
+
+
+
+
+
+
diff --git a/docs/lts/umn/lts_04_1031.html b/docs/lts/umn/lts_04_1031.html
new file mode 100644
index 00000000..cc7b3072
--- /dev/null
+++ b/docs/lts/umn/lts_04_1031.html
@@ -0,0 +1,155 @@
+
+
+ Collecting Logs from ECS
+ ICAgent collects logs from hosts based on your specified collection rules, and packages and sends the collected log data to LTS on a log stream basis. You can view logs on the LTS console in real time.
+ PrerequisitesICAgent has been installed and added to the host group.
+
+ ProcedurePerform the following operations to configure ECS log ingestion:
+ - Log in to the LTS console.
- In the navigation pane on the left, choose Log Ingestion and click ECS (Elastic Cloud Server).
- Select a log group.
- Select a log group from the drop-down list of Log Group. If there are no desired log groups, click Create Log Group to create one.
- Select a log stream from the drop-down list of Log Stream. If there are no desired log streams, click Create Log Stream to create one.
- Click Next: Select Host Group.
Figure 1 Selecting a log stream
+
+ - Select Host Group
- Select one or more host groups from which you want to collect logs. If there are no desired host groups, click Create above the host group list to create one. For details, see Creating a Host Group (IP Address).
+
+
You can choose not to select a host group in this step, but associate a host group with the ingestion configuration after you finish the procedure here. To do this, either:
+ - Choose Host Management in the navigation pane, click the Host Groups tab, and make the association, or
- Choose Log Ingestion in the navigation pane, click an ingestion configuration, and make the association on the details page.
+
+ - Click Next: Configure Collection.
Figure 2 Selecting a host group
+
+
+ - Configure Collection
Specify collection rules. For details, see Configurations.
+ - Finish
Click Back to Ingestion Configurations to check the ingestion details. You can also click View Log Stream to view the log stream to which logs are ingested.
+
+
+ ConfigurationsWhen you configure host log ingestion, the configuration details are as follows.
+ Figure 3 Configuring the collection
+
+ - Collection Configuration Name: Enter up to 64 characters. Only letters, digits, hyphens (-), underscores (_), and periods (.) are allowed. The name cannot start with a period or underscore, or end with a period.
To import old-edition ingestion configurations to the new edition of log ingestion, click Import Old-Edition Configuration.
+
+ - Collection Paths: Add one or more host paths. LTS will collect logs from these paths.
- Logs can be collected recursively. A double asterisk (**) can represent up to 5 directory levels in a path.
For example, /var/logs/**/a.log matches the following logs:
+/var/logs/1/a.log
+/var/logs/1/2/a.log
+/var/logs/1/2/3/a.log
+/var/logs/1/2/3/4/a.log
+/var/logs/1/2/3/4/5/a.log
+ - /1/2/3/4/5/ indicates the 5 levels of directories under the /var/logs directory. The a.log files found in all these directories will be collected.
- Only one double asterisk (**) can be contained in a collection path. For example, /var/logs/**/a.log is acceptable but /opt/test/**/log/** is not.
- A collection path cannot begin with a double asterisk (**), such as /**/test to avoid collecting system files.
+
+ - You can use an asterisk (*) as a wildcard for fuzzy match. The wildcard (*) can represent one or more characters of a directory or file name.
If a log collection path is similar to C:\windows\system32 but logs cannot be collected, enable the Web Application Firewall (WAF) and configure the path again.
+
+
+ - If the collection path is set to a directory (such as /var/logs/), only .log, .trace, and .out files in the directory are collected.
If the collection path is set to a file name, the corresponding file is collected. Only text files can be collected. To query the file format, run file -i File name.
+
+ - Ensure that sensitive information is not collected.
- If you want to collect system logs from a Windows host, enable the collection of Windows event logs when configuring the collection.
- Only logs of ECS (host) instances can be collected.
- A collection path can be configured only once. It means that a path of a host cannot be added for different log streams. Otherwise, log collection may be abnormal.
- If a collection path of a host has been configured in AOM, do not configure the path in LTS. If a path is configured in both AOM and LTS, only the path that is configured later takes effect.
- If log files were last modified more than 12 hours earlier than the time when the path is added, the files are not collected.
+
+ - Collection Blacklist: Blacklisted directories or files will not be collected. If you specify a directory, all files in the directory are filtered out.
Blacklist filters can be exact matches or wildcard pattern matches. For details, see Collection Paths.
+ If you blacklist a file or directory that has been set as a collection path in the previous step, the blacklist settings will be used and the file or files in the directory will be filtered out.
+
+ - Collect Windows Event Logs: To collect logs from Windows hosts, enable this option, and set the following parameters.
+
Table 1 Parameters for collecting windows event logsParameter
+ |
+Description
+ |
+
+
+Log Type
+ |
+Log types include system, program, security, and startup.
+ |
+
+Offset from First Collection Time
+ |
+Example: Set this parameter to 7 to collect logs generated within the 7 days before the collection start time. This offset takes effect only for the first collection to ensure that the logs are not repeatedly collected. Max: 7 days.
+ |
+
+Event Severity
+ |
+The event severity can be information, warning, error, critical, or verbose. Filter and collect by Windows event level. Only Windows Vista or later is supported.
+ |
+
+
+
+
+ - Configure the log format and log time.
+
Table 2 Log collection configurationsParameter
+ |
+Description
+ |
+
+
+Log Format
+ |
+- Single-line: Each log line is displayed as a single log event.
- Multi-line: Multiple lines of exception log events can be displayed as a single log event. This is helpful when you check logs to locate problems.
+ |
+
+Log Time
+ |
+System time: log collection time by default. It is displayed at the beginning of each log event.
+ NOTE: - Log collection time is the time when logs are collected and sent by ICAgent to LTS.
- Log printing time is the time when logs are printed. ICAgent collects and sends logs to LTS with an interval of 1 second.
- Restriction on log collection time: Logs are collected within 24 hours before and after the system time.
+
+ |
+
+Time wildcard: You can set a time wildcard so that ICAgent will look for the log printing time as the beginning of a log event.
+- If the time format in a log event is 2019-01-01 23:59:59.011, the time wildcard should be set to YYYY-MM-DD hh:mm:ss.SSS.
- If the time format in a log event is 19-1-1 23:59:59.011, the time wildcard should be set to YY-M-D hh:mm:ss.SSS.
+ NOTE: If a log event does not contain year information, ICAgent regards it as printed in the current year.
+
+Example:
+YY - year (19)
+YYYY - year (2019)
+M - month (1)
+MM - month (01)
+D - day (1)
+DD - day (01)
+hh - hours (23)
+mm - minutes (59)
+ss - seconds (59)
+SSS - millisecond (999)
+hpm - hours (03PM)
+h:mmpm - hours:minutes (03:04PM)
+h:mm:sspm - hours:minutes:seconds (03:04:05PM)
+hh:mm:ss ZZZZ (16:05:06 +0100)
+hh:mm:ss ZZZ (16:05:06 CET)
+hh:mm:ss ZZ (16:05:06 +01:00)
+ |
+
+Log Segmentation
+ |
+This parameter needs to be specified if the Log Format is set to Multi-line. By generation time indicates that a time wildcard is used to detect log boundaries, whereas By regular expression indicates that a regular expression is used.
+ |
+
+Regular Expression
+ |
+You can set a regular expression to look for a specific pattern to indicate the beginning of a log event. This parameter needs to be specified when you select Multi-line for Log Format and By regular expression for Log Segmentation.
+ |
+
+
+
+
+ The time wildcard and regular expression will look for the specified pattern right from the beginning of each log line. If no match is found, the system time, which may be different from the time in the log event, is used. In general cases, you are advised to select Single-line for Log Format and System time for Log Time.
+
+
+
+ Checking Ingestion ConfigurationsOn the LTS console, choose Log Ingestion in the navigation pane. Alternatively, access the Log Ingestion page by clicking Back to Ingestion Configurations when you finish configuring log ingestion.
+
+
+
+ Collecting Logs from Hosts (Old Version)If you have added a host using the old version, perform the following steps to view the host list:
+ - Log in to the LTS console and choose Log Management.
- In the log group list, select a log group or log stream. The log stream details page is displayed.
- Click View Old-Edition Ingestion to view the list of hosts ingested to the current log stream.
+
In the old version, ingestion configurations cannot be added or modified. See Procedure.
+
+
+
+
+
+
+
+
diff --git a/docs/lts/umn/lts_04_1053.html b/docs/lts/umn/lts_04_1053.html
new file mode 100644
index 00000000..7046c843
--- /dev/null
+++ b/docs/lts/umn/lts_04_1053.html
@@ -0,0 +1,17 @@
+
+
+ Log Management
+
+
+
diff --git a/docs/lts/umn/lts_04_1153.html b/docs/lts/umn/lts_04_1153.html
new file mode 100644
index 00000000..a7c30de5
--- /dev/null
+++ b/docs/lts/umn/lts_04_1153.html
@@ -0,0 +1,68 @@
+
+
+ LTS Console
+ The LTS console provides resource statistics, your favorite log streams/favorite log streams (local cache), and recently visited log streams.
+
+
+ 
+
+
+
+
+
+
+ Resource StatisticsThis area shows the read/write traffic, index traffic, storage volume, and total log volume of the account on the previous day, as well as the day-on-day changes.
+ To view resource details, click Details.
+
+
+
+ 
+
+
+
+
+
+ For details, see Resource Statistics.
+
+ My Favorites/My Favorites (Local Cache)This area displays the log streams you have added to favorites, including My Favorites and My Favorites (Local Cache).
+ - My Favorites: Save log streams to the database. This function is disabled by default. If your account has the write permission, My Favorites and My Favorites (Local Cache) are displayed.
- My Favorites (Local Cache): Save log streams to the local cache of the browser. This function is disabled by default. My Favorites (Local Cache) is displayed for all accounts.
If your account has the write permission, at least one of My Favorites and My Favorites (Local Cache) is enabled. Otherwise, log streams cannot be added to favorites.
+
+
+ You can customize a list of your favorite log streams for quickly locating frequently used log streams.
+ For example, to add a log stream of the log group lts-test to favorites, perform the following steps:
+ - Log in to the LTS console.
- In the Log Groups list, click
next to the log group name lts-test. - Click
on the right of the log stream. On the displayed Edit tab page, select a mode and click OK.
+
+
+
+
+
+
+
+ You can remove a favorite in either of the following ways:
+ - In the log stream list, click
in the row containing a log stream. - In the My Favorites area, hover the cursor over a log stream and click
.
+
+
+
+ Recently VisitedThis area displays the log streams that are recently visited.
+
+ 
+ A maximum of three log streams can be displayed in Recently Visited.
+
+
+ FAQThis area displays frequently asked questions.
+ To view more FAQs, click More.
+
+
+
+ 
+
+
+
+
+
+
+
diff --git a/docs/lts/umn/lts_04_1154.html b/docs/lts/umn/lts_04_1154.html
new file mode 100644
index 00000000..568a3a8c
--- /dev/null
+++ b/docs/lts/umn/lts_04_1154.html
@@ -0,0 +1,44 @@
+
+
+ Resource Statistics
+ Log resource statistics are classified into read/write traffic, index traffic, and log volume. The statistics are for reference only. You can also visualize log resource statistics in charts.
+ - Read/Write: LTS charges for the amount of compressed log data read from and written to LTS. Generally, the log compression ratio is 5:1.
- Indexing: Raw logs are full-text indexed by default for log search.
- Log Volume: Space used for storing compressed logs, indexes, and copies is billed. The space is roughly the size of the raw logs.
+ Yesterday's Statistics
+
+
+ 
+
+
+
+
+ This area shows LTS log resource statistics, day-on-day changes, and data of each hour of the previous day.
+ - The read and write traffic, index traffic, and log volume of the previous day are displayed.
- Yesterday's day-on-day changes show a trend.
- Trend charts display yesterday's traffic (or log volume) by hour. The interval between two points is one hour. The unit is KB, MB, or GB.
+
+ Resource Statistics Details
+
+
+ 
+
+
+
+
+
+ Resource statistics details display the top 100 log groups or log streams by read/write traffic, index traffic, and latest log volume. By default, the log groups or log streams are sorted by the latest log volume (GB). You can also sort the statistics by read/write or index traffic.
+
+
+
+
+
diff --git a/docs/lts/umn/lts_05_0003.html b/docs/lts/umn/lts_05_0003.html
new file mode 100644
index 00000000..d558075b
--- /dev/null
+++ b/docs/lts/umn/lts_05_0003.html
@@ -0,0 +1,17 @@
+
+
+ Log Collection
+ To reduce the memory, database, and disk space usage, you can set log collection as required. The log collection switch is used to determine whether to collect log data.
+ - Log in to the LTS console, choose Configuration Center in the navigation pane on the left, and click the Log Collection tab.
- Enable or disable Log Collection.
Figure 1 Enabling or disabling Log Collection
+ This function is enabled by default. If you do not need to collect logs, disable this function to reduce resource usage.
+ After the log collection function is disabled, ICAgents will stop collecting logs, and this function on the AOM console will also be disabled.
+
+
+
+
+
+
diff --git a/docs/lts/umn/lts_05_0004.html b/docs/lts/umn/lts_05_0004.html
new file mode 100644
index 00000000..0b182eeb
--- /dev/null
+++ b/docs/lts/umn/lts_05_0004.html
@@ -0,0 +1,17 @@
+
+
+ Log Search and View
+
+
+
diff --git a/docs/lts/umn/lts_05_0005.html b/docs/lts/umn/lts_05_0005.html
new file mode 100644
index 00000000..fb395fa9
--- /dev/null
+++ b/docs/lts/umn/lts_05_0005.html
@@ -0,0 +1,264 @@
+
+
+ Log Search
+ Follow the directions below to search logs by keyword and time range:
+ - On the LTS console, choose Log Management in the navigation pane on the left.
- In the log group list, click
on the left of a log group name. - In the log stream list, click a log stream name.
+
+
+
+
+ - In the upper right corner, select a time range. The default time range is 1 hour (from now).
There are three types of time range: relative time from now, relative time from last, and specified time. Select a time range as required. - From now: queries log data generated in a time range that ends with the current time, such as the previous 1, 5, or 15 minutes. For example, if the current time is 19:20:31 and 1 hour is selected as the relative time from now, the charts on the dashboard display the log data that is generated from 18:20:31 to 19:20:31.
- From last: queries log data generated in a whole-unit time range that ends at the last full unit before the current time, such as the previous 1 or 15 minutes. For example, if the current time is 19:20:31 and 1 hour is selected as the relative time from last, the charts on the dashboard display the log data that is generated from 18:00:00 to 19:00:00.
- Specified time: queries log data that is generated in a specified time range.
+
+
+ - On the log stream details page, you can search for logs using the following methods:
- In the search area, click in the search box. The drop-down list contains the following items:
- Structured fields or index fields: Built-in fields are not displayed in the drop-down list. However, when you enter a built-in field, the drop-down list is automatically associated and matched with the field.
- NOT, AND, OR, :, and :* keywords can be displayed. Keywords other than NOT are displayed in the drop-down list only after you enter the keyword in the search box.
- When entering a keyword, you can press Tab to automatically add the first keyword displayed in the drop-down list.
- Keywords are case-insensitive.
+
+ - Historical records: A maximum of 20 historical records can be retained, but only the latest three records are displayed in the drop-down list.
- Quick search: quick search fields that have been created.
- Search syntax: common search syntax.
+Enter a keyword, or select a field and keyword from the drop-down list, and click Query.
+Logs that contain the keyword are displayed.
+ - Built-in fields include appName, category, clusterId, clusterName, collectTime, containerName, hostIP, hostIPv6, hostId, hostName, nameSpace, pathFile, podName and serviceID. By default, the fields are displayed in simplified mode, and hostIP, hostName, and pathFile are displayed at the beginning.
+
- The structured fields are displayed in key:value format.
+
+ - On the Raw Logs page, click a field in blue in the log content. You can select Copy, Add To Search, and Exclude from Search from the displayed drop-down list.
- Click a field for which quick analysis has been created to add it to the search box.
If the field you click already exists in the search box, it will be replaced by this newly added one. If the field is added for the first time, fields in the search box are searched using the AND operator.
+
+ - In the search area, press the up and down arrows on the keyboard to select a keyword or search syntax from the drop-down list, press Tab or Enter to select a keyword or syntax, and click Query.
+
+ Common Log Search OperationsLog search operations include sharing logs and refreshing logs.
+
+ Table 1 Common operationsOperation
+ |
+Description
+ |
+
+
+Creating quick search criteria
+ |
+Click to create a quick search.
+ |
+
+Sharing logs
+ |
+Click to copy the link of the current log search page to share the logs that you have searched.
+ |
+
+Refreshing logs
+ |
+You can click to refresh logs in two modes: manual refresh and automatic refresh.
+- Manual refresh: Select Refresh Now from the drop-down list.
- Automatic refresh: Select an interval from the drop-down list to automatically refresh logs. The interval can be 15 seconds, 30 seconds, 1 minute, or 5 minutes.
+ |
+
+Copying logs
+ |
+Click to copy the log content.
+ |
+
+Viewing context of a log
+ |
+Click to view the log context.
+ |
+
+Simplifying field details
+ |
+Click to view the simplified field details.
+ |
+
+Unfold/Fold
+ |
+Click  to display the full log content. NOTE: Unfold is enabled by default.
+
+
+ |
+
+Downloading logs
+ |
+Click . On the displayed Download Logs page, click Direct Download or Transfer and Download.
+Direct Download: Download log files to the local PC. Up to 5000 logs can be downloaded at a time.
+Select .csv or .txt from the drop-down list and click Download to export logs to the local PC.
+ NOTE: - If you select Export .csv, logs are exported as a table.
- If you select Export .txt, logs are exported as a .txt file.
+
+ |
+
+Layout
+ |
+Move the cursor over and choose Layout from the drop-down list. On the displayed Layout page, specify whether to simplify field display and show fields.
+- Simple View: If this is enabled, the fields are displayed in a simplified manner.
- Show/Hide: When the visibility of a field is disabled, the field is not displayed in the log content.
+ |
+
+JSON
+ |
+Move the cursor over , click JSON, and set JSON formatting.
+ NOTE: Formatting is enabled by default. The default number of expanded levels is 2.
+
+- Formatting enabled: Set the default number of expanded levels. Maximum value: 10.
- Formatting disabled: JSON logs will not be formatted for display.
+ |
+
+Invisible fields ( )
+ |
+This list displays the invisible fields configured in the layout settings.
+- The
button is unavailable for log streams without layout settings configured. - If the log content is CONFIG_FILE and layout settings are not configured, the default invisible fields include appName, clusterId, clusterName, containerName, hostIPv6, NameSpace, podName, and serviceID.
+ |
+
+
+
+
+
+ Search Syntax and ExamplesSearch syntax:
+
+ Table 2 Search syntaxFilter
+ |
+Description
+ |
+
+
+Exact search by keyword
+ |
+LTS searches for logs containing the exact keyword (case-sensitive) that you specify. A keyword is the word between two adjacent delimiters.
+You can add an asterisk (*) after a keyword, for example, error*, if you are not familiar with delimiters.
+ |
+
+Exact search by phrase
+ |
+LTS searches for logs containing the exact phrase (case-sensitive) that you specify.
+ |
+
+&&
+ |
+Intersection of search results
+ |
+
+||
+ |
+Union of search results
+ |
+
+AND
+ |
+Intersection of search results
+ |
+
+and
+ |
+Intersection of search results
+ |
+
+OR
+ |
+Union of search results
+ |
+
+or
+ |
+Union of search results
+ |
+
+NOT
+ |
+Logs that contain the keyword after NOT are excluded.
+ |
+
+not
+ |
+Logs that contain the keyword after not are excluded.
+ |
+
+?
+ |
+Fuzzy search. The question mark (?) can be put in the middle or at the end of a keyword to replace a character.
+ |
+
+>
+ |
+Search for structured long or float fields with values greater than a specified number. For example, num > 10.
+ |
+
+<
+ |
+Search for structured long or float fields with values less than a specified number. For example, num < 10.
+ |
+
+=
+ |
+Search for structured long or float fields with values equal to a specified number. For example, num = 10.
+ |
+
+>=
+ |
+Search for structured long or float fields with values greater than or equal to a specified number. For example, num >= 10.
+ |
+
+<=
+ |
+Search for structured long or float fields with values less than or equal to a specified number. For example, num <= 10.
+ |
+
+:
+ |
+Search for a specified field (key:value). For example, request_method:GET.
+Use double quotation marks ("") to enclose a field name or value that contains reserved characters, such as spaces and colons (:). For example, "file info":apsara.
+ |
+
+""
+ |
+Enclose a syntax keyword to convert it into common characters. For example, "and".
+This "and" means searching for logs that contain this word. It is not an operator.
+All words enclosed in double quotation marks ("") are considered as a whole.
+ |
+
+\
+ |
+Escape double quotation marks (""). The escaped quotation marks indicate the symbol itself. For example, to search for instance_id:nginx"01", use instance_id:nginx\"01\".
+ |
+
+*
+ |
+An asterisk (*) can be placed only after the keyword and can match zero, one, or multiple characters. For example, host:abcd*c.
+ NOTE: LTS will find 100 words that meet the search criteria in all logs and return these logs.
+
+ |
+
+in
+ |
+Query logs whose field values are in a specified range. Brackets indicate a closed interval, and parentheses indicate an open interval. Numbers are separated with spaces. Example:
+request_time in [100 200] and request_time in (100 200]
+ NOTE: Enter in in lowercase and use only long or float fields.
+
+ |
+
+()
+ |
+Specify fields that should be matched with higher priority. Use and, or, and not to connect fields. Example: (request_method:GET or request_method:POST) and status:200
+ |
+
+key:#"abc def"
+ |
+Search for specified field names and values (key:value) after field indexing is configured.
+ |
+
+#"abc def"
+ |
+Full text search. LTS splits an entire log into multiple words based on the delimiter you set. Search for logs using specified keywords (field name and value) and rules.
+ |
+
+
+
+
+ Operators (such as &&, ||, AND, OR, NOT, *, ?, :, >, <, =, >=, and <=) contained in raw logs cannot be used to search for logs.
+
+ Search rules:
+ - Fuzzy search is supported.
For example, if you enter error*, all logs containing error will be displayed and those starting with error will be highlighted.
+ - You can use a combination of multiple search criteria in the key and value format: key1:value1 AND key2:value2 or key1:value1 OR key2:value2. After entering or selecting key1:value1, you need to add AND or OR before entering or selecting key2:value2 in the search box.
- Click a keyword and select one of the three operations from the displayed drop-down list: Copy, Add To Search, and Exclude from Search.
Copy: Copy the field.
+Add To Search: Add AND field: value to the search statement.
+Exclude from Search: Add NOT field: value to the query statement.
+
+ Searching sample
+ - Search for logs containing start: Enter start.
- Search for logs containing start to refresh: Enter start to refresh.
- Search for the logs containing both keyword start and unexpected: Enter start && unexpected.
- Search for logs containing both start and unexpected: Enter start AND unexpected.
- Search for the logs containing keyword start or unexpected: Enter start || unexpected.
+ - Search for logs containing start or unexpected: Enter start OR unexpected.
- Logs that do not contain query1: NOT content: query1.
- error*: logs that contain words starting with error.
- er?or: logs that start with er, are followed by any single character, and end with or.
- If your keyword contains a colon (:), use the content: Keyword format. Example: content: "120.46.138.115:80" or content: 120.46.138.115:80.
- query1 AND query2 AND NOT content: query3: logs that contain both query1 and query2 but not query3.
+ - When you enter a keyword to query logs, the keyword is case-sensitive. Log contents you queried are case-sensitive but the highlighted log contents are case-insensitive.
- The asterisk (*) and question mark (?) do not match special characters such as hyphens (-) and spaces.
- For fuzzy match, the question mark (?) or asterisk (*) can only go in the middle or at the end of a keyword. For example, you can enter ER?OR or ER*R.
- When you search logs by keyword, if a single log contains more than 255 characters, exact search may fail.
+
+
+
+
+
diff --git a/docs/lts/umn/lts_05_0006.html b/docs/lts/umn/lts_05_0006.html
new file mode 100644
index 00000000..086be464
--- /dev/null
+++ b/docs/lts/umn/lts_05_0006.html
@@ -0,0 +1,21 @@
+
+
+ Viewing Real-Time Logs
+ You can view reported logs on the LTS console in real time.
+ Prerequisites- You have created log groups and log streams.
- You have installed ICAgent.
- You have configured log collection rules.
+
+ Procedure- On the LTS console, click Log Management.
- In the log group list, click
on the left of a log group name. - In the log stream list, click a log stream name. The log stream details page is displayed.
- On the log stream details page, click the Real-Time Logs tab to view logs in real time.
+
+ Logs are reported to LTS once every minute. You may wait for at most 1 minute before the logs are displayed.
+ In addition, you can customize log display by clicking Clear or Pause in the upper right corner.
+ Stay on the Real-Time Logs tab to keep updating them in real time. If you leave the Real-Time Logs tab page, logs will stop being loaded in real time. The next time you access the tab, the logs that were shown before you left the tab will not be displayed.
+
+
+
+
+
diff --git a/docs/lts/umn/lts_05_0007.html b/docs/lts/umn/lts_05_0007.html
new file mode 100644
index 00000000..416f3aee
--- /dev/null
+++ b/docs/lts/umn/lts_05_0007.html
@@ -0,0 +1,18 @@
+
+
+ Quick Analysis
+ Monitoring keywords in logs helps you keep track of system performance and services. For example, the number of ERROR keywords indicates the system health, and the number of BUY keywords indicates the sales volume. LTS provides quick analysis for you to obtain statistics on your specified keywords.
+ PrerequisitesQuick analysis is conducted on fields extracted from structured logs. Structure raw logs before you create a quick analysis task.
+
+ Creating a Quick Analysis TaskYou can enable Quick Analysis for the fields on the Log Structuring page. You can also perform the following steps to create a quick analysis task:
+ - Log in to the LTS console. In the navigation pane on the left, choose Log Management.
- A quick analysis is performed on a log stream. Select the target log group and log stream on the Log Management page.
- Click Set Quick Analysis or
. On the displayed page, add fields for quick analysis. - Click OK. The quick analysis task is created.
+
+
+
+
+
diff --git a/docs/lts/umn/lts_05_0009.html b/docs/lts/umn/lts_05_0009.html
new file mode 100644
index 00000000..007a9e8c
--- /dev/null
+++ b/docs/lts/umn/lts_05_0009.html
@@ -0,0 +1,66 @@
+
+
+ Quick Search
+ To search for logs using a keyword repeatedly, perform the following operations to configure quick search.
+ Procedure- On the LTS console, choose Log Management in the navigation pane on the left.
- In the log group list, click
on the left of a log group name. - In the log stream list, click the name of the target log stream.
- On the log stream details page, click
and specify Name and Keyword.- A quick search name is used to distinguish multiple quick search statements. The name can be customized and must meet the following requirements:
- Can contain only letters, digits, hyphens (-), underscores (_), and periods (.).
- Cannot start with a period (.) or underscore (_) or end with a period (.).
- Can contain 1 to 64 characters.
+ - A quick search statement is used to repeatedly search for logs, for example, error*.
+
+ - Click OK.
Click the name of a quick search statement to view log details.
+
+
+ Viewing Context of a LogYou can check the logs generated before and after a log for quick fault locating.
+ - On the LTS console, choose Log Management in the navigation pane on the left.
- In the log group list, click
on the left of a log group name. - In the log stream list, click the name of the target log stream.
- On the Raw Logs tab page, click
to view the context.The context of the log is displayed.
+ - On the displayed View Context page, check the log context.
+
Table 1 Introduction to log context viewingFeature
+ |
+Description
+ |
+
+
+Search Rows
+ |
+Number of rows to search. The options are 100, 200, and 500.
+ |
+
+Highlighting
+ |
+Enter a string to be highlighted and press Enter.
+ |
+
+Filter
+ |
+Enter a string to be filtered and press Enter. When both Highlighting and Filter are configured, the filtered string can also be highlighted.
+ |
+
+Fields
+ |
+The default field for viewing log context is content. Click Fields to view the context of other fields.
+ |
+
+Prev
+ |
+View half the number of Search Rows leading to the current position. For example, if Search Rows is set to 100 and you click Prev, 50 rows prior to the current position are displayed. In this case, the current line number is -50. If you click Prev again, the line number will become -100, -150, -200, and so on.
+ |
+
+Current
+ |
+Current log position. When Prev or Update is set, you can click Current to return to the position where the context starts (when the line number is 0).
+ |
+
+Update
+ |
+View half the number of Search Rows following the current position. For example, if Search Rows is set to 100 and you click Update, 50 rows following the current position are displayed. In this case, the current line number is 50. If you click Update again, the line number will become 100, 150, 200, and so on.
+ |
+
+
+
+
+
+
+
+
+
diff --git a/docs/lts/umn/lts_06_0003.html b/docs/lts/umn/lts_06_0003.html
new file mode 100644
index 00000000..f4f52501
--- /dev/null
+++ b/docs/lts/umn/lts_06_0003.html
@@ -0,0 +1,12 @@
+
+
+ Configuration Center
+
+
+
diff --git a/docs/lts/umn/lts_0821.html b/docs/lts/umn/lts_0821.html
new file mode 100644
index 00000000..d645e2b2
--- /dev/null
+++ b/docs/lts/umn/lts_0821.html
@@ -0,0 +1,366 @@
+
+
+ Log Structuring
+ Log data can be structured or unstructured. Structured data is quantitative data or can be defined by unified data models. It has a fixed length and format. Unstructured data has no pre-defined data models and cannot be fit into two-dimensional tables of databases.
+ During log structuring, logs with fixed or similar formats are extracted from a log stream based on your defined structuring method and irrelevant logs are filtered out. You can then use SQL syntax to query and analyze the structured logs.
+ Prerequisite- You have created a log stream.
- Log structuring is recommended when most logs in a log stream share a similar pattern.
+
+ Structuring Raw LogsAdd structuring rules to a log stream and LTS will extract logs based on the rules. You can then use the SQL syntax to query logs.
+ To structure logs:
+ - Log in to the LTS console. In the navigation pane on the left, choose Log Management.
- Select a log group and a log stream.
- On the log stream details page, click
in the upper right corner. On the Log Structuring tab, select an extraction method to structure logs.- Regular Expressions: Fields are extracted using regular expressions.
- JSON: Key-value pairs are extracted from JSON log events.
- Delimiter: Fields are extracted using delimiters (such as commas or spaces).
- Nginx: Key-value pairs are extracted from Nginx log events. You can use the log_format directive to define the format of access logs.
- Structuring template: Custom and system templates are provided, including templates for VPC, DCS audit logs, Tomcat, and Nginx.
+Tag Fields: You can also extract tag fields, which are customizable and are usually host IP addresses or collection paths to show where and when logs were collected. Tag fields are supported for structuring with regular expressions, JSON, delimiters, and Nginx.
+Figure 1 Log structuring
+
+You can then use SQL statements to query structured logs in the same way as you query data in two-dimensional database tables.
+ - If a structured field exceeds 20 KB, only the first 20 KB is retained.
- The following system fields cannot be extracted during log structuring: groupName, logStream, lineNum, content, logContent, logContentSize, collectTime, category, clusterId, clusterName, containerName, hostIP, hostId, hostName, nameSpace, pathFile, and podName.
+
+
+
+ Regular Expressions- Select a typical log event as the sample. Click Select from existing log events, select a log event, and click OK. You can select different time ranges to filter logs.
There are three types of time range: relative time from now, relative time from last, and specified time. Select a time range as required.
+ - From now: queries log data generated in a time range that ends with the current time, such as the previous 1, 5, or 15 minutes. For example, if the current time is 19:20:31 and 1 hour is selected as the relative time from now, the charts on the dashboard display the log data that is generated from 18:20:31 to 19:20:31.
- From last: queries log data generated in a whole-unit time range that ends at the last full unit before the current time, such as the previous 1 or 15 minutes. For example, if the current time is 19:20:31 and 1 hour is selected as the relative time from last, the charts on the dashboard display the log data that is generated from 18:00:00 to 19:00:00.
- Specified time: queries log data that is generated in a specified time range.
+
+
- Extract fields. Extracted fields are shown with their example values. You can extract fields in two ways:
- Auto generate: Select the log content you want to extract as a field in the sample log event. In the dialog box displayed, set the field name. The name must start with a letter and contain only letters and digits. Then click Add.
Figure 2 Selecting a field
+ - Manually enter: Enter a regular expression in the text box and click Extract Field. A regular expression may contain multiple capturing groups. Each group is enclosed in a pair of parentheses. There are three types of capturing groups:
- (exp): Capturing groups are numbered by counting their opening parentheses from left to right. The numbering starts with 1.
- (?<name>exp): named capturing group. It captures text that matches exp into the group name. The group name must start with a letter and contain only letters and digits. A group is recalled by group name or number.
- (?:exp): non-capturing group. It captures text that matches exp, but it is not named or numbered and cannot be recalled.
+
+ - Select a portion between two adjacent delimiters as a field. Default delimiters: , '";=()[]{}@&<>/:\n\t\r
- When you select manually enter, the regular expression can contain up to 5000 characters. You do not have to name capturing groups when writing the regular expression. When you click Extract Field, those unnamed groups will be named as field1, field2, field3, and so on.
+
+Check and edit the fields if needed. For details about rules for configuring extracted fields, see Configuring Structured Fields.
+ - Click Save. The type of extracted fields cannot be changed after the structuring is complete.
+
+ JSON- Select a typical log event as the sample. Click Select from existing log events, select a log event, or enter a log event in the text box, and click OK. You can select different time ranges to filter logs.
There are three types of time range: relative time from now, relative time from last, and specified time. Select a time range as required.
+ - From now: queries log data generated in a time range that ends with the current time, such as the previous 1, 5, or 15 minutes. For example, if the current time is 19:20:31 and 1 hour is selected as the relative time from now, the charts on the dashboard display the log data that is generated from 18:20:31 to 19:20:31.
- From last: queries log data generated in a whole-unit time range that ends at the last full unit before the current time, such as the previous 1 or 15 minutes. For example, if the current time is 19:20:31 and 1 hour is selected as the relative time from last, the charts on the dashboard display the log data that is generated from 18:00:00 to 19:00:00.
- Specified time: queries log data that is generated in a specified time range.
+
+
- Extract fields. Extract fields from the log event. Extracted fields are shown with their example values.
Click Intelligent Extraction. Take the following log event as an example.
+Enter the log event in the text box.
+{"record_time":"1632904298988","user":{"domain":{"name":"paas_apm_z00418070_01","id":"1d26cc8c86a840e28a4f8d0d07852f1d"},"assumedBy":{"user":{"domain":{"name":"xxxx","id":"xxxx"},"name":"xxxx","id":"xxxx"}},"name":"paas_apm_z00418070_01/cce_admin_trust"}}
+The following fields will be extracted:
+
+
+ The float data type has seven digit precision.
+ If a value contains more than seven valid digits, the extracted content is incorrect, which affects visualization and quick analysis. In this case, you are advised to change the field type to string.
+
+
+
+Check and edit the fields if needed. For details about rules for configuring extracted fields, see Configuring Structured Fields.
+ - Click Save. The type of extracted fields cannot be changed after the structuring is complete.
+
+ Delimiters- Select a typical log event as the sample. Click Select from existing log events, select a log event, or enter a log event in the text box, and click OK. You can select different time ranges to filter logs.
There are three types of time range: relative time from now, relative time from last, and specified time. Select a time range as required.
+ - From now: queries log data generated in a time range that ends with the current time, such as the previous 1, 5, or 15 minutes. For example, if the current time is 19:20:31 and 1 hour is selected as the relative time from now, the charts on the dashboard display the log data that is generated from 18:20:31 to 19:20:31.
- From last: queries log data generated in a whole-unit time range that ends at the last full unit before the current time, such as the previous 1 or 15 minutes. For example, if the current time is 19:20:31 and 1 hour is selected as the relative time from last, the charts on the dashboard display the log data that is generated from 18:00:00 to 19:00:00.
- Specified time: queries log data that is generated in a specified time range.
+
+ - Select a delimiter or define a custom one.
Figure 3 Defining a custom delimiter
+
- Extract fields. Extract fields from the log event. Extracted fields are shown with their example values.
Click Intelligent Extraction. Take the following log event as an example.
+Enter the log event in the text box.
+1 5f67944957444bd6bb4fe3b367de8f3d 1d515d18-1b36-47dc-a983-bd6512aed4bd 192.168.0.154 192.168.3.25 38929 53 17 1 96 1548752136 1548752736 ACCEPT OK
+The following fields will be extracted:
+Figure 4 Intelligent extraction results
+
+ The float data type has seven digit precision.
+ If a value contains more than seven valid digits, the extracted content is incorrect, which affects visualization and quick analysis. In this case, you are advised to change the field type to string.
+
+
+Check and edit the fields if needed. For details about rules for configuring extracted fields, see Configuring Structured Fields.
+ - Click Save. The type of extracted fields cannot be changed after the structuring is complete.
+
+ Nginx- Select a typical log event as the sample. Click Select from existing log events, select a log event, or enter a log event in the text box, and click OK. You can select different time ranges to filter logs.
There are three types of time range: relative time from now, relative time from last, and specified time. Select a time range as required.
+ - From now: queries log data generated in a time range that ends with the current time, such as the previous 1, 5, or 15 minutes. For example, if the current time is 19:20:31 and 1 hour is selected as the relative time from now, the charts on the dashboard display the log data that is generated from 18:20:31 to 19:20:31.
- From last: queries log data generated in a whole-unit time range that ends at the last full unit before the current time, such as the previous 1 or 15 minutes. For example, if the current time is 19:20:31 and 1 hour is selected as the relative time from last, the charts on the dashboard display the log data that is generated from 18:00:00 to 19:00:00.
- Specified time: queries log data that is generated in a specified time range.
+
+ - Define the Nginx log format. You can click Apply Default Nginx Log Format to apply the default format.
Figure 5 Defining the Nginx log format
+ In standard Nginx configuration files, the portion starting with log_format indicates the log configuration.
+ Log format
+
+
+
- Extract fields. Extract fields from the log event. Extracted fields are shown with their example values.
Click Intelligent Extraction. Take the following log event as an example.
+Enter the log event in the text box.
+39.149.31.187 - - [12/Mar/2020:12:24:02 +0800] "GET / HTTP/1.1" 304 0 "-" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36" "-"
+Also, you have configured the following Nginx log format in step 2:
+log_format main '$remote_addr - $remote_user [$time_local] "$request" '
+ '$status $body_bytes_sent "$http_referer" '
+ '"$http_user_agent" "$http_x_forwarded_for"';
+The following fields will be extracted:
+Figure 6 Intelligent extraction results
+ The float data type has seven digit precision.
+ If a value contains more than seven valid digits, the extracted content is incorrect, which affects visualization and quick analysis. In this case, you are advised to change the field type to string.
+
+
+Check and edit the fields if needed. For details about rules for configuring extracted fields, see Configuring Structured Fields.
+ - Click Save. The type of extracted fields cannot be changed after the structuring is complete.
+
+ Structuring TemplatesSystem Templates
+ - Select System template, and select a system template for VPC, DCS audit logs, Tomcat, and Nginx. A sample log event is displayed for each template.
- When you select a custom template, the log parsing result is displayed in the Template Details area. Click Save.
Figure 7 Structuring templates
+
+ Custom Templates
+ - Select Custom template and select a custom template. There are two ways to obtain a custom template:
- When you extract fields using methods of regular expression, JSON, delimiter, or Nginx, click Save as Template in the lower left corner. In the displayed dialog box, enter the template name and click OK. The template will be displayed in the custom template list.
Figure 8 Custom templates
+ - Create a custom template under the Structuring Template option.
Select Custom template and click Create Template. Enter a template name, select Regular Expressions, JSON, Delimiter, or Nginx, configure the template, and click Save. The template will be displayed in the custom template list.
+
+
+
+
+
+
+
+
+
+
+
+
+
- When you select a custom template, the log parsing result is displayed in the Template Details area. Click Save.
You can enable quick analysis for fields if needed.
+ The float data type has seven digit precision.
+ If a value contains more than seven valid digits, the extracted content is incorrect, which affects visualization and quick analysis. In this case, you are advised to change the field type to string.
+
+
+
+ Tag FieldsWhen you structure logs, you can configure tag fields, so you can use these fields to run SQL queries on the Visualization page.
+ - During field extraction, click the Tag Fields tab.
- Click Add Field.
- In the Field column, enter the name of the tag field, for example, hostIP.
- Click the check mark next to the field name. If the field is a system tag field, it will be identified as System in the Source column.
+
If you configure tag fields for a structuring rule that was created before the function of tag fields was brought online, no example values will be shown with the tag fields.
+
+ - To add more fields, click Add Field.
- Click Save to save the settings.
- Tag fields can be the following system fields: category, clusterId, clusterName, containerName, hostIP, hostId, hostName, nameSpace, pathFile, and podName.
- Tag fields cannot be the following system fields: groupName, logStream, lineNum, content, logContent, logContentSize, and collectTime.
- Quick analysis is disabled for tag fields by default. You can enable it.
- You can configure both field extraction and tag fields during log structuring.
+
+
+
+ Configuring Structured FieldsYou can edit extracted fields after log structuring.
+
+ Table 1 Rules for configuring structured fieldsStructuring Method
+ |
+Field Name
+ |
+Field Type Can Be Changed
+ |
+Field Can Be Deleted
+ |
+
+
+Regular expressions (auto generate)
+ |
+User-defined.
+The name must start with a letter and contain only letters and digits.
+ |
+Yes
+ |
+Yes
+ |
+
+Regular expressions (manually enter)
+ |
+- User-defined.
- Default names such as field1, field2, and field3 will be used for unnamed fields. You can modify these names.
+ |
+Yes
+ |
+Yes
+ |
+
+JSON
+ |
+Names are set automatically, but you can set aliases for fields.
+ |
+Yes
+ |
+Yes
+ |
+
+Delimiter
+ |
+Default names such as field1, field2, and field3 are used. You can modify these names.
+ |
+Yes
+ |
+Yes
+ |
+
+ELB structuring template
+ |
+Defined by ELB.
+ |
+No
+ |
+No
+ |
+
+VPC structuring template
+ |
+Defined by VPC.
+ |
+No
+ |
+No
+ |
+
+CTS structuring template
+ |
+Keys in JSON log events.
+ |
+No
+ |
+No
+ |
+
+APIG structuring template
+ |
+Defined by APIG.
+ |
+No
+ |
+No
+ |
+
+DCS audit logs
+ |
+Defined by DCS.
+ |
+No
+ |
+No
+ |
+
+Tomcat
+ |
+Defined by Tomcat.
+ |
+No
+ |
+No
+ |
+
+Nginx
+ |
+Defined by Nginx.
+ |
+No
+ |
+No
+ |
+
+GAUSSV5 audit logs
+ |
+Defined by GAUSSV5.
+ |
+No
+ |
+No
+ |
+
+DDS audit logs
+ |
+Defined by DDS.
+ |
+No
+ |
+No
+ |
+
+DDS error logs
+ |
+Defined by DDS.
+ |
+No
+ |
+No
+ |
+
+DDS slow query logs
+ |
+Defined by DDS.
+ |
+No
+ |
+No
+ |
+
+CFW access control logs
+ |
+Defined by CFW.
+ |
+No
+ |
+No
+ |
+
+CFW attack logs
+ |
+Defined by CFW.
+ |
+No
+ |
+No
+ |
+
+CFW traffic logs
+ |
+Defined by CFW.
+ |
+No
+ |
+No
+ |
+
+MySQL error logs
+ |
+Defined by MySQL.
+ |
+No
+ |
+No
+ |
+
+MySQL slow query logs
+ |
+Defined by MySQL.
+ |
+No
+ |
+No
+ |
+
+PostgreSQL error logs
+ |
+Defined by PostgreSQL.
+ |
+No
+ |
+No
+ |
+
+SQL Server error logs
+ |
+Defined by SQL Server.
+ |
+No
+ |
+No
+ |
+
+GaussDB(for Redis) slow query logs
+ |
+Defined by GaussDB(for Redis).
+ |
+No
+ |
+No
+ |
+
+Custom template
+ |
+User-defined.
+ |
+Yes
+ |
+Yes
+ |
+
+
+
+
+ When you use regular expressions (manually entered), JSON, delimiters, Nginx, or custom templates to structure logs, field names:
+ - Can contain only letters, digits, hyphens (-), underscores (_), and periods (.).
- Cannot start with a period (.) or underscore (_) or end with a period (.).
- Can contain 1 to 64 characters.
+
+
+
+
+
diff --git a/docs/lts/umn/lts_faq_0003.html b/docs/lts/umn/lts_faq_0003.html
new file mode 100644
index 00000000..5010c833
--- /dev/null
+++ b/docs/lts/umn/lts_faq_0003.html
@@ -0,0 +1,14 @@
+
+
+ What Kind of Logs and Files Can LTS Collect?
+ Logs That Can Be Collected by LTS:- Host logs. ICAgent should be installed on the target hosts for log collection.
- Cloud service logs. To collect logs from cloud services, such as Elastic Load Balance (ELB) or Virtual Private Cloud (VPC), enable log reporting to LTS in the cloud services.
+
+ Files That Can Be Collected by LTS:If the collection path is set to a directory, for example, /var/logs/, only .log, .trace, and .out files in the directory are collected. If the collection path is set to the name of a file (only text files are supported), the specified file is collected. Note that LTS only collects logs generated in the last 7 days.
+
+
+
+
diff --git a/docs/lts/umn/lts_faq_0031.html b/docs/lts/umn/lts_faq_0031.html
new file mode 100644
index 00000000..12a4e8e6
--- /dev/null
+++ b/docs/lts/umn/lts_faq_0031.html
@@ -0,0 +1,16 @@
+
+
+ What Can I Do If I Cannot View Raw Logs on the LTS Console?
+ SymptomNo log events are displayed on the Raw Logs tab in a log stream on the LTS console.
+
+ Possible Causes- ICAgent has not been installed.
- The collection path is incorrectly configured.
- The Log Collection function on the LTS console is disabled.
- Log collection was stopped because your account is in arrears.
- The rate of writing logs into log streams or the length of single-line logs exceeds what is supported.
- The browser has slowed down because of the amount of log data.
+
+ Solution- Install the ICAgent. For details, see Installing ICAgent.
- If the collection path is set to a directory, for example, /var/logs/, only .log, .trace, and .out files in the directory are collected. If the collection path is set to the name of a file, ensure that the file is a text file.
- Log in to the LTS console, choose Configuration Center > Log Collection, and enable the Log Collection function.
- Use Google Chrome or Firefox to query logs.
+
+
+
+
diff --git a/docs/lts/umn/lts_faq_0044.html b/docs/lts/umn/lts_faq_0044.html
new file mode 100644
index 00000000..d14469bb
--- /dev/null
+++ b/docs/lts/umn/lts_faq_0044.html
@@ -0,0 +1,13 @@
+
+
+ Does LTS Delete Logs That Have Been Transferred to OBS Buckets?
+ No. During log transfer, logs are "replicated" to OBS buckets. To view transferred log files, click the name of the corresponding OBS bucket on the Log Transfer page of the LTS console, and you will be directed to the OBS console to check the files.
+
+ 
+
+
+
diff --git a/docs/lts/umn/lts_faq_0314.html b/docs/lts/umn/lts_faq_0314.html
new file mode 100644
index 00000000..47d0558e
--- /dev/null
+++ b/docs/lts/umn/lts_faq_0314.html
@@ -0,0 +1,11 @@
+
+
+ What Are the Common Causes of Abnormal Log Transfer?
+ - The OBS bucket used for log transfer has been deleted. Specify another bucket.
- Access control on the OBS bucket is incorrectly configured. Go to the OBS console to correct the settings.
+
+
+
diff --git a/docs/lts/umn/lts_faq_0610.html b/docs/lts/umn/lts_faq_0610.html
new file mode 100644
index 00000000..884dd026
--- /dev/null
+++ b/docs/lts/umn/lts_faq_0610.html
@@ -0,0 +1,19 @@
+
+
+ How Do I Transfer CTS Logs to an OBS Bucket?
+ When Cloud Trace Service (CTS) is connected to LTS, a log group and log stream are automatically created for CTS on the LTS console. To transfer CTS logs to OBS, do as follows:
+ - Log in to the CTS console and choose Tracker List in the navigation pane on the left.
- Click Configure on the row of the system tracker.
+
+
+
- Click Next to enable Transfer to LTS.
+
- Access the LTS console, choose Log Transfer in the navigation pane on the left, and click Configure Log Transfer in the upper right corner.
Set Log Group Name to CTS and Log Stream Name to system-trace. Specify other parameters and click OK to transfer CTS logs to the selected OBS bucket.
+
+
+ - View the transferred CTS logs in the specified OBS bucket on the OBS console.
+
+
+
diff --git a/docs/lts/umn/public_sys-resources/icon-arrowdn.gif b/docs/lts/umn/public_sys-resources/icon-arrowdn.gif
index 84eec9be..37942803 100644
Binary files a/docs/lts/umn/public_sys-resources/icon-arrowdn.gif and b/docs/lts/umn/public_sys-resources/icon-arrowdn.gif differ
diff --git a/docs/lts/umn/public_sys-resources/icon-arrowrt.gif b/docs/lts/umn/public_sys-resources/icon-arrowrt.gif
index 39583d16..6aaaa11c 100644
Binary files a/docs/lts/umn/public_sys-resources/icon-arrowrt.gif and b/docs/lts/umn/public_sys-resources/icon-arrowrt.gif differ
|
---|
|
---|
|
|
---|