
Commit c1cc7ee

Add skeleton database_observability.postgres component (#3945)
This adds a new database observability component for Postgres. The purpose of this component is to collect Postgres query analysis data, mirroring the existing mysql component. The required collectors for that functionality will be built over time; this change introduces the initial component so that the collectors can be added subsequently. The most basic collector, connection_info, is included so we can begin sending the database_observability_connection_info metric for Postgres databases. A docs page is added for the new component, but the component README is not yet updated to reference Postgres while we work out the details of configuring the database. The changelog isn't updated to include this addition, as we don't expect adoption of this component yet - however, happy to add an entry if it makes sense to do so.
1 parent 1231f7e commit c1cc7ee

File tree

7 files changed, +598 -0 lines changed


docs/sources/reference/compatibility/_index.md

Lines changed: 2 additions & 0 deletions
```diff
@@ -46,6 +46,7 @@ The following components, grouped by namespace, _export_ Targets.
 {{< collapse title="database_observability" >}}
 - [database_observability.mysql](../components/database_observability/database_observability.mysql)
+- [database_observability.postgres](../components/database_observability/database_observability.postgres)
 {{< /collapse >}}

 {{< collapse title="discovery" >}}
@@ -245,6 +246,7 @@ The following components, grouped by namespace, _consume_ Loki `LogsReceiver`.
 {{< collapse title="database_observability" >}}
 - [database_observability.mysql](../components/database_observability/database_observability.mysql)
+- [database_observability.postgres](../components/database_observability/database_observability.postgres)
 {{< /collapse >}}

 {{< collapse title="faro" >}}
```
Lines changed: 99 additions & 0 deletions
---
canonical: https://grafana.com/docs/alloy/latest/reference/components/database_observability.postgres/
description: Learn about database_observability.postgres
title: database_observability.postgres
labels:
  stage: experimental
  products:
    - oss
---

# `database_observability.postgres`

{{< docs/shared lookup="stability/experimental.md" source="alloy" version="<ALLOY_VERSION>" >}}

## Usage

```alloy
database_observability.postgres "<LABEL>" {
  data_source_name = <DATA_SOURCE_NAME>
  forward_to       = [<LOKI_RECEIVERS>]
}
```

## Arguments

You can use the following arguments with `database_observability.postgres`:

| Name               | Type                 | Description                                                  | Default | Required |
|--------------------|----------------------|--------------------------------------------------------------|---------|----------|
| `data_source_name` | `secret`             | [Data Source Name][] for the Postgres server to connect to. |         | yes      |
| `forward_to`       | `list(LogsReceiver)` | Where to forward log entries after processing.              |         | yes      |

## Blocks

The `database_observability.postgres` component doesn't support any blocks. You can configure this component with arguments.

## Example

```alloy
database_observability.postgres "orders_db" {
  data_source_name = "postgres://user:pass@localhost:5432/mydb"
  forward_to       = [loki.write.logs_service.receiver]
}

prometheus.scrape "orders_db" {
  targets      = database_observability.postgres.orders_db.targets
  honor_labels = true // required to keep job and instance labels
  forward_to   = [prometheus.remote_write.metrics_service.receiver]
}

prometheus.remote_write "metrics_service" {
  endpoint {
    url = sys.env("<GRAFANA_CLOUD_HOSTED_METRICS_URL>")

    basic_auth {
      username = sys.env("<GRAFANA_CLOUD_HOSTED_METRICS_ID>")
      password = sys.env("<GRAFANA_CLOUD_RW_API_KEY>")
    }
  }
}

loki.write "logs_service" {
  endpoint {
    url = sys.env("<GRAFANA_CLOUD_HOSTED_LOGS_URL>")

    basic_auth {
      username = sys.env("<GRAFANA_CLOUD_HOSTED_LOGS_ID>")
      password = sys.env("<GRAFANA_CLOUD_RW_API_KEY>")
    }
  }
}
```

Replace the following:

* _`<GRAFANA_CLOUD_HOSTED_METRICS_URL>`_: The URL for your Grafana Cloud hosted metrics.
* _`<GRAFANA_CLOUD_HOSTED_METRICS_ID>`_: The user ID for your Grafana Cloud hosted metrics.
* _`<GRAFANA_CLOUD_RW_API_KEY>`_: Your Grafana Cloud API key.
* _`<GRAFANA_CLOUD_HOSTED_LOGS_URL>`_: The URL for your Grafana Cloud hosted logs.
* _`<GRAFANA_CLOUD_HOSTED_LOGS_ID>`_: The user ID for your Grafana Cloud hosted logs.

[Data Source Name]: https://pkg.go.dev/github.com/lib/pq#hdr-Connection_String_Parameters

<!-- START GENERATED COMPATIBLE COMPONENTS -->

## Compatible components

`database_observability.postgres` can accept arguments from the following components:

- Components that export [Loki `LogsReceiver`](../../../compatibility/#loki-logsreceiver-exporters)

`database_observability.postgres` has exports that can be consumed by the following components:

- Components that consume [Targets](../../../compatibility/#targets-consumers)

{{< admonition type="note" >}}
Connecting some components may not be sensible or components may require further configuration to make the connection work correctly.
Refer to the linked documentation for more details.
{{< /admonition >}}

<!-- END GENERATED COMPATIBLE COMPONENTS -->

internal/component/all/all.go

Lines changed: 1 addition & 0 deletions
```diff
@@ -4,6 +4,7 @@ package all
 import (
 	_ "github.com/grafana/alloy/internal/component/beyla/ebpf"                      // Import beyla.ebpf
 	_ "github.com/grafana/alloy/internal/component/database_observability/mysql"    // Import database_observability.mysql
+	_ "github.com/grafana/alloy/internal/component/database_observability/postgres" // Import database_observability.postgres
 	_ "github.com/grafana/alloy/internal/component/discovery/aws"                   // Import discovery.aws.ec2 and discovery.aws.lightsail
 	_ "github.com/grafana/alloy/internal/component/discovery/azure"                 // Import discovery.azure
 	_ "github.com/grafana/alloy/internal/component/discovery/consul"                // Import discovery.consul
```
Lines changed: 95 additions & 0 deletions
```go
package collector

import (
	"context"
	"regexp"
	"strings"

	"github.com/prometheus/client_golang/prometheus"
	"go.uber.org/atomic"
)

const ConnectionInfoName = "connection_info"

var (
	rdsRegex   = regexp.MustCompile(`(?P<identifier>[^\.]+)\.([^\.]+)\.(?P<region>[^\.]+)\.rds\.amazonaws\.com`)
	azureRegex = regexp.MustCompile(`(?P<identifier>[^\.]+)\.postgres\.database\.azure\.com`)
)

type ConnectionInfoArguments struct {
	DSN      string
	Registry *prometheus.Registry
}

type ConnectionInfo struct {
	DSN        string
	Registry   *prometheus.Registry
	InfoMetric *prometheus.GaugeVec

	running *atomic.Bool
}

func NewConnectionInfo(args ConnectionInfoArguments) (*ConnectionInfo, error) {
	infoMetric := prometheus.NewGaugeVec(prometheus.GaugeOpts{
		Namespace: "database_observability",
		Name:      "connection_info",
		Help:      "Information about the connection",
	}, []string{"provider_name", "provider_region", "db_instance_identifier", "engine"})

	args.Registry.MustRegister(infoMetric)

	return &ConnectionInfo{
		DSN:        args.DSN,
		Registry:   args.Registry,
		InfoMetric: infoMetric,
		running:    &atomic.Bool{},
	}, nil
}

func (c *ConnectionInfo) Name() string {
	return ConnectionInfoName
}

func (c *ConnectionInfo) Start(ctx context.Context) error {
	c.running.Store(true)

	var (
		providerName         = "unknown"
		providerRegion       = "unknown"
		dbInstanceIdentifier = "unknown"
		engine               = "postgres"
	)

	parts, err := ParseURL(c.DSN)
	if err != nil {
		return err
	}

	if host, ok := parts["host"]; ok {
		if strings.HasSuffix(host, "rds.amazonaws.com") {
			providerName = "aws"
			matches := rdsRegex.FindStringSubmatch(host)
			if len(matches) > 3 {
				dbInstanceIdentifier = matches[1]
				providerRegion = matches[3]
			}
		} else if strings.HasSuffix(host, "postgres.database.azure.com") {
			providerName = "azure"
			matches := azureRegex.FindStringSubmatch(host)
			if len(matches) > 1 {
				dbInstanceIdentifier = matches[1]
			}
		}
	}

	c.InfoMetric.WithLabelValues(providerName, providerRegion, dbInstanceIdentifier, engine).Set(1)

	return nil
}

func (c *ConnectionInfo) Stopped() bool {
	return !c.running.Load()
}

func (c *ConnectionInfo) Stop() {
	c.Registry.Unregister(c.InfoMetric)
	c.running.Store(false)
}
```
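
The collector above can be exercised directly against an in-memory registry. Below is a minimal, hypothetical sketch written as an Example-style function in the same `collector` package; the RDS-shaped DSN is a placeholder, not taken from this change:

```go
package collector

import (
	"context"
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

// ExampleNewConnectionInfo is an illustrative sketch only.
// The DSN is a placeholder with an RDS-shaped host so the regex has something to match.
func ExampleNewConnectionInfo() {
	reg := prometheus.NewRegistry()

	ci, err := NewConnectionInfo(ConnectionInfoArguments{
		DSN:      "postgres://user:pass@products-db.abc123xyz.eu-west-2.rds.amazonaws.com:5432/orders",
		Registry: reg,
	})
	if err != nil {
		panic(err)
	}
	defer ci.Stop()

	// Start parses the DSN once and sets the connection_info gauge to 1 with
	// provider_name="aws", provider_region="eu-west-2",
	// db_instance_identifier="products-db", engine="postgres".
	if err := ci.Start(context.Background()); err != nil {
		panic(err)
	}

	// Gather shows a single metric family with a single series.
	mfs, _ := reg.Gather()
	for _, mf := range mfs {
		fmt.Println(mf.GetName(), len(mf.GetMetric()))
	}
	// Output: database_observability_connection_info 1
}
```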
Lines changed: 61 additions & 0 deletions
```go
package collector

import (
	"fmt"
	"strings"
	"testing"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/testutil"
	"github.com/stretchr/testify/require"
	"go.uber.org/goleak"
)

func TestConnectionInfo(t *testing.T) {
	defer goleak.VerifyNone(t)

	const baseExpectedMetrics = `
# HELP database_observability_connection_info Information about the connection
# TYPE database_observability_connection_info gauge
database_observability_connection_info{db_instance_identifier="%s",engine="%s",provider_name="%s",provider_region="%s"} 1
`

	testCases := []struct {
		name            string
		dsn             string
		expectedMetrics string
	}{
		{
			name:            "generic dsn",
			dsn:             "postgres://user:pass@localhost:5432/mydb",
			expectedMetrics: fmt.Sprintf(baseExpectedMetrics, "unknown", "postgres", "unknown", "unknown"),
		},
		{
			name:            "AWS/RDS dsn",
			dsn:             "postgres://user:pass@products-db.abc123xyz.us-east-1.rds.amazonaws.com:5432/mydb",
			expectedMetrics: fmt.Sprintf(baseExpectedMetrics, "products-db", "postgres", "aws", "us-east-1"),
		},
		{
			name:            "Azure flexibleservers dsn",
			dsn:             "postgres://user:pass@products-db.postgres.database.azure.com:5432/mydb",
			expectedMetrics: fmt.Sprintf(baseExpectedMetrics, "products-db", "postgres", "azure", "unknown"),
		},
	}

	for _, tc := range testCases {
		reg := prometheus.NewRegistry()

		collector, err := NewConnectionInfo(ConnectionInfoArguments{
			DSN:      tc.dsn,
			Registry: reg,
		})
		require.NoError(t, err)
		require.NotNil(t, collector)

		err = collector.Start(t.Context())
		require.NoError(t, err)

		err = testutil.GatherAndCompare(reg, strings.NewReader(tc.expectedMetrics))
		require.NoError(t, err)
	}
}
```
Lines changed: 43 additions & 0 deletions
```go
package collector

import (
	"fmt"
	"strings"

	"github.com/lib/pq"
)

func ParseURL(url string) (map[string]string, error) {
	if url == "postgresql://" || url == "postgres://" {
		return map[string]string{}, nil
	}

	raw, err := pq.ParseURL(url)
	if err != nil {
		return nil, err
	}

	res := map[string]string{}

	unescaper := strings.NewReplacer(`\'`, `'`, `\\`, `\`)

	for keypair := range strings.SplitSeq(raw, " ") {
		parts := strings.SplitN(keypair, "=", 2)
		if len(parts) != 2 {
			return nil, fmt.Errorf("unexpected keypair %s from pq", keypair)
		}

		key := parts[0]
		value := parts[1]

		// Undo all the transformations ParseURL did: remove wrapping
		// quotes and then unescape the escaped characters.
		value = strings.TrimPrefix(value, "'")
		value = strings.TrimSuffix(value, "'")
		value = unescaper.Replace(value)

		res[key] = value
	}

	return res, nil
}
```
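
For reference, here's a small hypothetical sketch of what `ParseURL` returns for a typical DSN, again as an Example-style function in the same `collector` package. The DSN and hostname are placeholders; the expected output assumes lib/pq's documented expansion of the URL into space-separated key=value pairs:

```go
package collector

import "fmt"

// ExampleParseURL is an illustrative sketch only; the DSN is a placeholder.
func ExampleParseURL() {
	parts, err := ParseURL("postgres://user:pass@orders-db.example.internal:5432/orders?sslmode=disable")
	if err != nil {
		panic(err)
	}

	// pq.ParseURL expands the URL into pairs such as
	// "dbname=orders host=orders-db.example.internal port=5432 ...",
	// and ParseURL splits those back into a map.
	fmt.Println(parts["host"], parts["port"], parts["dbname"], parts["sslmode"])
	// Output: orders-db.example.internal 5432 orders disable
}
```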
