Compare commits
165 Commits
Commits in this comparison (SHA1):

6a3947bc17, d0b11b1688, 4a05471da0, 8ce5bcd22e, 965313c62b, 4e88ca340c, 369d3e117c, a8f6abee84,
f427012e6f, bb457e1ba9, 7ab7436c85, b046db6911, 88f7c47884, d6fed41e8d, 65443abea0, c8306988e6,
d800360ea2, b5dec11f7c, f2a75fdaba, 94d5de9c97, bac0119f1b, b5299ebcd3, 57765c6cfd, bb16957745,
4db0f92816, 0853cb83de, 5cc6b7a0b9, 719da070ee, 3951704b92, dff2773826, f72c4a3518, bd78a05f37,
498ce65af5, 98847ba0cb, 1b600447a8, b21c47c8de, 852d2d7a6d, aa3fe6dd2c, 85c5d19d4f, d6fa635377,
07092b51bc, 130db6c0f4, c272856047, 6fe7856808, 01491ec0b4, f59eb1fe94, 585a4a6b49, 6c5c7ea14f,
e6c4b42bfc, 5bd48aa9ac, e6537752d3, 1c9375d797, 0d14d22cf6, b79bbcb160, 4c6cc454bb, bb16436c95,
1d05b4605a, 871dcd6673, 0432c83899, 254a0c9bf2, eb4cc3159a, 35240d08c6, 23e5539688, cd8fdfa610,
7e92e99be3, 985e8ad180, 5cac4ee02b, 6a4f9935e6, c131ca1a0b, fd0700ed6f, ad8a55815c, ca3fe5d6a7,
4e7330b4f3, be5568d530, 084f595a86, 108504b9f9, 085640170d, 3c13d05414, 88b31ece52, d0bed6e6ce,
775b423152, 65f5edc83b, ac5b8b8287, d0779320e3, 8d02af4b78, 86ce9b2681, 7675418a2a, 07b8b7c993,
f644edc500, 3d588023b0, ec963a67db, 17a9cc67f2, 4c1909a659, 0449e563d8, e8509f74d5, ddb01405df,
ff00fc5677, 4f88a05f4d, c32cb948d3, 9c96ccd888, a3c276a3e0, e2b6272c71, 0873b84e68, 9a0b5b5d7d,
71469cf4c9, dc9baafbdb, 555eec64af, 44a67c1f89, 56a8fd65cc, dd6bf20f8d, 758bfa4c5e, 4455ade49e,
e8c2a9c7fd, 1ea457d9f1, e9c57c811f, 9f3b8d6399, da3913a25e, 4084504c80, cf90b3e903, 16c6eabe13,
d768d3edac, 1535c5f7e9, bf95c539ce, cbdc1f5d67, 97dbe0c4c8, 022e8a2fd5, 6e3a433c82, 50f155917a,
e30d98f0cf, 9955f3feeb, b47769817e, 9c8093e630, d335a22666, ee8faf706d, b5b5148dec, 27a6b9806d,
a04d065423, 8b4b14b7a9, 11e1508a16, a6da9c1b0a, c66d1973e4, 4a87daf97f, 91562472ec, 8a67a2d7be,
ac6cae3aa6, 4db8affe13, 0328bacd49, 4db3f39baa, 4193bcb107, 2b7efe725f, afc402f3cc, 0c589507f0,
76c4f1fb6f, 3a5c2e235c, f649350867, 6a45a4d961, b10634d666, a8c3c11fd2, 83c3d857cc, 0648f8e095,
9da371065d, 9622f5745b, 8c21b29bfd, 59f3141f2a, f97a4a7bc8
.github/workflows/test.yml (vendored, 2 changes)
@@ -29,7 +29,7 @@ jobs:
- name: Setup Go
uses: actions/setup-go@v3
with:
go-version: '1.18.x'
go-version: '1.21.x'

- name: Setup NodeJS
uses: actions/setup-node@v3
@@ -20,9 +20,6 @@ builds:
- -X "github.com/alice-lg/alice-lg/pkg/config.Version={{ .Version }}"
archives:
- name_template: 'alice-lg_{{ .Version }}_{{ .Os }}_{{ .Arch }}'
replacements:
386: i386
amd64: x86_64

checksum:
name_template: 'checksums.txt'
CHANGELOG.md (44 changes)
@@ -1,6 +1,50 @@

# Changelog

## 6.1.0 (2024-02-12)

* Added memory pools for deduplicating route information.
  This drastically reduces the memory consumption.

* Single table birdwatcher source is now using stream
  response parsing. This also reduces the memory consumption.
  However, as there are now waiting times, CPU load can get
  high. You can introduce a delay while parsing with the
  `stream_parser_throttle` parameter in the config.

* Improved search performance and timeout handling.

* The BGP info modal can now be dismissed by pressing `esc`.

* Global search now supports querying for BGP communities.
  Please set the `routes_store_query_limit` config variable.
  Some communities might match a large number of routes.

* Examples for the global search can be added using the
  theme's `Alice.updateContent` API:
  `{lookup: {examples: [["asn", "AS2342"], ...]}}`. Valid types
  are: `asn`, `community`, `prefix` and `q`.

* Performance in search has been improved.
  You can now set the `prefix_lookup_community_filter_cutoff`
  config variable to prevent timeouts with large result sets.

* The configuration now supports defining variables like
  `$ASN01 = 65535` which can be used for expressively describing
  communities. For now see `pkg/config/testdata/alice.conf` for
  usage.

* Bugfixes:
  - Fixed parsing and handling of ext community filters.
  - Fixed stylesheet compatibility: For route flags, new SVG icons
    are now wrapped in an `<i>` tag, to ensure backward compatibility.
  - Fixed trying to decode an 'undefined' value for a query filter.
  - Spelling fixes

* Deprecations:
  - The `/api/v1/routeservers/<rs>/neighbors/<id>/routes` endpoint
    is removed.


## 6.0.0 (2022-11-10)
@@ -20,7 +20,7 @@ ADD ui/ .
RUN yarn build

# Build the backend
FROM golang:1.18 AS backend
FROM golang:1.21 AS backend

# Install dependencies
WORKDIR /src/alice-lg
README.md (15 changes)
@@ -10,6 +10,8 @@ Take a look at Alice-LG production examples at:
- https://lg.netnod.se/
- https://alice-rs.linx.net/
- https://lg.ix.br/
- https://lg.ix.asn.au/
- https://lg.ix.nz/

And check out the API at:
- https://lg.de-cix.net/api/v1/config
@@ -39,6 +41,7 @@ Alice-LG is a BGP looking glass which gets its data from external APIs.
Currently Alice-LG supports the following APIs:
- [birdwatcher API](https://github.com/alice-lg/birdwatcher) for [BIRD](http://bird.network.cz/)
- [GoBGP](https://osrg.github.io/gobgp/)
- [bgplgd](https://man.openbsd.org/bgplgd) or [`openbgpd-state-server`](https://github.com/alice-lg/openbgpd-state-server) for [OpenBGP](https://www.openbgpd.org/)

### Birdwatcher
Normally you would first install the [birdwatcher API](https://github.com/alice-lg/birdwatcher) directly on the machine(s) where you run [BIRD](http://bird.network.cz/) on
@@ -50,11 +53,12 @@ just prior to [RIPE73](https://ripe73.ripe.net/) in Madrid, Spain.
Major thanks to Barry O'Donovan who built the original [INEX Bird's Eye](https://github.com/inex/birdseye) BIRD API of which Alice-LG is a spin-off

### GoBGP
Alice-LG supports direct integration with GoBGP instances using gRPC. See the configuration section for more detail.
Alice-LG supports direct integration with GoBGP instances using gRPC.
See the configuration section for more detail.

### OpenBGPD

Alice-LG supports OpenBGP via [`bgplgd`](https://github.com/cjeker/bgplgd)
Alice-LG supports OpenBGP via [`bgplgd`](https://man.openbsd.org/bgplgd)
and [`openbgpd-state-server`](https://github.com/alice-lg/openbgpd-state-server).

## Building Alice-LG from scratch
@@ -133,6 +137,13 @@ host = rs2.example.com:50051
# ProcessingTimeout is a timeout in seconds configured per gRPC call to a given GoBGP daemon
processing_timeout = 300
```
Configure TLS with:
```ini
tls_crt = /path/to/cert
tls_common_name = "common name"
```

You can disable TLS with `insecure = true`.

[OpenBGPD](https://www.openbgpd.org/) via `openbgpd-state-server`:
```ini
@@ -1,5 +1,5 @@

FROM golang:1.18
FROM golang:1.21

WORKDIR /src/alice-lg
@@ -11,14 +11,22 @@ listen_http = 127.0.0.1:7340
# enable the prefix-lookup endpoint / the global search feature
enable_prefix_lookup = true

# Prefix lookup community filter cutoff defines an upper limit
# of returned routes for which the community filters list is
# available. If the number of routes exceeds this limit, the
# communities filters become available if there is a specific
# route server selected. Default: 100000.
prefix_lookup_community_filter_cutoff = 100000

# Try to refresh the neighbor status on every request to /neighbors
enable_neighbors_status_refresh = false

# this ASN is used as a fallback value in the RPKI feature and for route
# filtering evaluation with large BGP communities
asn = 9033
# This default ASN is used as a fallback value in the RPKI feature.
# Setting it is optional.
asn = 9999

store_backend = postgres
# Use an alternative store backend. The default is `memory`.
# store_backend = postgres

# how many route servers will be refreshed at the same time
# if set to 0 (or for the matter of fact 1), refresh will be
@@ -32,10 +40,28 @@ neighbors_store_refresh_parallelism = 10000
routes_store_refresh_interval = 5
neighbors_store_refresh_interval = 5

[postgres]
url = "postgres://postgres:postgres@localhost:5432/alice"
min_connections = 2
max_connections = 128
# Maximum number of routes returned from the store in a prefix
# search, to avoid timeouts with too big result sets.
# This is important when querying BGP communities, as some might
# match a large number of routes. (Default: 200000)
routes_store_query_limit = 200000

# Add a delay to the stream parser in order to reduce
# CPU load while ingesting routes. Route refreshes will take
# a bit longer. The value is in nanoseconds.
# A value of 10000 will keep the cpu load at roughly 70% and
# parsing a master4 table will take about 2.5 instead of 1.25 minutes.
stream_parser_throttle = 10000

# [postgres]
# url = "postgres://postgres:postgres@localhost:5432/alice"

# As an alternative to the url, you can use the environment variables
# from libpq to configure the postgres connection:
# https://www.postgresql.org/docs/current/libpq-envars.html

# min_connections = 2
# max_connections = 128

[housekeeping]
# Interval for the housekeeping routine in minutes
@@ -78,6 +104,7 @@ routes_not_exported_page_size = 250
[rejection_candidates]
communities = 6695:1102:14, 6695:1102:15, 23:42:46

[noexport]
load_on_demand = true # Default: false
@@ -94,6 +121,11 @@ load_on_demand = true # Default: false
23:46:1 = Some other made up reason

[blackhole_communities]
65535:666
12345:1105-1189:*
12345:1111:10-90
rt:1234:4200000000-4200010000

[rpki]
# shows rpki validation status in the client, based on the presence of a large
@@ -116,7 +148,7 @@ invalid = 23042:1000:4-*
0:* = do not redistribute to AS$1

#
# Define columns for neighbours and routes table,
# Define columns for neighbors and routes table,
# with <key> = <Table Header>
#
# and <key> := <object.path> Implicitly referencing the object,
@@ -124,19 +156,19 @@ invalid = 23042:1000:4-*
# |= <Widget> A widget with special rendering features,
# to which the object is applied. E.g.
# Uptime, which will be rendered as
# Uptime(neighbour).
# Uptime(neighbor).
#
# As per convention: Widgets are in Uppercase, object properties are
# in lowercase.
#
# Available Widgets for Neighbours:
# Available Widgets for Neighbors:
#
# Uptime Displays the relative uptime of this neighbour
# Description The neighbour's description with link to routes page
# Uptime Displays the relative uptime of this neighbor
# Description The neighbor's description with link to routes page
#

[neighbours_columns]
address = Neighbour
[neighbors_columns]
address = Neighbor
asn = ASN
state = State
Uptime = Uptime
@@ -159,8 +191,8 @@ bgp.as_path = AS Path
flags =
network = Network
gateway = Gateway
neighbour.asn = ASN
neighbour.description = Description
neighbor.asn = ASN
neighbor.description = Description
bgp.as_path = AS Path
routeserver.name = RS
@@ -176,9 +208,13 @@ blackholes = 10.23.6.666, 10.23.6.665
[source.rs0-example-v4.birdwatcher]
api = http://rs1.example.com:29184/
# single_table / multi_table
## Note: arouteserver generates single_table configurations.
## if you set multi_table on a single table BIRD configuration, it will look
## like all routes are filtered
type = multi_table

main_table = master4 # default is master in bird1x
main_table = master4 # default is "master" in bird1x
# In type=single_table, peer_table_prefix and pipe_protocol_prefix are not used
peer_table_prefix = T
pipe_protocol_prefix = M
# Timeout in seconds to wait for the status data (only required if enable_neighbors_status_refresh is true)
@@ -187,6 +223,10 @@ neighbors_refresh_timeout = 2
# Optional:
show_last_reboot = true

servertime = 2006-01-02T15:04:05Z07:00
servertime_short = 2006-01-02 15:04:05
servertime_ext = 2006-01-02 15:04:05

[source.rs1-example-v6]
name = rs1.example.com (IPv6)
[source.rs1-example-v6.birdwatcher]
@@ -224,6 +264,11 @@ servertime_ext = Mon, 02 Jan 2006 15:04:05 -0700
# configured per gRPC call to a given GoBGP daemon.
# Default: 300
# processing_timeout = 300
# TLS:
# tls_crt = /path/to/cert
# tls_common_name = "common name"
# Disable TLS:
# insecure = true

# [source.rs0-example]
# name = rs-example.openbgpd-state-server
go.mod (38 changes)
@@ -1,18 +1,34 @@
module github.com/alice-lg/alice-lg

go 1.16
go 1.20

require (
github.com/go-ini/ini v1.62.0
github.com/golang/protobuf v1.5.1
github.com/jackc/pgx/v4 v4.14.1
github.com/go-ini/ini v1.67.0
github.com/golang/protobuf v1.5.3
github.com/jackc/pgx/v4 v4.18.1
github.com/julienschmidt/httprouter v1.3.0
github.com/osrg/gobgp v0.0.0-20190502094614-fd6618fed499
github.com/sirupsen/logrus v1.8.1
github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a // indirect
github.com/stretchr/testify v1.7.0
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4 // indirect
google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6 // indirect
google.golang.org/grpc v1.36.0
gopkg.in/ini.v1 v1.42.0 // indirect
github.com/sirupsen/logrus v1.9.3
github.com/stretchr/testify v1.8.4
google.golang.org/grpc v1.60.1
)

require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/jackc/chunkreader/v2 v2.0.1 // indirect
github.com/jackc/pgconn v1.14.1 // indirect
github.com/jackc/pgio v1.0.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgproto3/v2 v2.3.2 // indirect
github.com/jackc/pgservicefile v0.0.0-20231201235250-de7065d80cb9 // indirect
github.com/jackc/pgtype v1.14.0 // indirect
github.com/jackc/puddle v1.3.0 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
golang.org/x/crypto v0.18.0 // indirect
golang.org/x/net v0.20.0 // indirect
golang.org/x/sys v0.16.0 // indirect
golang.org/x/text v0.14.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240108191215-35c7eff3a6b1 // indirect
google.golang.org/protobuf v1.32.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
go.sum (167 changes)
@ -1,11 +1,7 @@
|
||||
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||
github.com/BurntSushi/toml v0.3.0/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
|
||||
github.com/armon/go-radix v0.0.0-20170727155443-1fca145dffbc/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
|
||||
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
|
||||
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
|
||||
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
|
||||
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
|
||||
@ -17,47 +13,23 @@ github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs
|
||||
github.com/dgryski/go-farm v0.0.0-20171119141306-ac7624ea8da3/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
|
||||
github.com/eapache/channels v1.1.0/go.mod h1:jMm2qB5Ubtg9zLd+inMZd2/NUvXgzmWXsDaLyQIGfH0=
|
||||
github.com/eapache/queue v1.0.2/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
|
||||
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
|
||||
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
|
||||
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
|
||||
github.com/fsnotify/fsnotify v1.4.2/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||
github.com/go-ini/ini v1.62.0 h1:7VJT/ZXjzqSrvtraFp4ONq80hTcRQth1c9ZnQ3uNQvU=
|
||||
github.com/go-ini/ini v1.62.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
|
||||
github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A=
|
||||
github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
|
||||
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
|
||||
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
|
||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||
github.com/gofrs/uuid v4.0.0+incompatible h1:1SD/1F5pU8p29ybwgQSwpQk+mwdRrXCYuPhW6m+TnJw=
|
||||
github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
|
||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
|
||||
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/protobuf v1.0.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
|
||||
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
|
||||
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
|
||||
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
|
||||
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
|
||||
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
|
||||
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
|
||||
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||
github.com/golang/protobuf v1.5.1 h1:jAbXjIeW2ZSW2AwFxlGTDoc2CjI2XujLkV3ArsZFCvc=
|
||||
github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
|
||||
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
|
||||
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
|
||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/hashicorp/hcl v0.0.0-20170509225359-392dba7d905e/go.mod h1:oZtUIOe8dh44I2q6ScRibXws4Ajl+d+nod3AaR9vL5w=
|
||||
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
||||
github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZbe0=
|
||||
github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo=
|
||||
github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
|
||||
github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8=
|
||||
@ -68,8 +40,9 @@ github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsU
|
||||
github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o=
|
||||
github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY=
|
||||
github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI=
|
||||
github.com/jackc/pgconn v1.10.1 h1:DzdIHIjG1AxGwoEEqS+mGsURyjt4enSmqzACXvVzOT8=
|
||||
github.com/jackc/pgconn v1.10.1/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI=
|
||||
github.com/jackc/pgconn v1.14.0/go.mod h1:9mBNlny0UvkgJdCDvdVHYSjI+8tD2rnKK69Wz8ti++E=
|
||||
github.com/jackc/pgconn v1.14.1 h1:smbxIaZA08n6YuxEX1sDyjV/qkbtUtkH20qLkR9MUR4=
|
||||
github.com/jackc/pgconn v1.14.1/go.mod h1:9mBNlny0UvkgJdCDvdVHYSjI+8tD2rnKK69Wz8ti++E=
|
||||
github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE=
|
||||
github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8=
|
||||
github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE=
|
||||
@ -78,7 +51,6 @@ github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65 h1:DadwsjnMwFjfWc9y5W
|
||||
github.com/jackc/pgmock v0.0.0-20210724152146-4ad1a8207f65/go.mod h1:5R2h2EEX+qri8jOWMbJCtaPWkrrNc7OHwsp2TCqp7ak=
|
||||
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
|
||||
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
|
||||
github.com/jackc/pgproto3 v1.1.0 h1:FYYE4yRw+AgI8wXIinMlNjBbp/UitDJwfj5LqqewP1A=
|
||||
github.com/jackc/pgproto3 v1.1.0/go.mod h1:eR5FA3leWg7p9aeAqi37XOTgTIbkABlvcPB3E5rlc78=
|
||||
github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod h1:bhq50y+xrl9n5mRYyCBFKkpRVTLYJVWeCc+mEAI3yXA=
|
||||
github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg=
|
||||
@ -86,30 +58,30 @@ github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvW
|
||||
github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
|
||||
github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
|
||||
github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
|
||||
github.com/jackc/pgproto3/v2 v2.2.0 h1:r7JypeP2D3onoQTCxWdTpCtJ4D+qpKr0TxvoyMhZ5ns=
|
||||
github.com/jackc/pgproto3/v2 v2.2.0/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
|
||||
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg=
|
||||
github.com/jackc/pgproto3/v2 v2.3.2 h1:7eY55bdBeCz1F2fTzSz69QC+pG46jYq9/jtSPiJ5nn0=
|
||||
github.com/jackc/pgproto3/v2 v2.3.2/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
|
||||
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E=
|
||||
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
|
||||
github.com/jackc/pgservicefile v0.0.0-20231201235250-de7065d80cb9 h1:L0QtFUgDarD7Fpv9jeVMgy/+Ec0mtnmYuImjTz6dtDA=
|
||||
github.com/jackc/pgservicefile v0.0.0-20231201235250-de7065d80cb9/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
|
||||
github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg=
|
||||
github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc=
|
||||
github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=
|
||||
github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM=
|
||||
github.com/jackc/pgtype v1.9.1 h1:MJc2s0MFS8C3ok1wQTdQxWuXQcB6+HwAm5x1CzW7mf0=
|
||||
github.com/jackc/pgtype v1.9.1/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4=
|
||||
github.com/jackc/pgtype v1.14.0 h1:y+xUdabmyMkJLyApYuPj38mW+aAIqCe5uuBB51rH3Vw=
|
||||
github.com/jackc/pgtype v1.14.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4=
|
||||
github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y=
|
||||
github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM=
|
||||
github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc=
|
||||
github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs=
|
||||
github.com/jackc/pgx/v4 v4.14.1 h1:71oo1KAGI6mXhLiTMn6iDFcp3e7+zon/capWjl2OEFU=
|
||||
github.com/jackc/pgx/v4 v4.14.1/go.mod h1:RgDuE4Z34o7XE92RpLsvFiOEfrAUT0Xt2KxvX73W06M=
|
||||
github.com/jackc/pgx/v4 v4.18.1 h1:YP7G1KABtKpB5IHrO9vYwSrCOhs7p3uqhvhhQBptya0=
|
||||
github.com/jackc/pgx/v4 v4.18.1/go.mod h1:FydWkUyadDmdNH/mHnGob881GawxeEm7TcMCzkb+qQE=
|
||||
github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
||||
github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
||||
github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
||||
github.com/jackc/puddle v1.2.0 h1:DNDKdn/pDrWvDWyT2FYvpZVE81OAhWrjCv19I9n108Q=
|
||||
github.com/jackc/puddle v1.2.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
||||
github.com/jackc/puddle v1.3.0 h1:eHK/5clGOatcjX3oWGBO/MpxpbHzSwud5EWTSCI+MX0=
|
||||
github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
||||
github.com/jessevdk/go-flags v1.3.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
|
||||
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
|
||||
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
github.com/julienschmidt/httprouter v1.3.0 h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4dN7jwJOQ1U=
|
||||
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
@ -143,7 +115,6 @@ github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
|
||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
|
||||
github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
|
||||
@ -156,12 +127,8 @@ github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFR
|
||||
github.com/sirupsen/logrus v0.0.0-20170713114250-a3f95b5c4235/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=
|
||||
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
|
||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||
github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=
|
||||
github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||
github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a h1:pa8hGb/2YqsZKovtsgrwcDH1RZhVbTKCjLp47XpqCDs=
|
||||
github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
|
||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||
github.com/spf13/afero v0.0.0-20170217164146-9be650865eab/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
|
||||
github.com/spf13/cast v1.1.0/go.mod h1:r2rcYCSwa1IExKTDiTfzaxqT2FNHs8hODu4LnUfgKEg=
|
||||
github.com/spf13/cobra v0.0.0-20170731170427-b26b538f6930/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
|
||||
@ -171,15 +138,22 @@ github.com/spf13/viper v1.0.0/go.mod h1:A8kyI5cUJhb8N+3pkfONlcEcZbueH6nhAm0Fq7Sr
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/testify v1.1.4/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
|
||||
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/vishvananda/netlink v0.0.0-20170802012344-a95659537721/go.mod h1:+SR5DhBJrl6ZM7CoCKvpw5BKroDKQ+PJqOg65H/2ktk=
|
||||
github.com/vishvananda/netns v0.0.0-20170707011535-86bef332bfc3/go.mod h1:ZjcWmFBXmLKZu9Nxj3WKYEafiSqer2rnvPr0en9UNpI=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
|
||||
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
|
||||
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
|
||||
@ -200,31 +174,27 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
|
||||
golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97 h1:/UOmuWzQfxxo9UtlXMwuQU8CMgg1eZXqTRwkSQJWKOI=
|
||||
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
|
||||
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
|
||||
golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc=
|
||||
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
|
||||
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
|
||||
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4 h1:b0LrWgu8+q7z4J+0Y3Umo5q1dL7NXBkKBWkaVkAq17E=
|
||||
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo=
|
||||
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
|
||||
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@ -237,75 +207,60 @@ golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1 h1:SrN+KX8Art/Sf4HNj6Zcz06G7VEz+7w9tdXTPOZ7+l4=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU=
|
||||
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
|
||||
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||
golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
||||
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/genproto v0.0.0-20170731182057-09f6ed296fc6/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
||||
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
||||
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
|
||||
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
|
||||
google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6 h1:4Xw2NwItrJOFR5s6PnK98PI6Bgw1LhMP1j/rO5WP0S4=
|
||||
google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240108191215-35c7eff3a6b1 h1:gphdwh0npgs8elJ4T6J+DQJHPVF7RsuJHCfwztUb4J4=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240108191215-35c7eff3a6b1/go.mod h1:daQN87bsDqDoe316QbbvX60nMoJQa4r6Ds0ZuoAe5yA=
|
||||
google.golang.org/grpc v1.5.1/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
|
||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
|
||||
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
|
||||
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
|
||||
google.golang.org/grpc v1.36.0 h1:o1bcQ6imQMIOpdrO3SWf2z5RV72WbDwdXuK0MDlc8As=
|
||||
google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
|
||||
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
|
||||
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
|
||||
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
|
||||
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
|
||||
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
|
||||
google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
|
||||
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
|
||||
google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
|
||||
google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
|
||||
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
|
||||
google.golang.org/grpc v1.60.1 h1:26+wFr+cNqSGFcOXcabYC0lUVJVRa2Sb2ortSK7VrEU=
|
||||
google.golang.org/grpc v1.60.1/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=
|
||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
|
||||
google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||
gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s=
|
||||
gopkg.in/ini.v1 v1.42.0 h1:7N3gPTt50s8GuLortA00n8AqRTk75qOP98+mTPpgzRk=
|
||||
gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||
gopkg.in/yaml.v2 v2.0.0-20170721122051-25c4ec802a7d/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||
|
@@ -173,3 +173,33 @@ func (c BGPCommunityMap) Communities() Communities {
}
return communities
}

// BGPCommunity types: Standard, Extended and Large
const (
BGPCommunityTypeStd = iota
BGPCommunityTypeExt
BGPCommunityTypeLarge
)

// BGPCommunityRange is a list of tuples with the start and end
// of the range defining a community.
type BGPCommunityRange []interface{}

// Type classifies the ranged BGP community into: std, large, ext
func (c BGPCommunityRange) Type() int {
if len(c) == 2 {
return BGPCommunityTypeStd
}
if _, ok := c[0].([]string); ok {
return BGPCommunityTypeExt
}
return BGPCommunityTypeLarge
}

// A BGPCommunitiesSet is a set of communities, large and extended.
// The communities are described as ranges.
type BGPCommunitiesSet struct {
Standard []BGPCommunityRange `json:"standard"`
Extended []BGPCommunityRange `json:"extended"`
Large []BGPCommunityRange `json:"large"`
}
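A brief sketch of how the Type() classification above behaves. This snippet is not part of the changeset; it would live in the same package as the types, and the concrete element types inside each range are assumptions made only for illustration:

```go
package api

import "fmt"

// classifyExamples is illustrative only. It assumes ranges are built
// from []int start/end pairs, with extended communities led by a
// []string kind (e.g. "rt"), mirroring the checks in Type() above.
func classifyExamples() {
	std := BGPCommunityRange{[]int{65535, 65535}, []int{666, 666}}                // two tuples
	ext := BGPCommunityRange{[]string{"rt"}, []int{1234, 1234}, []int{0, 10000}}  // leading []string
	large := BGPCommunityRange{[]int{12345, 12345}, []int{1105, 1189}, []int{10, 90}}

	fmt.Println(std.Type() == BGPCommunityTypeStd)     // true
	fmt.Println(ext.Type() == BGPCommunityTypeExt)     // true
	fmt.Println(large.Type() == BGPCommunityTypeLarge) // true
}
```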
pkg/api/errors.go (new file, 7 lines)
@@ -0,0 +1,7 @@
package api

import "errors"

// ErrTooManyRoutes is returned when the result set
// of a route query exceeds the maximum allowed number of routes.
var ErrTooManyRoutes = errors.New("too many routes")
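A minimal sketch (not from this changeset) of how a caller could detect this sentinel; the queryRoutes function is hypothetical and only exists to produce a wrapped error:

```go
package main

import (
	"errors"
	"fmt"

	"github.com/alice-lg/alice-lg/pkg/api"
)

// queryRoutes stands in for any store lookup that can exceed the
// configured routes_store_query_limit. Hypothetical helper.
func queryRoutes() error {
	return fmt.Errorf("prefix lookup: %w", api.ErrTooManyRoutes)
}

func main() {
	if err := queryRoutes(); errors.Is(err, api.ErrTooManyRoutes) {
		fmt.Println("result set too large; narrow the query or select a single route server")
	}
}
```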
@@ -1,7 +1,7 @@
package api

import (
"fmt"
"strconv"
"time"
)
@@ -27,8 +27,6 @@ type CacheableResponse interface {

// ConfigResponse is a response with client runtime configuration
type ConfigResponse struct {
Asn int `json:"asn"`

RejectReasons map[string]interface{} `json:"reject_reasons"`

Noexport Noexport `json:"noexport"`
@@ -38,7 +36,8 @@ type ConfigResponse struct {

Rpki Rpki `json:"rpki"`

BGPCommunities map[string]interface{} `json:"bgp_communities"`
BGPCommunities map[string]interface{} `json:"bgp_communities"`
BGPBlackholeCommunities BGPCommunitiesSet `json:"bgp_blackhole_communities"`

NeighborsColumns map[string]string `json:"neighbors_columns"`
NeighborsColumnsOrder []string `json:"neighbors_columns_order"`
@@ -65,11 +64,11 @@ type RejectCandidates struct {

// Rpki is the validation status of a prefix
type Rpki struct {
Enabled bool `json:"enabled"`
Valid []string `json:"valid"`
Unknown []string `json:"unknown"`
NotChecked []string `json:"not_checked"`
Invalid []string `json:"invalid"`
Enabled bool `json:"enabled"`
Valid [][]string `json:"valid"`
Unknown [][]string `json:"unknown"`
NotChecked [][]string `json:"not_checked"`
Invalid [][]string `json:"invalid"`
}

// Meta contains response meta information
@@ -158,24 +157,37 @@ type RouteServersResponse struct {
RouteServers RouteServers `json:"routeservers"`
}

// A LookupRouteServer is a shorter representation of the
// route server data source.
type LookupRouteServer struct {
ID *string `json:"id"`
Name string `json:"name"`
}

// Community is a BGP community
type Community []int

func (com Community) String() string {
res := ""
if len(com) < 1 {
return ""
}
for _, v := range com {
res += fmt.Sprintf(":%d", v)
s := ""
for i, v := range com {
if i > 0 {
s += ":"
}
s += strconv.Itoa(v)
}
return res[1:]
return s
}

// Communities is a collection of bgp communities
type Communities []Community

// Unique deduplicates communities
// Unique deduplicates communities.
/*
We can skip this. Worst case is, that the
cardinality is off.
func (communities Communities) Unique() Communities {
seen := map[string]bool{}
result := make(Communities, 0, len(communities))
@@ -191,25 +203,34 @@ func (communities Communities) Unique() Communities {

return result
}
*/

// ExtCommunity is a BGP extended community
type ExtCommunity []interface{}

func (com ExtCommunity) String() string {
res := ""
if len(com) < 1 {
return ""
}
for _, v := range com {
res += fmt.Sprintf(":%v", v)
res := ""
for i, v := range com {
if i == 0 {
res += v.(string)
continue
}
if i > 0 {
res += ":"
}
res += strconv.Itoa(v.(int))
}
return res[1:]
return res
}

// ExtCommunities is a collection of extended bgp communities.
type ExtCommunities []ExtCommunity

// Unique deduplicates extended communities.
/*
func (communities ExtCommunities) Unique() ExtCommunities {
seen := map[string]bool{}
result := make(ExtCommunities, 0, len(communities))
@@ -225,12 +246,13 @@ func (communities ExtCommunities) Unique() ExtCommunities {

return result
}
*/

// BGPInfo is a set of BGP attributes
type BGPInfo struct {
Origin string `json:"origin"`
Origin *string `json:"origin"`
AsPath []int `json:"as_path"`
NextHop string `json:"next_hop"`
NextHop *string `json:"next_hop"`
Communities Communities `json:"communities"`
LargeCommunities Communities `json:"large_communities"`
ExtCommunities ExtCommunities `json:"ext_communities"`
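For reference, a small sketch of what the reworked String methods return; the values are made up, and the snippet only calls the exported types from pkg/api shown above:

```go
package main

import (
	"fmt"

	"github.com/alice-lg/alice-lg/pkg/api"
)

func main() {
	// Made-up values, used only to show the formatting.
	com := api.Community{64496, 23, 42}
	ext := api.ExtCommunity{"rt", 64496, 100}

	fmt.Println(com.String()) // 64496:23:42
	fmt.Println(ext.String()) // rt:64496:100
}
```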
|
||||
|
@ -8,18 +8,18 @@ import (
|
||||
|
||||
// Route is a prefix with BGP information.
|
||||
type Route struct {
|
||||
ID string `json:"id"`
|
||||
NeighborID string `json:"neighbor_id"`
|
||||
// ID string `json:"id"`
|
||||
NeighborID *string `json:"neighbor_id"`
|
||||
|
||||
Network string `json:"network"`
|
||||
Interface string `json:"interface"`
|
||||
Gateway string `json:"gateway"`
|
||||
Interface *string `json:"interface"`
|
||||
Gateway *string `json:"gateway"`
|
||||
Metric int `json:"metric"`
|
||||
BGP *BGPInfo `json:"bgp"`
|
||||
Age time.Duration `json:"age"`
|
||||
Type []string `json:"type"` // [BGP, unicast, univ]
|
||||
Primary bool `json:"primary"`
|
||||
LearntFrom string `json:"learnt_from"`
|
||||
LearntFrom *string `json:"learnt_from"`
|
||||
|
||||
Details *json.RawMessage `json:"details"`
|
||||
}
|
||||
@ -72,12 +72,12 @@ func (routes Routes) Swap(i, j int) {
|
||||
// ToLookupRoutes prepares routes for lookup
|
||||
func (routes Routes) ToLookupRoutes(
|
||||
state string,
|
||||
rs *RouteServer,
|
||||
rs *LookupRouteServer,
|
||||
neighbors map[string]*Neighbor,
|
||||
) LookupRoutes {
|
||||
lookupRoutes := make(LookupRoutes, 0, len(routes))
|
||||
for _, route := range routes {
|
||||
neighbor, ok := neighbors[route.NeighborID]
|
||||
neighbor, ok := neighbors[*route.NeighborID]
|
||||
if !ok {
|
||||
log.Println("prepare route, neighbor not found:", route.NeighborID)
|
||||
continue
|
||||
@ -129,8 +129,9 @@ type PaginatedResponse struct {
|
||||
|
||||
// FilteredResponse includes filters applied and available
|
||||
type FilteredResponse struct {
|
||||
FiltersAvailable *SearchFilters `json:"filters_available"`
|
||||
FiltersApplied *SearchFilters `json:"filters_applied"`
|
||||
FiltersAvailable *SearchFilters `json:"filters_available"`
|
||||
FiltersApplied *SearchFilters `json:"filters_applied"`
|
||||
FiltersNotAvailable []string `json:"filters_not_available"`
|
||||
}
|
||||
|
||||
const (
|
||||
@ -142,6 +143,13 @@ const (
|
||||
RouteStateImported = "imported"
|
||||
)
|
||||
|
||||
// NeighborQuery is used in finding routes by neighbors.
// Source and Neighbor IDs are pointers to string pools.
type NeighborQuery struct {
	NeighborID *string
	SourceID   *string
}
|
||||
|
||||
// LookupRoute is a route with additional
|
||||
// neighbor and state information
|
||||
type LookupRoute struct {
|
||||
@ -149,13 +157,13 @@ type LookupRoute struct {
|
||||
|
||||
State string `json:"state"` // Filtered, Imported, ...
|
||||
|
||||
Neighbor *Neighbor `json:"neighbor"`
|
||||
RouteServer *RouteServer `json:"routeserver"`
|
||||
Neighbor *Neighbor `json:"neighbor"`
|
||||
RouteServer *LookupRouteServer `json:"routeserver"`
|
||||
}
|
||||
|
||||
// MatchSourceID implements filterable interface for lookup routes
|
||||
func (r *LookupRoute) MatchSourceID(id string) bool {
|
||||
return r.RouteServer.ID == id
|
||||
return *r.RouteServer.ID == id
|
||||
}
|
||||
|
||||
// MatchASN matches the neighbor's ASN
|
||||
@ -178,6 +186,17 @@ func (r *LookupRoute) MatchLargeCommunity(community Community) bool {
|
||||
return r.Route.BGP.HasLargeCommunity(community)
|
||||
}
|
||||
|
||||
// MatchNeighborQuery matches a neighbor query
func (r *LookupRoute) MatchNeighborQuery(query *NeighborQuery) bool {
	if r.RouteServer.ID != query.SourceID {
		return false
	}
	if r.NeighborID != query.NeighborID {
		return false
	}
	return true
}
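
Because both IDs are pooled pointers, MatchNeighborQuery can compare the pointers themselves instead of the string contents. A minimal, self-contained sketch of that idea; the pool below is a simplified stand-in for the project's pools package (e.g. pools.RouteServers.Acquire used later in this change), and the values are made up:

	package main

	import "fmt"

	// pool interns strings so that equal values share one canonical pointer.
	var pool = map[string]*string{}

	// acquire returns the canonical pointer for the given value.
	func acquire(v string) *string {
		if p, ok := pool[v]; ok {
			return p
		}
		p := &v
		pool[v] = p
		return p
	}

	func main() {
		a := acquire("rs1.example")
		b := acquire("rs1.example")
		// Same pooled pointer: matching a route is a single pointer
		// comparison, no per-route string comparison needed.
		fmt.Println(a == b) // true
	}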
|
||||
|
||||
// LookupRoutes is a collection of lookup routes.
|
||||
type LookupRoutes []*LookupRoute
|
||||
|
||||
|
@ -74,7 +74,7 @@ func TestCommunityStringify(t *testing.T) {
|
||||
t.Error("Expected 23:42, got:", com.String())
|
||||
}
|
||||
|
||||
extCom := ExtCommunity{"ro", "42", "123"}
|
||||
extCom := ExtCommunity{"ro", 42, 123}
|
||||
if extCom.String() != "ro:42:123" {
|
||||
t.Error("Expected ro:42:123, but got:", extCom.String())
|
||||
}
|
||||
@ -134,6 +134,7 @@ func TestHasCommunity(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
func TestUniqueCommunities(t *testing.T) {
|
||||
all := Communities{Community{23, 42}, Community{42, 123}, Community{23, 42}}
|
||||
unique := all.Unique()
|
||||
@ -154,3 +155,4 @@ func TestUniqueExtCommunities(t *testing.T) {
|
||||
}
|
||||
t.Log("All:", all, "Unique:", unique)
|
||||
}
|
||||
*/
|
||||
|
@ -5,6 +5,7 @@ import (
|
||||
"log"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// SearchKeys are filterable attributes
|
||||
@ -49,7 +50,32 @@ func searchFilterCmpInt(a FilterValue, b FilterValue) bool {
|
||||
|
||||
// Compare strings
|
||||
func searchFilterCmpString(a FilterValue, b FilterValue) bool {
|
||||
return a.(string) == b.(string)
|
||||
var (
|
||||
valA string
|
||||
valB string
|
||||
)
|
||||
_, ptrA := a.(*string)
|
||||
_, ptrB := b.(*string)
|
||||
|
||||
// Compare pointers, this is ok because we can assume
|
||||
// using pool values for both.
|
||||
if ptrA && ptrB {
|
||||
return a == b
|
||||
}
|
||||
|
||||
// Otherwise fall back to string compare
|
||||
if ptrA {
|
||||
valA = *a.(*string)
|
||||
} else {
|
||||
valA = a.(string)
|
||||
}
|
||||
if ptrB {
|
||||
valB = *b.(*string)
|
||||
} else {
|
||||
valB = b.(string)
|
||||
}
|
||||
|
||||
return valA == valB
|
||||
}
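
A hedged illustration of the semantics above (values are made up): when both operands are *string, only pointer identity counts, which is safe only because both are expected to come from the same string pool; mixed operands fall back to a value comparison.

	x, y := "AS64512", "AS64512"
	searchFilterCmpString(&x, &x)        // true: identical pooled pointer
	searchFilterCmpString(&x, &y)        // false: equal contents, different pointers
	searchFilterCmpString(&x, "AS64512") // true: mixed case, compared by value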
|
||||
|
||||
// Compare communities
|
||||
@ -96,6 +122,8 @@ func (f *SearchFilter) Equal(other *SearchFilter) bool {
|
||||
cmp = searchFilterCmpInt
|
||||
case string:
|
||||
cmp = searchFilterCmpString
|
||||
case *string:
|
||||
cmp = searchFilterCmpString
|
||||
}
|
||||
|
||||
if cmp == nil {
|
||||
@ -131,14 +159,29 @@ func (g *SearchFilterGroup) Contains(filter *SearchFilter) bool {
|
||||
return g.FindFilter(filter) != nil
|
||||
}
|
||||
|
||||
// filterValueAsString gets the string representation
// from a filter value
func filterValueAsString(value interface{}) string {
	switch v := value.(type) {
	case int:
		return strconv.Itoa(v)
	case *string:
		return *v
	case string:
		return v
	case Community:
		return v.String()
	case ExtCommunity:
		return v.String()
	}
	panic("unexpected filter value: " + fmt.Sprintf("%v", value))
}
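
For reference, the index keys this helper produces for the value types used in this file (values are illustrative; Community and ExtCommunity render as defined in bgp.go above):

	filterValueAsString(2342)                       // "2342"
	filterValueAsString("rs1.example")              // "rs1.example"
	filterValueAsString(Community{65535, 666})      // "65535:666"
	filterValueAsString(ExtCommunity{"ro", 23, 42}) // "ro:23:42"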
|
||||
|
||||
// GetFilterByValue retrieves a filter by matching
|
||||
// a string representation of its filter value.
|
||||
func (g *SearchFilterGroup) GetFilterByValue(value interface{}) *SearchFilter {
|
||||
// I've tried it with .(fmt.Stringer), but int does not implement this...
|
||||
// So whatever. I'm using the trick of letting Sprintf choose the right
|
||||
// conversion. If this is too expensive, we need to refactor this.
|
||||
// TODO: profile this.
|
||||
idx, ok := g.filtersIdx[fmt.Sprintf("%v", value)]
|
||||
ref := filterValueAsString(value)
|
||||
idx, ok := g.filtersIdx[ref]
|
||||
if !ok {
|
||||
return nil // We don't have this particular filter
|
||||
}
|
||||
@ -158,7 +201,8 @@ func (g *SearchFilterGroup) AddFilter(filter *SearchFilter) {
|
||||
idx := len(g.Filters)
|
||||
filter.Cardinality = 1
|
||||
g.Filters = append(g.Filters, filter)
|
||||
g.filtersIdx[fmt.Sprintf("%v", filter.Value)] = idx
|
||||
ref := filterValueAsString(filter.Value)
|
||||
g.filtersIdx[ref] = idx
|
||||
}
|
||||
|
||||
// AddFilters adds a list of filters to a group.
|
||||
@ -172,7 +216,8 @@ func (g *SearchFilterGroup) AddFilters(filters []*SearchFilter) {
|
||||
func (g *SearchFilterGroup) rebuildIndex() {
|
||||
idx := make(map[string]int)
|
||||
for i, filter := range g.Filters {
|
||||
idx[fmt.Sprintf("%v", filter.Value)] = i
|
||||
ref := filterValueAsString(filter.Value)
|
||||
idx[ref] = i
|
||||
}
|
||||
g.filtersIdx = idx // replace index
|
||||
}
|
||||
@ -347,6 +392,50 @@ func (s *SearchFilters) GetGroupByKey(key string) *SearchFilterGroup {
|
||||
return nil
|
||||
}
|
||||
|
||||
// UpdateSourcesFromLookupRoute updates the source filter
|
||||
func (s *SearchFilters) UpdateSourcesFromLookupRoute(r *LookupRoute) {
|
||||
// Add source
|
||||
s.GetGroupByKey(SearchKeySources).AddFilter(&SearchFilter{
|
||||
Name: r.RouteServer.Name,
|
||||
Value: r.RouteServer.ID,
|
||||
})
|
||||
}
|
||||
|
||||
// UpdateASNSFromLookupRoute updates the ASN filter
|
||||
func (s *SearchFilters) UpdateASNSFromLookupRoute(r *LookupRoute) {
|
||||
// Add ASN from neighbor
|
||||
s.GetGroupByKey(SearchKeyASNS).AddFilter(&SearchFilter{
|
||||
Name: r.Neighbor.Description,
|
||||
Value: r.Neighbor.ASN,
|
||||
})
|
||||
}
|
||||
|
||||
// UpdateCommunitiesFromLookupRoute updates the communities filter
|
||||
func (s *SearchFilters) UpdateCommunitiesFromLookupRoute(r *LookupRoute) {
|
||||
// Add communities
|
||||
communities := s.GetGroupByKey(SearchKeyCommunities)
|
||||
for _, c := range r.Route.BGP.Communities {
|
||||
communities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
})
|
||||
}
|
||||
extCommunities := s.GetGroupByKey(SearchKeyExtCommunities)
|
||||
for _, c := range r.Route.BGP.ExtCommunities {
|
||||
extCommunities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
})
|
||||
}
|
||||
largeCommunities := s.GetGroupByKey(SearchKeyLargeCommunities)
|
||||
for _, c := range r.Route.BGP.LargeCommunities {
|
||||
largeCommunities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// UpdateFromLookupRoute updates a filter
|
||||
// and its counters.
|
||||
//
|
||||
@ -354,40 +443,9 @@ func (s *SearchFilters) GetGroupByKey(key string) *SearchFilterGroup {
|
||||
// - Extract ASN, source, bgp communities,
|
||||
// - Find Filter in group, increment result count if required.
|
||||
func (s *SearchFilters) UpdateFromLookupRoute(r *LookupRoute) {
|
||||
// Add source
|
||||
s.GetGroupByKey(SearchKeySources).AddFilter(&SearchFilter{
|
||||
Name: r.RouteServer.Name,
|
||||
Value: r.RouteServer.ID,
|
||||
})
|
||||
|
||||
// Add ASN from neighbor
|
||||
s.GetGroupByKey(SearchKeyASNS).AddFilter(&SearchFilter{
|
||||
Name: r.Neighbor.Description,
|
||||
Value: r.Neighbor.ASN,
|
||||
})
|
||||
|
||||
// Add communities
|
||||
communities := s.GetGroupByKey(SearchKeyCommunities)
|
||||
for _, c := range r.Route.BGP.Communities.Unique() {
|
||||
communities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
})
|
||||
}
|
||||
extCommunities := s.GetGroupByKey(SearchKeyCommunities)
|
||||
for _, c := range r.Route.BGP.ExtCommunities.Unique() {
|
||||
extCommunities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
})
|
||||
}
|
||||
largeCommunities := s.GetGroupByKey(SearchKeyLargeCommunities)
|
||||
for _, c := range r.Route.BGP.LargeCommunities.Unique() {
|
||||
largeCommunities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
})
|
||||
}
|
||||
s.UpdateSourcesFromLookupRoute(r)
|
||||
s.UpdateASNSFromLookupRoute(r)
|
||||
s.UpdateCommunitiesFromLookupRoute(r)
|
||||
}
|
||||
|
||||
// UpdateFromRoute updates a search filter, however as
|
||||
@ -398,21 +456,21 @@ func (s *SearchFilters) UpdateFromRoute(r *Route) {
|
||||
|
||||
// Add communities
|
||||
communities := s.GetGroupByKey(SearchKeyCommunities)
|
||||
for _, c := range r.BGP.Communities.Unique() {
|
||||
for _, c := range r.BGP.Communities {
|
||||
communities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
})
|
||||
}
|
||||
extCommunities := s.GetGroupByKey(SearchKeyExtCommunities)
|
||||
for _, c := range r.BGP.ExtCommunities.Unique() {
|
||||
for _, c := range r.BGP.ExtCommunities {
|
||||
extCommunities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
})
|
||||
}
|
||||
largeCommunities := s.GetGroupByKey(SearchKeyLargeCommunities)
|
||||
for _, c := range r.BGP.LargeCommunities.Unique() {
|
||||
for _, c := range r.BGP.LargeCommunities {
|
||||
largeCommunities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
@ -479,6 +537,59 @@ func FiltersFromQuery(query url.Values) (*SearchFilters, error) {
|
||||
return queryFilters, nil
|
||||
}
|
||||
|
||||
// parseCommunityFilterText creates FilterValue from the
// text input which may be an api.Community or api.ExtCommunity.
func parseCommunityFilterText(text string) (string, *SearchFilter, error) {
	tokens := strings.Split(text, ":")
	if len(tokens) < 2 {
		return "", nil, fmt.Errorf("BGP community incomplete")
	}

	// Check if we are dealing with an ext. community
	maybeExt := false
	_, err := strconv.Atoi(tokens[0])
	if err != nil {
		maybeExt = true
	}

	// Parse filter value
	if maybeExt {
		filter, err := parseExtCommunityValue(text)
		if err != nil {
			return "", nil, err
		}
		return SearchKeyExtCommunities, filter, nil
	}

	filter, err := parseCommunityValue(text)
	if err != nil {
		return "", nil, fmt.Errorf("BGP community incomplete")
	}

	if len(tokens) == 2 {
		return SearchKeyCommunities, filter, nil
	}

	return SearchKeyLargeCommunities, filter, nil
}
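
In short, dispatch is driven by the first token and the token count (inputs below are illustrative):

	parseCommunityFilterText("65000:666")    // SearchKeyCommunities
	parseCommunityFilterText("65000:1000:1") // SearchKeyLargeCommunities
	parseCommunityFilterText("ro:65000:1")   // SearchKeyExtCommunities
	parseCommunityFilterText("65000")        // error: BGP community incomplete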
|
||||
|
||||
// FiltersFromTokens parses the passed list of filters
|
||||
// extracted from the query string and creates the filter.
|
||||
func FiltersFromTokens(tokens []string) (*SearchFilters, error) {
|
||||
queryFilters := NewSearchFilters()
|
||||
for _, value := range tokens {
|
||||
if strings.HasPrefix(value, "#") { // Community query
|
||||
key, filter, err := parseCommunityFilterText(value[1:])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
queryFilters.GetGroupByKey(key).AddFilter(filter)
|
||||
}
|
||||
|
||||
}
|
||||
return queryFilters, nil
|
||||
}
|
||||
|
||||
// MatchRoute checks if a route matches all filters.
|
||||
// Unless all filters are blank.
|
||||
func (s *SearchFilters) MatchRoute(r Filterable) bool {
|
||||
@ -510,6 +621,31 @@ func (s *SearchFilters) MatchRoute(r Filterable) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
// Combine two search filters
|
||||
func (s *SearchFilters) Combine(other *SearchFilters) *SearchFilters {
|
||||
result := make(SearchFilters, len(*s))
|
||||
for id, group := range *s {
|
||||
otherGroup := (*other)[id]
|
||||
combined := &SearchFilterGroup{
|
||||
Key: group.Key,
|
||||
Filters: []*SearchFilter{},
|
||||
}
|
||||
for _, f := range group.Filters {
|
||||
combined.Filters = append(combined.Filters, f)
|
||||
}
|
||||
for _, f := range otherGroup.Filters {
|
||||
if combined.Contains(f) {
|
||||
continue
|
||||
}
|
||||
combined.Filters = append(combined.Filters, f)
|
||||
}
|
||||
combined.rebuildIndex()
|
||||
result[id] = combined
|
||||
}
|
||||
|
||||
return &result
|
||||
}
|
||||
|
||||
// Sub makes a diff of two search filters
|
||||
func (s *SearchFilters) Sub(other *SearchFilters) *SearchFilters {
|
||||
result := make(SearchFilters, len(*s))
|
||||
@ -552,6 +688,13 @@ func (s *SearchFilters) MergeProperties(other *SearchFilters) {
|
||||
}
|
||||
}
|
||||
|
||||
// HasGroup checks if a group with a given key exists
|
||||
// and filters are present.
|
||||
func (s *SearchFilters) HasGroup(key string) bool {
|
||||
group := s.GetGroupByKey(key)
|
||||
return len(group.Filters) > 0
|
||||
}
|
||||
|
||||
// A NeighborFilter includes only a name and ASN.
|
||||
// We are using a slightly simpler solution for
|
||||
// neighbor queries.
|
||||
|
@ -1,10 +1,16 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Errors
|
||||
var (
|
||||
ErrExtCommunityIncomplete = errors.New("incomplete extended community")
|
||||
)
|
||||
|
||||
// FilterQueryParser parses a filter value into a search filter
|
||||
type FilterQueryParser func(value string) (*SearchFilter, error)
|
||||
|
||||
@ -62,10 +68,19 @@ func parseExtCommunityValue(value string) (*SearchFilter, error) {
|
||||
components := strings.Split(value, ":")
|
||||
community := make(ExtCommunity, len(components))
|
||||
|
||||
for i, c := range components {
|
||||
community[i] = c
|
||||
if len(community) != 3 {
|
||||
return nil, ErrExtCommunityIncomplete
|
||||
}
|
||||
|
||||
// Check if the community is incomplete
|
||||
if components[0] == "" || components[1] == "" || components[2] == "" {
|
||||
return nil, ErrExtCommunityIncomplete
|
||||
}
|
||||
// TODO: Mixing strings and integers is not a good idea
|
||||
community[0] = components[0]
|
||||
community[1], _ = strconv.Atoi(components[1])
|
||||
community[2], _ = strconv.Atoi(components[2])
|
||||
|
||||
return &SearchFilter{
|
||||
Name: community.String(),
|
||||
Value: community,
|
||||
|
@ -59,3 +59,25 @@ func TestParseExtCommunityValue(t *testing.T) {
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestPartialParseExtCommunityValue(t *testing.T) {
|
||||
filter, err := parseExtCommunityValue("rt:23")
|
||||
if err == nil {
|
||||
t.Error("Expected error, result:", filter)
|
||||
}
|
||||
|
||||
filter, err = parseExtCommunityValue("rt:23:")
|
||||
if err == nil {
|
||||
t.Error("Expected error, result:", filter)
|
||||
}
|
||||
|
||||
filter, err = parseExtCommunityValue("rt::")
|
||||
if err == nil {
|
||||
t.Error("Expected error, result:", filter)
|
||||
}
|
||||
|
||||
filter, err = parseExtCommunityValue("::")
|
||||
if err == nil {
|
||||
t.Error("Expected error, result:", filter)
|
||||
}
|
||||
}
|
||||
|
@ -5,6 +5,10 @@ import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
var (
|
||||
testRsID string = "3"
|
||||
)
|
||||
|
||||
func makeTestRoute() *Route {
|
||||
route := &Route{
|
||||
BGP: &BGPInfo{
|
||||
@ -13,7 +17,7 @@ func makeTestRoute() *Route {
|
||||
{111, 11},
|
||||
},
|
||||
ExtCommunities: []ExtCommunity{
|
||||
{"ro", "23", "123"},
|
||||
{"ro", 23, 123},
|
||||
},
|
||||
LargeCommunities: []Community{
|
||||
{1000, 23, 42},
|
||||
@ -33,7 +37,7 @@ func makeTestLookupRoute() *LookupRoute {
|
||||
{111, 11},
|
||||
},
|
||||
ExtCommunities: []ExtCommunity{
|
||||
{"ro", "23", "123"},
|
||||
{"ro", 23, 123},
|
||||
},
|
||||
LargeCommunities: []Community{
|
||||
{1000, 23, 42},
|
||||
@ -44,8 +48,8 @@ func makeTestLookupRoute() *LookupRoute {
|
||||
ASN: 23042,
|
||||
Description: "Security Solutions Ltd.",
|
||||
},
|
||||
RouteServer: &RouteServer{
|
||||
ID: "3",
|
||||
RouteServer: &LookupRouteServer{
|
||||
ID: &testRsID,
|
||||
Name: "test.rs.ixp",
|
||||
},
|
||||
}
|
||||
@ -295,10 +299,10 @@ func TestSearchFilterCompareRoute(t *testing.T) {
|
||||
}
|
||||
|
||||
// Ext. Communities
|
||||
if searchFilterMatchExtCommunity(route, ExtCommunity{"ro", "23", "123"}) != true {
|
||||
if searchFilterMatchExtCommunity(route, ExtCommunity{"ro", 23, 123}) != true {
|
||||
t.Error("Route should have community ro:23:123")
|
||||
}
|
||||
if searchFilterMatchExtCommunity(route, ExtCommunity{"rt", "42", "111"}) == true {
|
||||
if searchFilterMatchExtCommunity(route, ExtCommunity{"rt", 42, 111}) == true {
|
||||
t.Error("Route should not have community rt:42:111")
|
||||
}
|
||||
|
||||
@ -656,3 +660,132 @@ func TestNeighborFilterFromQuery(t *testing.T) {
|
||||
t.Error("Unexpected name:", filter.name)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSearchFiltersHasKey(t *testing.T) {
|
||||
// Sources filter present
|
||||
query := "asn=2342&sources=foo"
|
||||
values, _ := url.ParseQuery(query)
|
||||
filters, _ := FiltersFromQuery(values)
|
||||
|
||||
if !filters.HasGroup(SearchKeySources) {
|
||||
t.Error("sources should be filtered")
|
||||
}
|
||||
|
||||
// Check without sources present
|
||||
query = "asn=2342"
|
||||
values, _ = url.ParseQuery(query)
|
||||
filters, _ = FiltersFromQuery(values)
|
||||
|
||||
if filters.HasGroup(SearchKeySources) {
|
||||
t.Error("sources should not be filtered")
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseInvalidCommunityFilterText(t *testing.T) {
|
||||
_, _, err := parseCommunityFilterText("")
|
||||
if err == nil {
|
||||
t.Error("Expected error for empty filter")
|
||||
}
|
||||
t.Log(err)
|
||||
|
||||
_, _, err = parseCommunityFilterText("23452")
|
||||
if err == nil {
|
||||
t.Error("Expected error for empty filter")
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseCommunityFilterText(t *testing.T) {
|
||||
text := "12345:23"
|
||||
key, filter, err := parseCommunityFilterText(text)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if key != SearchKeyCommunities {
|
||||
t.Error("Expected key to be", SearchKeyCommunities, "but got:", key)
|
||||
}
|
||||
v := filter.Value.(Community)
|
||||
if v[0] != 12345 && v[1] != 23 {
|
||||
t.Error("Expected community to be 12345:23 but got:", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseLargeCommunityFilterText(t *testing.T) {
|
||||
text := "12345:23:42"
|
||||
key, filter, err := parseCommunityFilterText(text)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if key != SearchKeyLargeCommunities {
|
||||
t.Error("Expected key to be", SearchKeyLargeCommunities, "but got:", key)
|
||||
}
|
||||
v := filter.Value.(Community)
|
||||
if v[0] != 12345 && v[1] != 23 && v[2] != 42 {
|
||||
t.Error("Expected community to be 12345:23:42 but got:", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseExtCommunityFilterText(t *testing.T) {
|
||||
text := "ro:12345:23"
|
||||
key, filter, err := parseCommunityFilterText(text)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if key != SearchKeyExtCommunities {
|
||||
t.Error("Expected key to be", SearchKeyExtCommunities, "but got:", key)
|
||||
}
|
||||
v := filter.Value.(ExtCommunity)
|
||||
if v[0] != "ro" && v[1] != "12345" && v[2] != "23" {
|
||||
t.Error("Expected community to be ro:12345:23 but got:", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFiltersFromTokens(t *testing.T) {
|
||||
tokens := []string{"#23:42", "#ro:23:42", "#1000:23:42"}
|
||||
|
||||
filters, err := FiltersFromTokens(tokens)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Check communities
|
||||
communities := filters.GetGroupByKey(SearchKeyCommunities).Filters
|
||||
if len(communities) != 1 {
|
||||
t.Error("There should be 1 community filter")
|
||||
}
|
||||
|
||||
v0 := communities[0].Value.(Community)
|
||||
if v0[0] != 23 && v0[1] != 42 {
|
||||
t.Error("Expected community to be 23:42 but got:", v0)
|
||||
}
|
||||
|
||||
// Check ext. communities
|
||||
extCommunities := filters.GetGroupByKey(SearchKeyExtCommunities).Filters
|
||||
if len(extCommunities) != 1 {
|
||||
t.Error("There should be 1 ext. community filter")
|
||||
}
|
||||
|
||||
v1 := extCommunities[0].Value.(ExtCommunity)
|
||||
if v1[0] != "ro" && v1[1] != "23" && v1[2] != "42" {
|
||||
t.Error("Expected community to be ro:23:42 but got:", v1)
|
||||
}
|
||||
|
||||
// Check large communities
|
||||
largeCommunities := filters.GetGroupByKey(SearchKeyLargeCommunities).Filters
|
||||
if len(largeCommunities) != 1 {
|
||||
t.Error("There should be 1 large community filter")
|
||||
}
|
||||
|
||||
v2 := largeCommunities[0].Value.(Community)
|
||||
if v2[0] != 1000 && v2[1] != 23 && v2[2] != 42 {
|
||||
t.Error("Expected community to be 1000:23:42 but got:", v2)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFiltersFromTokensInvalid(t *testing.T) {
|
||||
tokens := []string{"#"}
|
||||
_, err := FiltersFromTokens(tokens)
|
||||
if err == nil {
|
||||
t.Error("Expected error for invalid filter")
|
||||
}
|
||||
t.Log(err)
|
||||
}
|
||||
|
121
pkg/config/bgp_communities.go
Normal file
@ -0,0 +1,121 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/alice-lg/alice-lg/pkg/api"
|
||||
"github.com/alice-lg/alice-lg/pkg/decoders"
|
||||
)
|
||||
|
||||
// ErrInvalidCommunity creates an invalid community error
|
||||
func ErrInvalidCommunity(s string) error {
|
||||
return fmt.Errorf("invalid community: %s", s)
|
||||
}
|
||||
|
||||
// Helper: parse communities from a section body
func parseAndMergeCommunities(
	communities api.BGPCommunityMap, body string,
) api.BGPCommunityMap {

	// Parse and merge communities
	lines := strings.Split(body, "\n")
	for _, line := range lines {
		kv := strings.SplitN(line, "=", 2)
		if len(kv) != 2 {
			log.Println("Skipping malformed BGP community:", line)
			continue
		}

		community := strings.TrimSpace(kv[0])
		label := strings.TrimSpace(kv[1])
		communities.Set(community, label)
	}

	return communities
}
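
The expected body format is one `community = label` pair per line, as in the community sections of the example config further below. A usage sketch (the first community value is illustrative, the second is taken from the test config):

	communities := parseAndMergeCommunities(api.BGPCommunityMap{},
		"65535:666 = blackhole\n"+
			"0:* = do not redistribute to AS$1")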
|
||||
|
||||
// Parse a communities set with ranged communities
|
||||
func parseRangeCommunitiesSet(body string) (*api.BGPCommunitiesSet, error) {
|
||||
comms := []api.BGPCommunityRange{}
|
||||
large := []api.BGPCommunityRange{}
|
||||
ext := []api.BGPCommunityRange{}
|
||||
|
||||
lines := strings.Split(body, "\n")
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
if line == "" {
|
||||
continue // Empty
|
||||
}
|
||||
if strings.HasPrefix(line, "#") {
|
||||
continue // Comment
|
||||
}
|
||||
comm, err := parseRangeCommunity(line)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
switch comm.Type() {
|
||||
case api.BGPCommunityTypeStd:
|
||||
comms = append(comms, comm)
|
||||
case api.BGPCommunityTypeLarge:
|
||||
large = append(large, comm)
|
||||
case api.BGPCommunityTypeExt:
|
||||
ext = append(ext, comm)
|
||||
}
|
||||
}
|
||||
|
||||
set := &api.BGPCommunitiesSet{
|
||||
Standard: comms,
|
||||
Large: large,
|
||||
Extended: ext,
|
||||
}
|
||||
return set, nil
|
||||
}
|
||||
|
||||
func parseRangeCommunity(s string) (api.BGPCommunityRange, error) {
	tokens := strings.Split(s, ":")
	if len(tokens) < 2 {
		return nil, ErrInvalidCommunity(s)
	}

	// Extract ranges and make uniform structure
	parts := make([][]string, 0, len(tokens))
	for _, t := range tokens {
		values := strings.SplitN(t, "-", 2)
		if len(values) == 0 {
			return nil, ErrInvalidCommunity(s)
		}
		if len(values) == 1 {
			parts = append(parts, []string{values[0], values[0]})
		} else {
			parts = append(parts, []string{values[0], values[1]})
		}
	}
	if len(parts) <= 1 {
		return nil, ErrInvalidCommunity(s)
	}

	// Check if this might be an ext community
	isExt := false
	if _, err := strconv.Atoi(parts[0][0]); err != nil {
		isExt = true // At least it looks like...
	}

	if isExt && len(parts) != 3 {
		return nil, ErrInvalidCommunity(s)
	}
	if isExt {
		return api.BGPCommunityRange{
			[]string{parts[0][0], parts[0][0]},
			decoders.IntListFromStrings(parts[1]),
			decoders.IntListFromStrings(parts[2]),
		}, nil
	}
	comm := api.BGPCommunityRange{}
	for _, p := range parts {
		comm = append(comm, decoders.IntListFromStrings(p))
	}
	return comm, nil
}
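
Illustrative results for the notations used in the [blackhole_communities] example further down in this change, assuming decoders.IntListFromStrings yields a list of ints: a single value becomes a one-element range, and a non-numeric first token marks an extended community.

	comm, _ := parseRangeCommunity("2342:65530-65535:665-667")
	// comm: [2342..2342]:[65530..65535]:[665..667] (large community, all parts as int ranges)

	ext, _ := parseRangeCommunity("rt:1324:4200000000-4200010000")
	// ext: ["rt"]:[1324..1324]:[4200000000..4200010000] (extended community, type token kept as string)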
|
@ -5,6 +5,7 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
@ -16,6 +17,7 @@ import (
|
||||
|
||||
"github.com/alice-lg/alice-lg/pkg/api"
|
||||
"github.com/alice-lg/alice-lg/pkg/decoders"
|
||||
"github.com/alice-lg/alice-lg/pkg/pools"
|
||||
"github.com/alice-lg/alice-lg/pkg/sources"
|
||||
"github.com/alice-lg/alice-lg/pkg/sources/birdwatcher"
|
||||
"github.com/alice-lg/alice-lg/pkg/sources/gobgp"
|
||||
@ -67,21 +69,33 @@ const (
|
||||
// DefaultHTTPTimeout is the time in seconds after which the
|
||||
// server will timeout.
|
||||
DefaultHTTPTimeout = 120
|
||||
|
||||
// DefaultPrefixLookupCommunityFilterCutoff is the number of
|
||||
// routes after which the community filter will not be
|
||||
// available.
|
||||
DefaultPrefixLookupCommunityFilterCutoff = 100000
|
||||
|
||||
// DefaultRoutesStoreQueryLimit is the default limit for
|
||||
// prefixes returned from the store.
|
||||
DefaultRoutesStoreQueryLimit = 200000
|
||||
)
|
||||
|
||||
// A ServerConfig holds the runtime configuration
|
||||
// for the backend.
|
||||
type ServerConfig struct {
|
||||
Listen string `ini:"listen_http"`
|
||||
HTTPTimeout int `ini:"http_timeout"`
|
||||
EnablePrefixLookup bool `ini:"enable_prefix_lookup"`
|
||||
NeighborsStoreRefreshInterval int `ini:"neighbors_store_refresh_interval"`
|
||||
NeighborsStoreRefreshParallelism int `ini:"neighbors_store_refresh_parallelism"`
|
||||
RoutesStoreRefreshInterval int `ini:"routes_store_refresh_interval"`
|
||||
RoutesStoreRefreshParallelism int `ini:"routes_store_refresh_parallelism"`
|
||||
StoreBackend string `ini:"store_backend"`
|
||||
Asn int `ini:"asn"`
|
||||
EnableNeighborsStatusRefresh bool `ini:"enable_neighbors_status_refresh"`
|
||||
Listen string `ini:"listen_http"`
|
||||
HTTPTimeout int `ini:"http_timeout"`
|
||||
EnablePrefixLookup bool `ini:"enable_prefix_lookup"`
|
||||
PrefixLookupCommunityFilterCutoff int `ini:"prefix_lookup_community_filter_cutoff"`
|
||||
NeighborsStoreRefreshInterval int `ini:"neighbors_store_refresh_interval"`
|
||||
NeighborsStoreRefreshParallelism int `ini:"neighbors_store_refresh_parallelism"`
|
||||
RoutesStoreRefreshInterval int `ini:"routes_store_refresh_interval"`
|
||||
RoutesStoreRefreshParallelism int `ini:"routes_store_refresh_parallelism"`
|
||||
RoutesStoreQueryLimit uint `ini:"routes_store_query_limit"`
|
||||
StoreBackend string `ini:"store_backend"`
|
||||
DefaultAsn int `ini:"asn"`
|
||||
EnableNeighborsStatusRefresh bool `ini:"enable_neighbors_status_refresh"`
|
||||
StreamParserThrottle int `ini:"stream_parser_throttle"`
|
||||
}
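
The newly added struct fields map to the following [server] keys; a hedged example with illustrative values (the cutoff and query limit match the defaults defined in the constants above):

	[server]
	prefix_lookup_community_filter_cutoff = 100000
	routes_store_query_limit = 200000
	stream_parser_throttle = 2342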
|
||||
|
||||
// PostgresConfig is the configuration for the database
|
||||
@ -124,11 +138,11 @@ type RejectCandidatesConfig struct {
|
||||
// validation state.
|
||||
type RpkiConfig struct {
|
||||
// Define communities
|
||||
Enabled bool `ini:"enabled"`
|
||||
Valid []string `ini:"valid"`
|
||||
Unknown []string `ini:"unknown"`
|
||||
NotChecked []string `ini:"not_checked"`
|
||||
Invalid []string `ini:"invalid"`
|
||||
Enabled bool `ini:"enabled"`
|
||||
Valid [][]string `ini:"valid"`
|
||||
Unknown [][]string `ini:"unknown"`
|
||||
NotChecked [][]string `ini:"not_checked"`
|
||||
Invalid [][]string `ini:"invalid"`
|
||||
}
|
||||
|
||||
// UIConfig holds runtime settings for the web client
|
||||
@ -146,8 +160,9 @@ type UIConfig struct {
|
||||
RoutesNoexports NoexportsConfig
|
||||
RoutesRejectCandidates RejectCandidatesConfig
|
||||
|
||||
BGPCommunities api.BGPCommunityMap
|
||||
Rpki RpkiConfig
|
||||
BGPCommunities api.BGPCommunityMap
|
||||
BGPBlackholeCommunities api.BGPCommunitiesSet
|
||||
Rpki RpkiConfig
|
||||
|
||||
Theme ThemeConfig
|
||||
|
||||
@ -387,28 +402,6 @@ func getLookupColumns(config *ini.File) (
|
||||
return columns, order, nil
|
||||
}
|
||||
|
||||
// Helper parse communities from a section body
|
||||
func parseAndMergeCommunities(
|
||||
communities api.BGPCommunityMap, body string,
|
||||
) api.BGPCommunityMap {
|
||||
|
||||
// Parse and merge communities
|
||||
lines := strings.Split(body, "\n")
|
||||
for _, line := range lines {
|
||||
kv := strings.SplitN(line, "=", 2)
|
||||
if len(kv) != 2 {
|
||||
log.Println("Skipping malformed BGP community:", line)
|
||||
continue
|
||||
}
|
||||
|
||||
community := strings.TrimSpace(kv[0])
|
||||
label := strings.TrimSpace(kv[1])
|
||||
communities.Set(community, label)
|
||||
}
|
||||
|
||||
return communities
|
||||
}
|
||||
|
||||
// Get UI config: BGP Communities
|
||||
func getBGPCommunityMap(config *ini.File) api.BGPCommunityMap {
|
||||
// Load defaults
|
||||
@ -459,6 +452,25 @@ func getRoutesNoexports(config *ini.File) (NoexportsConfig, error) {
|
||||
return noexportsConfig, nil
|
||||
}
|
||||
|
||||
// Get UI config: blackhole communities
|
||||
func getBlackholeCommunities(config *ini.File) (api.BGPCommunitiesSet, error) {
|
||||
section := config.Section("blackhole_communities")
|
||||
defaultBlackholes := api.BGPCommunitiesSet{
|
||||
Standard: []api.BGPCommunityRange{
|
||||
{[]interface{}{65535, 65535}, []interface{}{666, 666}},
|
||||
},
|
||||
}
|
||||
if section == nil {
|
||||
return defaultBlackholes, nil
|
||||
}
|
||||
set, err := parseRangeCommunitiesSet(section.Body())
|
||||
if err != nil {
|
||||
return defaultBlackholes, err
|
||||
}
|
||||
set.Standard = append(set.Standard, defaultBlackholes.Standard...)
|
||||
return *set, nil
|
||||
}
|
||||
|
||||
// Get UI config: Reject candidates
|
||||
func getRejectCandidatesConfig(config *ini.File) (RejectCandidatesConfig, error) {
|
||||
candidateCommunities := config.Section(
|
||||
@ -486,51 +498,89 @@ func getRpkiConfig(config *ini.File) (RpkiConfig, error) {
|
||||
// Defaults taken from:
|
||||
// https://www.euro-ix.net/en/forixps/large-bgp-communities/
|
||||
section := config.Section("rpki")
|
||||
lines := strings.Split(section.Body(), "\n")
|
||||
|
||||
for _, line := range lines {
|
||||
l := strings.TrimSpace(line)
|
||||
if !strings.Contains(l, "=") {
|
||||
continue
|
||||
}
|
||||
parts := strings.SplitN(l, "=", 2)
|
||||
if len(parts) != 2 {
|
||||
return rpki, fmt.Errorf("invalid rpki config line: %s", line)
|
||||
}
|
||||
key := strings.TrimSpace(parts[0])
|
||||
value := strings.TrimSpace(parts[1])
|
||||
comm := strings.Split(value, ":")
|
||||
|
||||
if key == "enabled" {
|
||||
rpki.Enabled = value == "true"
|
||||
} else if key == "valid" {
|
||||
rpki.Valid = append(rpki.Valid, comm)
|
||||
} else if key == "not_checked" {
|
||||
rpki.NotChecked = append(rpki.NotChecked, comm)
|
||||
} else if key == "invalid" {
|
||||
rpki.Invalid = append(rpki.Invalid, comm)
|
||||
} else if key == "unknown" {
|
||||
rpki.Unknown = append(rpki.Unknown, comm)
|
||||
} else {
|
||||
return rpki, fmt.Errorf("invalid rpki config line: %s", line)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if err := section.MapTo(&rpki); err != nil {
|
||||
return rpki, err
|
||||
}
|
||||
|
||||
fallbackAsn, err := getOwnASN(config)
|
||||
hasDefaultASN := true
|
||||
asn, err := getDefaultASN(config)
|
||||
if err != nil {
|
||||
log.Println(
|
||||
"Own ASN is not configured.",
|
||||
"This might lead to unexpected behaviour with BGP large communities",
|
||||
)
|
||||
hasDefaultASN = false
|
||||
}
|
||||
ownAsn := fmt.Sprintf("%d", fallbackAsn)
|
||||
|
||||
// Fill in defaults or postprocess config value
|
||||
if len(rpki.Valid) == 0 {
|
||||
rpki.Valid = []string{ownAsn, "1000", "1"}
|
||||
} else {
|
||||
rpki.Valid = strings.SplitN(rpki.Valid[0], ":", 3)
|
||||
if len(rpki.Valid) == 0 && !hasDefaultASN && rpki.Enabled {
|
||||
return rpki, fmt.Errorf(
|
||||
"rpki.valid must be set if no server.asn is configured")
|
||||
}
|
||||
if len(rpki.Valid) == 0 && rpki.Enabled {
|
||||
log.Printf("Using default rpki.valid: %s:1000:1\n", asn)
|
||||
rpki.Valid = [][]string{{asn, "1000", "1"}}
|
||||
}
|
||||
|
||||
if len(rpki.Unknown) == 0 {
|
||||
rpki.Unknown = []string{ownAsn, "1000", "2"}
|
||||
} else {
|
||||
rpki.Unknown = strings.SplitN(rpki.Unknown[0], ":", 3)
|
||||
if len(rpki.Unknown) == 0 && !hasDefaultASN && rpki.Enabled {
|
||||
return rpki, fmt.Errorf(
|
||||
"rpki.unknown must be set if no server.asn is configured")
|
||||
}
|
||||
if len(rpki.Unknown) == 0 && rpki.Enabled {
|
||||
log.Printf("Using default rpki.unknown: %s:1000:2\n", asn)
|
||||
rpki.Unknown = [][]string{{asn, "1000", "2"}}
|
||||
}
|
||||
|
||||
if len(rpki.NotChecked) == 0 && !hasDefaultASN && rpki.Enabled {
|
||||
return rpki, fmt.Errorf(
|
||||
"rpki.not_checked must be set if no server.asn is set")
|
||||
}
|
||||
if len(rpki.NotChecked) == 0 {
|
||||
rpki.NotChecked = []string{ownAsn, "1000", "3"}
|
||||
} else {
|
||||
rpki.NotChecked = strings.SplitN(rpki.NotChecked[0], ":", 3)
|
||||
log.Printf("Using default rpki.not_checked: %s:1000:3\n", asn)
|
||||
rpki.NotChecked = [][]string{{asn, "1000", "3"}}
|
||||
}
|
||||
|
||||
// As the euro-ix document states, this can be a range.
|
||||
if len(rpki.Invalid) == 0 {
|
||||
rpki.Invalid = []string{ownAsn, "1000", "4", "*"}
|
||||
} else {
|
||||
// Preprocess
|
||||
rpki.Invalid = strings.SplitN(rpki.Invalid[0], ":", 3)
|
||||
if len(rpki.Invalid) != 3 {
|
||||
// This is wrong, we should have three parts (RS):1000:[range]
|
||||
return rpki, fmt.Errorf(
|
||||
"unexpected rpki.Invalid configuration: %v", rpki.Invalid)
|
||||
for i, com := range rpki.Invalid {
|
||||
if len(com) != 3 {
|
||||
return rpki, fmt.Errorf("Invalid rpki.invalid config: %v", com)
|
||||
}
|
||||
tokens := strings.Split(rpki.Invalid[2], "-")
|
||||
rpki.Invalid = append([]string{rpki.Invalid[0], rpki.Invalid[1]}, tokens...)
|
||||
tokens := strings.Split(com[2], "-")
|
||||
rpki.Invalid[i] = append([]string{com[0], com[1]}, tokens...)
|
||||
}
|
||||
if len(rpki.Invalid) == 0 && !hasDefaultASN && rpki.Enabled {
|
||||
return rpki, fmt.Errorf(
|
||||
"rpki.invalid must be set if no server.asn is configured")
|
||||
}
|
||||
if len(rpki.Invalid) == 0 && rpki.Enabled {
|
||||
log.Printf("Using default rpki.invalid: %s:1000:4-*\n", asn)
|
||||
rpki.Invalid = [][]string{{asn, "1000", "4", "*"}}
|
||||
}
|
||||
|
||||
return rpki, nil
|
||||
@ -539,12 +589,12 @@ func getRpkiConfig(config *ini.File) (RpkiConfig, error) {
|
||||
// Helper: Get own ASN from ini
|
||||
// This is now easy, since we enforce an ASN in
|
||||
// the [server] section.
|
||||
func getOwnASN(config *ini.File) (int, error) {
|
||||
func getDefaultASN(config *ini.File) (string, error) {
|
||||
server := config.Section("server")
|
||||
asn := server.Key("asn").MustInt(-1)
|
||||
asn := server.Key("asn").MustString("")
|
||||
|
||||
if asn == -1 {
|
||||
return 0, fmt.Errorf("could not get own ASN from config")
|
||||
if asn == "" {
|
||||
return "", fmt.Errorf("could not get default ASN from config")
|
||||
}
|
||||
|
||||
return asn, nil
|
||||
@ -618,6 +668,12 @@ func getUIConfig(config *ini.File) (UIConfig, error) {
|
||||
return uiConfig, err
|
||||
}
|
||||
|
||||
// Blackhole communities
|
||||
blackholeCommunities, err := getBlackholeCommunities(config)
|
||||
if err != nil {
|
||||
return uiConfig, err
|
||||
}
|
||||
|
||||
// Theme configuration: Theming is optional, if no settings
|
||||
// are found, it will be ignored
|
||||
themeConfig := getThemeConfig(config)
|
||||
@ -640,8 +696,9 @@ func getUIConfig(config *ini.File) (UIConfig, error) {
|
||||
RoutesNoexports: noexports,
|
||||
RoutesRejectCandidates: rejectCandidates,
|
||||
|
||||
BGPCommunities: getBGPCommunityMap(config),
|
||||
Rpki: rpki,
|
||||
BGPBlackholeCommunities: blackholeCommunities,
|
||||
BGPCommunities: getBGPCommunityMap(config),
|
||||
Rpki: rpki,
|
||||
|
||||
Theme: themeConfig,
|
||||
|
||||
@ -702,6 +759,9 @@ func getSources(config *ini.File) ([]*SourceConfig, error) {
|
||||
Type: sourceType,
|
||||
}
|
||||
|
||||
// Register route server ID with pool
|
||||
pools.RouteServers.Acquire(sourceID)
|
||||
|
||||
// Set backend
|
||||
switch backendType {
|
||||
case SourceBackendBirdwatcher:
|
||||
@ -816,34 +876,75 @@ func getSources(config *ini.File) ([]*SourceConfig, error) {
|
||||
return sources, nil
|
||||
}
|
||||
|
||||
// preprocessConfig parses the variables in the config
// and applies them to the rest of the config.
func preprocessConfig(data []byte) []byte {
	lines := bytes.Split(data, []byte("\n"))
	config := make([][]byte, 0, len(lines))

	expMap := ExpandMap{}
	for _, line := range lines {
		l := strings.TrimSpace(string(line))
		if strings.HasPrefix(l, "$") {
			expMap.AddExpr(l[1:])
			continue
		}
		config = append(config, line)
	}

	// Now apply to config
	configLines := []string{}
	for _, line := range config {
		l := string(line)
		exp, err := expMap.Expand(l)
		if err != nil {
			log.Fatal("Error expanding expression in config:", l, err)
		}
		for _, e := range exp {
			configLines = append(configLines, e)
		}
	}
	return []byte(strings.Join(configLines, "\n"))
}
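
A hedged before/after sketch of this preprocessing step, using the variables from the test config further below: lines starting with `$` declare variables and are dropped, and `{{ASN*}}` placeholders are expanded once per matching variable.

	# input
	$ASN01 = 1111
	$ASN02 = 2222
	{{ASN*}}:65666:1 = An IP Bogon was detected

	# after preprocessConfig
	1111:65666:1 = An IP Bogon was detected
	2222:65666:1 = An IP Bogon was detected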
|
||||
|
||||
// LoadConfig reads a configuration from a file.
|
||||
func LoadConfig(file string) (*Config, error) {
|
||||
|
||||
// Try to get config file, fallback to alternatives
|
||||
file, err := getConfigFile(file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Read the config file and preprocess it
|
||||
configData, err := os.ReadFile(file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
configData = preprocessConfig(configData)
|
||||
|
||||
// Load configuration, but handle bgp communities section
|
||||
// with our own parser
|
||||
parsedConfig, err := ini.LoadSources(ini.LoadOptions{
|
||||
UnparseableSections: []string{
|
||||
"bgp_communities",
|
||||
"blackhole_communities",
|
||||
"rejection_reasons",
|
||||
"noexport_reasons",
|
||||
"rpki",
|
||||
},
|
||||
}, file)
|
||||
}, configData)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Map sections
|
||||
server := ServerConfig{
|
||||
HTTPTimeout: DefaultHTTPTimeout,
|
||||
StoreBackend: "memory",
|
||||
RoutesStoreRefreshParallelism: 1,
|
||||
NeighborsStoreRefreshParallelism: 1,
|
||||
HTTPTimeout: DefaultHTTPTimeout,
|
||||
PrefixLookupCommunityFilterCutoff: DefaultPrefixLookupCommunityFilterCutoff,
|
||||
StoreBackend: "memory",
|
||||
RoutesStoreRefreshParallelism: 1,
|
||||
NeighborsStoreRefreshParallelism: 1,
|
||||
RoutesStoreQueryLimit: DefaultRoutesStoreQueryLimit,
|
||||
}
|
||||
if err := parsedConfig.Section("server").MapTo(&server); err != nil {
|
||||
return nil, err
|
||||
@ -857,7 +958,7 @@ func LoadConfig(file string) (*Config, error) {
|
||||
parsedConfig.Section("postgres").MapTo(&psql)
|
||||
if server.StoreBackend == "postgres" {
|
||||
if psql.URL == "" {
|
||||
return nil, ErrPostgresUnconfigured
|
||||
psql.URL = "postgres:///?sslmode=prefer"
|
||||
}
|
||||
}
|
||||
|
||||
@ -878,6 +979,13 @@ func LoadConfig(file string) (*Config, error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Update stream parser throttle on all birdwatcher sources
|
||||
for _, src := range sources {
|
||||
if src.Backend == SourceBackendBirdwatcher {
|
||||
src.Birdwatcher.StreamParserThrottle = server.StreamParserThrottle
|
||||
}
|
||||
}
|
||||
|
||||
config := &Config{
|
||||
Server: server,
|
||||
Postgres: psql,
|
||||
|
@ -38,6 +38,11 @@ func TestLoadConfigs(t *testing.T) {
|
||||
t.Error("expected to find example community 1:23 with 'some tag'",
|
||||
"but got:", label)
|
||||
}
|
||||
|
||||
// Check prefix lookup cutoff
|
||||
if config.Server.PrefixLookupCommunityFilterCutoff != 123 {
|
||||
t.Error("Expected PrefixLookupCommunityFilterCutoff to be 123")
|
||||
}
|
||||
}
|
||||
|
||||
// TestSourceConfig checks that the proper backend type was identified for each
|
||||
@ -69,6 +74,9 @@ func TestSourceConfig(t *testing.T) {
|
||||
if rs2.Birdwatcher.AltPipeProtocolSuffix != "_lg" {
|
||||
t.Error("unexpected alt_pipe_suffix:", rs2.Birdwatcher.AltPipeProtocolSuffix)
|
||||
}
|
||||
if rs2.Birdwatcher.StreamParserThrottle != 2342 {
|
||||
t.Error("Unexpected StreamParserThrottle", rs2.Birdwatcher.StreamParserThrottle)
|
||||
}
|
||||
}
|
||||
nilGoBGPConfig := gobgp.Config{}
|
||||
if rs3.GoBGP == nilGoBGPConfig {
|
||||
@ -164,32 +172,21 @@ func TestBlackholeParsing(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestOwnASN(t *testing.T) {
|
||||
config, err := LoadConfig("testdata/alice.conf")
|
||||
if err != nil {
|
||||
t.Fatal("Could not load test config:", err)
|
||||
}
|
||||
|
||||
if config.Server.Asn != 9033 {
|
||||
t.Error("Expected a set server asn")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRpkiConfig(t *testing.T) {
|
||||
config, err := LoadConfig("testdata/alice.conf")
|
||||
if err != nil {
|
||||
t.Fatal("Could not load test config:", err)
|
||||
}
|
||||
|
||||
if len(config.UI.Rpki.Valid) != 3 {
|
||||
if len(config.UI.Rpki.Valid[0]) != 3 {
|
||||
t.Error("Unexpected RPKI:VALID,", config.UI.Rpki.Valid)
|
||||
}
|
||||
if len(config.UI.Rpki.Invalid) != 4 {
|
||||
if len(config.UI.Rpki.Invalid[0]) != 4 {
|
||||
t.Fatal("Unexpected RPKI:INVALID,", config.UI.Rpki.Invalid)
|
||||
}
|
||||
|
||||
// Check fallback
|
||||
if config.UI.Rpki.NotChecked[0] != "9033" {
|
||||
if config.UI.Rpki.NotChecked[0][0] != "9999" {
|
||||
t.Error(
|
||||
"Expected NotChecked to fall back to defaults, got:",
|
||||
config.UI.Rpki.NotChecked,
|
||||
@ -197,7 +194,7 @@ func TestRpkiConfig(t *testing.T) {
|
||||
}
|
||||
|
||||
// Check range postprocessing
|
||||
if config.UI.Rpki.Invalid[3] != "*" {
|
||||
if config.UI.Rpki.Invalid[0][3] != "*" {
|
||||
t.Error("Missing wildcard from config")
|
||||
}
|
||||
|
||||
@ -227,7 +224,7 @@ func TestRejectCandidatesConfig(t *testing.T) {
|
||||
func TestDefaultHTTPTimeout(t *testing.T) {
|
||||
config, err := LoadConfig("testdata/alice.conf")
|
||||
if err != nil {
|
||||
t.Error("Could not load test config:", err)
|
||||
t.Fatal("Could not load test config:", err)
|
||||
}
|
||||
|
||||
if config.Server.HTTPTimeout != DefaultHTTPTimeout {
|
||||
@ -249,3 +246,19 @@ func TestPostgresStoreConfig(t *testing.T) {
|
||||
}
|
||||
t.Log(config.Postgres)
|
||||
}
|
||||
|
||||
func TestGetBlackholeCommunities(t *testing.T) {
|
||||
config, _ := LoadConfig("testdata/alice.conf")
|
||||
comms := config.UI.BGPBlackholeCommunities
|
||||
|
||||
if comms.Standard[0][0].([]int)[0] != 1337 {
|
||||
t.Error("unexpected community:", comms.Standard[0])
|
||||
}
|
||||
if len(comms.Extended) != 1 {
|
||||
t.Error("unexpected communities:", comms.Extended)
|
||||
}
|
||||
if len(comms.Large) != 1 {
|
||||
t.Error("unexpected communities:", comms.Large)
|
||||
}
|
||||
t.Log(comms)
|
||||
}
|
||||
|
188
pkg/config/expand.go
Normal file
@ -0,0 +1,188 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Compile input pattern regex
|
||||
var (
|
||||
expandMatchPlaceholder = regexp.MustCompile(`(?U:{.*}+?)`)
|
||||
expandMatchWildcardShorthand = regexp.MustCompile(`(?U:{{.*\*}}+?)`)
|
||||
)
|
||||
|
||||
// Extract all matches from the input string.
|
||||
// The pattern to find is {INPUT}. The input string
|
||||
// itself can contain new matches.
|
||||
func expandFindPlaceholders(s string) []string {
|
||||
|
||||
// Find all matches
|
||||
results := expandMatchPlaceholder.FindAllString(s, -1)
|
||||
if len(results) == 0 {
|
||||
return []string{}
|
||||
}
|
||||
|
||||
matches := []string{}
|
||||
for _, result := range results {
|
||||
key := expandGetKey(result)
|
||||
subP := expandFindPlaceholders(key)
|
||||
matches = append(matches, result)
|
||||
matches = append(matches, subP...)
|
||||
}
|
||||
|
||||
return matches
|
||||
}
|
||||
|
||||
// Extract the key from the placeholder
|
||||
func expandGetKey(s string) string {
|
||||
// Strip the enclosing curly braces
|
||||
s = strings.TrimPrefix(s, "{")
|
||||
s = strings.TrimSuffix(s, "}")
|
||||
return s
|
||||
}
|
||||
|
||||
// ExpandMap holds the current state of variables
|
||||
type ExpandMap map[string]string
|
||||
|
||||
// Retrieve a set of matching variables, by iterating variables.
|
||||
// Whenever a key matches the wildcard, the prefix is removed.
|
||||
// Example:
|
||||
//
|
||||
// pattern = "AS*", key = "AS2342", value = "2342"
|
||||
func (e ExpandMap) matchWildcard(pattern string) []string {
|
||||
matches := []string{}
|
||||
|
||||
// Strip the wildcard from the pattern.
|
||||
pattern = strings.TrimSuffix(pattern, "*")
|
||||
|
||||
// Iterate variables and add match to result set
|
||||
for k := range e {
|
||||
if strings.HasPrefix(k, pattern) {
|
||||
key := strings.TrimPrefix(k, pattern)
|
||||
matches = append(matches, key)
|
||||
}
|
||||
}
|
||||
return matches
|
||||
}
|
||||
|
||||
// Get all substitutions for a given key.
|
||||
// This method will return an error, if a placeholder
|
||||
// does not match.
|
||||
func (e ExpandMap) getSubstitutions(key string) []string {
|
||||
// Check if the placeholder is a wildcard
|
||||
if strings.HasSuffix(key, "*") {
|
||||
return e.matchWildcard(key)
|
||||
}
|
||||
|
||||
// Check if the placeholder is direct match
|
||||
if val, ok := e[key]; ok {
|
||||
return []string{val}
|
||||
}
|
||||
|
||||
return []string{}
|
||||
}
|
||||
|
||||
// Get placeholder level. This is the number of opening
|
||||
// curly braces in the placeholder.
|
||||
func expandGetLevel(s string) int {
|
||||
level := 0
|
||||
for _, c := range s {
|
||||
if c == '{' {
|
||||
level++
|
||||
}
|
||||
}
|
||||
return level
|
||||
}
|
||||
|
||||
// Preprocess input string and resolve syntactic sugar.
|
||||
// Replace {{VAR}} with {VAR{VAR}} to make it easier
|
||||
// to access the wildcard value.
|
||||
func expandPreprocess(s string) string {
|
||||
// Find all access shorthands and replace them
|
||||
// with the full syntax
|
||||
results := expandMatchWildcardShorthand.FindAllString(s, -1)
|
||||
for _, match := range results {
|
||||
// Wildcard {{KEY*}} -> KEY
|
||||
key := match[2 : len(match)-3]
|
||||
expr := fmt.Sprintf("{%s{%s*}}", key, key)
|
||||
s = strings.Replace(s, match, expr, -1)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// Expand variables by recursive substitution and expansion
|
||||
func (e ExpandMap) Expand(s string) ([]string, error) {
|
||||
// Preprocess syntactic sugar: replace {{VAR}}
|
||||
// with {VAR{VAR}}
|
||||
s = expandPreprocess(s)
|
||||
|
||||
// Find all placeholders and substitute them
|
||||
placeholders := expandFindPlaceholders(s)
|
||||
if len(placeholders) == 0 {
|
||||
return []string{s}, nil
|
||||
}
|
||||
|
||||
// Find substitutions for each placeholder
|
||||
substitutions := map[string][]string{}
|
||||
for _, p := range placeholders {
|
||||
key := expandGetKey(p)
|
||||
subs := e.getSubstitutions(key)
|
||||
if len(subs) == 0 {
|
||||
level := expandGetLevel(p)
|
||||
if level == 1 {
|
||||
err := fmt.Errorf("No substitution for %s in '%s'", p, s)
|
||||
return []string{}, err
|
||||
}
|
||||
continue
|
||||
}
|
||||
substitutions[p] = subs
|
||||
}
|
||||
|
||||
// Apply substitutions
|
||||
subsRes := []string{s}
|
||||
for p, subs := range substitutions {
|
||||
subsExp := []string{}
|
||||
for _, s := range subsRes {
|
||||
for _, sub := range subs {
|
||||
res := strings.Replace(s, p, sub, -1)
|
||||
subsExp = append(subsExp, res)
|
||||
}
|
||||
}
|
||||
subsRes = subsExp
|
||||
}
|
||||
|
||||
// Expand recursively
|
||||
results := []string{}
|
||||
for _, s := range subsRes {
|
||||
res, err := e.Expand(s)
|
||||
if err != nil {
|
||||
return []string{}, err
|
||||
}
|
||||
results = append(results, res...)
|
||||
}
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
// AddExpr inserts a new variable to the map. Key and value are
|
||||
// expanded.
|
||||
func (e ExpandMap) AddExpr(expr string) error {
|
||||
// Expand expression
|
||||
res, err := e.Expand(expr)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, exp := range res {
|
||||
// Split key and value
|
||||
parts := strings.SplitN(exp, "=", 2)
|
||||
if len(parts) != 2 {
|
||||
return fmt.Errorf("Invalid expression '%s'", expr)
|
||||
}
|
||||
key := strings.TrimSpace(parts[0])
|
||||
val := strings.TrimSpace(parts[1])
|
||||
e[key] = val
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
122
pkg/config/expand_test.go
Normal file
@ -0,0 +1,122 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
// Text variable pattern matching
|
||||
func TestExpandMatch(t *testing.T) {
|
||||
exp := ExpandMap{
|
||||
"AS2342": "",
|
||||
"AS1111": "",
|
||||
"FOOBAR": "foo",
|
||||
}
|
||||
|
||||
matches := exp.matchWildcard("AS*")
|
||||
if len(matches) != 2 {
|
||||
t.Errorf("Expected 2 matches, got %d", len(matches))
|
||||
}
|
||||
|
||||
for _, m := range matches {
|
||||
t.Log("Match wildcard:", m)
|
||||
}
|
||||
}
|
||||
|
||||
// Test variable expansion / substitution
|
||||
func TestFindPlaceholders(t *testing.T) {
|
||||
s := "{FOO} BAR {AS{AS*}}"
|
||||
placeholders := expandFindPlaceholders(s)
|
||||
if len(placeholders) != 3 {
|
||||
t.Errorf("Expected 3 placeholders, got %d", len(placeholders))
|
||||
}
|
||||
t.Log(placeholders)
|
||||
}
|
||||
|
||||
// Test variable expansion / substitution
|
||||
func TestExpand(t *testing.T) {
|
||||
s := "{FOO} BAR {AS{AS*}} AS {AS*}"
|
||||
exp := ExpandMap{
|
||||
"AS2342": "AS2342",
|
||||
"AS1111": "AS1111",
|
||||
"FOO": "foo",
|
||||
}
|
||||
|
||||
results, err := exp.Expand(s)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
t.Log(results)
|
||||
}
|
||||
|
||||
func TestExpandErr(t *testing.T) {
|
||||
s := "{FOO} BAR {AS{AS*}} AS {AS*} {UNKNOWN}"
|
||||
exp := ExpandMap{
|
||||
"AS2342": "AS2342",
|
||||
"AS1111": "AS1111",
|
||||
"FOO": "foo",
|
||||
"FN": "fn",
|
||||
"FA": "fa",
|
||||
}
|
||||
|
||||
_, err := exp.Expand(s)
|
||||
t.Log(err)
|
||||
if err == nil {
|
||||
t.Error("Expected error, got nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestExpandPreprocess(t *testing.T) {
|
||||
s := "FOO {FOO} {{AS*}} {F*} {{F*}} {X{X*}}"
|
||||
expect := "FOO {FOO} {AS{AS*}} {F*} {F{F*}} {X{X*}}"
|
||||
s = expandPreprocess(s)
|
||||
if s != expect {
|
||||
t.Errorf("Expected '%s', got '%s'", expect, s)
|
||||
}
|
||||
t.Log(s)
|
||||
|
||||
s = "TEST {{FN}}"
|
||||
s = expandPreprocess(s)
|
||||
t.Log(s)
|
||||
|
||||
}
|
||||
|
||||
func TestExpandAddExpr(t *testing.T) {
|
||||
e := ExpandMap{
|
||||
"FOO": "foo23",
|
||||
"BAR": "bar42",
|
||||
"bar42": "BAM",
|
||||
}
|
||||
|
||||
if err := e.AddExpr("FOOBAR = {FOO}{BAR}{{BAR}}"); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
t.Log(e)
|
||||
|
||||
if e["FOOBAR"] != "foo23bar42BAM" {
|
||||
t.Error("Expected 'foo23bar42BAM', got", e["FOOBAR"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestExpandBgpCommunities(t *testing.T) {
|
||||
e := ExpandMap{
|
||||
"ASRS01": "6695",
|
||||
"ASRS02": "4617",
|
||||
"SW1001": "edge01.fra2",
|
||||
"SW1002": "edge01.fra6",
|
||||
"SW2038": "edge01.nyc1",
|
||||
"RDCTL911": "Redistribute",
|
||||
"RDCTL922": "Do not redistribute",
|
||||
}
|
||||
|
||||
// Some large communities:
|
||||
expr := "{{AS*}}:{RDCTL*}:{SW*} = {{RDCTL*}} to {{SW*}}"
|
||||
exp, err := e.Expand(expr)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
expected := 2 * 3 * 2
|
||||
if len(exp) != expected {
|
||||
t.Errorf("Expected %d results, got %d", expected, len(exp))
|
||||
}
|
||||
t.Log(exp)
|
||||
}
|
66
pkg/config/testdata/alice.conf
vendored
@ -2,6 +2,13 @@
|
||||
# Alice-LG configuration example
# ======================================

$ASN01 = 1111
$ASN02 = 2222

$SW1001 = switch01.dc01
$SW1002 = switch02.dc01
$SW2023 = switch23.dc02

[server]
# configures the built-in webserver and provides global application settings
listen_http = 127.0.0.1:7340
@ -9,9 +16,15 @@ listen_http = 127.0.0.1:7340
enable_prefix_lookup = true
# Try to refresh the neighbor status on every request to /neighbors
enable_neighbors_status_refresh = false
asn = 9033
# this ASN is used as a fallback value in the RPKI feature and for route
# filtering evaluation with large BGP communities
#
# Prefix lookup community filter cutoff defines an upper limit
# of returned routes for which the community filters list is
# available. If the number of routes exceeds this limit, the
# communities filters become available if there is a specific
# route server selected.
prefix_lookup_community_filter_cutoff = 123

# how many route servers will be refreshed at the same time
# if set to 0 (or for the matter of fact 1), refresh will be
@ -25,8 +38,12 @@ neighbors_store_refresh_parallelism = 10000
routes_store_refresh_interval = 5
neighbors_store_refresh_interval = 5

stream_parser_throttle = 2342

store_backend = postgres

asn = 9999

[postgres]
url = "postgres://postgres:postgres@localhost:5432/alice"
min_connections = 10
@ -53,16 +70,16 @@ routes_not_exported_page_size = 250
[rejection_reasons]
# a pair of a large BGP community value and a string to signal the processing
# results of route filtering
9033:65666:1 = An IP Bogon was detected
9033:65666:2 = Prefix is longer than 64
9033:65666:3 = Prefix is longer than 24
9033:65666:4 = AS path contains a bogon AS
9033:65666:5 = AS path length is longer than 64
9033:65666:6 = First AS in path is not the same as the Peer AS
9033:65666:7 = ECIX prefix hijack
9033:65666:8 = Origin AS not found in IRRDB for Peer AS-SET
9033:65666:9 = Prefix not found in IRRDB for Origin AS
9033:65666:10 = Advertised nexthop address is not the same as the peer
{{ASN*}}:65666:1 = An IP Bogon was detected
{{ASN*}}:65666:2 = Prefix is longer than 64
{{ASN*}}:65666:3 = Prefix is longer than 24
{{ASN*}}:65666:4 = AS path contains a bogon AS
{{ASN*}}:65666:5 = AS path length is longer than 64
{{ASN*}}:65666:6 = First AS in path is not the same as the Peer AS
{{ASN*}}:65666:7 = ECIX prefix hijack
{{ASN*}}:65666:8 = Origin AS not found in IRRDB for Peer AS-SET
{{ASN*}}:65666:9 = Prefix not found in IRRDB for Origin AS
{{ASN*}}:65666:10 = Advertised nexthop address is not the same as the peer

23:42:1 = Some made up reason

@ -78,16 +95,20 @@ load_on_demand = true # Default: false
[noexport_reasons]
# a pair of a large BGP community value and a string to signal the processing
# results of route distribution and the distribution policy applied to a route
9033:65667:1 = The target peer policy is Fairly-open and the sender ASN is an exception
9033:65667:2 = The target peer policy is Selective and the sender ASN is no exception
9033:65667:3 = The target peer policy is set to restrictive
9033:65667:4 = The sender has specifically refused export to the target peer, either through sending 65000:AS, or through the portal
9033:65667:5 = The sender has refused export to all peers and the target is no exception, either through sending 65000:0, or through the portal
9033:65667:6 = The Sender has set (peerRTTHigherDeny:ms) and the targets RTT ms >= then the ms in the community
9033:65667:7 = The Sender has set (peerRTTLowerDeny:ms) and the targets RTT ms <= then the ms in the community
{{ASN*}}:65667:1 = The target peer policy is Fairly-open and the sender ASN is an exception
{{ASN*}}:65667:2 = The target peer policy is Selective and the sender ASN is no exception
{{ASN*}}:65667:3 = The target peer policy is set to restrictive
{{ASN*}}:65667:4 = The sender has specifically refused export to the target peer, either through sending 65000:AS, or through the portal
{{ASN*}}:65667:5 = The sender has refused export to all peers and the target is no exception, either through sending 65000:0, or through the portal
{{ASN*}}:65667:6 = The Sender has set (peerRTTHigherDeny:ms) and the targets RTT ms >= then the ms in the community
{{ASN*}}:65667:7 = The Sender has set (peerRTTLowerDeny:ms) and the targets RTT ms <= then the ms in the community

23:46:1 = Some other made up reason

[blackhole_communities]
1337:666
rt:1324:4200000000-4200010000
2342:65530-65535:665-667

[rpki]
# shows rpki validation status in the client, based on the presence of a large
@ -96,10 +117,10 @@ enabled = true

# Optional, falling back to defaults as defined in:
# https://www.euro-ix.net/en/forixps/large-bgp-communities/
valid = 23042:1000:1
unknown = 23042:1000:2
valid = {{ASN*}}:1000:1
unknown = {{ASN*}}:1000:2
# not_checked = 23042:1000:3
invalid = 23042:1000:4-*
invalid = {{ASN*}}:1000:4-*


# Define other known bgp communities
@ -109,6 +130,8 @@ invalid = 23042:1000:4-*
# Wildcards are supported as well:
0:* = do not redistribute to AS$1

{{ASN*}}:911:{SW*} = Redistribute to {{SW*}}

#
# Define columns for neighbours and routes table,
# with <key> = <Table Header>
@ -128,6 +151,7 @@ invalid = 23042:1000:4-*
# Uptime Displays the relative uptime of this neighbour
# Description The neighbour's description with link to routes page
#
#

[neighbours_columns]
address = Neighbour
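The `[rejection_reasons]` and `[noexport_reasons]` sections above map a large BGP community to a human-readable explanation. A minimal sketch of how such a mapping can be resolved, assuming a hypothetical `lookupReason` helper and a hand-built map (neither is part of Alice-LG's API):

```go
package main

import "fmt"

// lookupReason resolves a large BGP community (asn, function, value)
// against a reasons table keyed by "asn:function:value".
func lookupReason(reasons map[string]string, asn, fn, value int) (string, bool) {
	key := fmt.Sprintf("%d:%d:%d", asn, fn, value)
	reason, ok := reasons[key]
	return reason, ok
}

func main() {
	// Two entries mirroring the example config above.
	reasons := map[string]string{
		"9033:65666:1": "An IP Bogon was detected",
		"9033:65666:3": "Prefix is longer than 24",
	}
	if r, ok := lookupReason(reasons, 9033, 65666, 3); ok {
		fmt.Println("route rejected:", r) // route rejected: Prefix is longer than 24
	}
}
```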
@ -27,6 +27,11 @@ func MaybePrefix(s string) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// Must contain at least one dot or colon
|
||||
if !strings.Contains(s, ":") && !strings.Contains(s, ".") {
|
||||
return false
|
||||
}
|
||||
|
||||
// Test using regex
|
||||
matches := ReMatchIPPrefix.FindAllStringIndex(s, -1)
|
||||
if len(matches) == 1 {
|
||||
|
@ -12,9 +12,11 @@ func TestMaybePrefix(t *testing.T) {
|
||||
{"10.0.0", true},
|
||||
{"23.42.11.42/23", true},
|
||||
{"fa42:2342::/32", true},
|
||||
{"200", true},
|
||||
{"1.", true},
|
||||
{"200", false},
|
||||
{"200.", true},
|
||||
{"2001:", true},
|
||||
{"A", true},
|
||||
{"A", false},
|
||||
{"A b", false},
|
||||
{"23 Foo", false},
|
||||
{"Nordfoo", false},
|
||||
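Per the updated test table, `MaybePrefix` no longer treats a bare number such as `"200"` as a prefix candidate, while `"200."` and `"2001:"` still qualify because the input must now contain a dot or a colon. A small usage sketch (import path as used elsewhere in this diff):

```go
package main

import (
	"fmt"

	"github.com/alice-lg/alice-lg/pkg/decoders"
)

func main() {
	// Queries that look like a prefix trigger a prefix lookup,
	// everything else falls back to a neighbor search.
	for _, q := range []string{"200", "200.", "2001:", "Nordfoo"} {
		fmt.Printf("%-10q maybe prefix: %v\n", q, decoders.MaybePrefix(q))
	}
	// Expected (per the test table): false, true, true, false
}
```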
|
@ -39,7 +39,7 @@ type apiEndpoint func(
|
||||
httprouter.Params,
|
||||
) (response, error)
|
||||
|
||||
// Wrap handler for access control, throtteling and compression
|
||||
// Wrap handler for access control, throttling and compression
|
||||
func endpoint(wrapped apiEndpoint) httprouter.Handle {
|
||||
return func(res http.ResponseWriter,
|
||||
req *http.Request,
|
||||
@ -101,11 +101,11 @@ func (s *Server) apiRegisterEndpoints(
|
||||
router.GET("/api/v1/routeservers",
|
||||
endpoint(s.apiRouteServersList))
|
||||
router.GET("/api/v1/routeservers/:id/status",
|
||||
endpoint(s.apiStatus))
|
||||
endpoint(s.apiRouteServerStatusShow))
|
||||
router.GET("/api/v1/routeservers/:id/neighbors",
|
||||
endpoint(s.apiNeighborsList))
|
||||
router.GET("/api/v1/routeservers/:id/neighbors/:neighborId/routes",
|
||||
endpoint(s.apiRoutesList))
|
||||
// router.GET("/api/v1/routeservers/:id/neighbors/:neighborId/routes",
|
||||
// endpoint(s.apiRoutesList))
|
||||
router.GET("/api/v1/routeservers/:id/neighbors/:neighborId/routes/received",
|
||||
endpoint(s.apiRoutesListReceived))
|
||||
router.GET("/api/v1/routeservers/:id/neighbors/:neighborId/routes/filtered",
|
||||
|
@ -20,30 +20,6 @@ func (s *Server) apiStatusShow(
|
||||
return status, err
|
||||
}
|
||||
|
||||
// Handle status
|
||||
func (s *Server) apiStatus(
|
||||
ctx context.Context,
|
||||
_req *http.Request,
|
||||
params httprouter.Params,
|
||||
) (response, error) {
|
||||
rsID, err := validateSourceID(params.ByName("id"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
source := s.cfg.SourceInstanceByID(rsID)
|
||||
if source == nil {
|
||||
return nil, ErrSourceNotFound
|
||||
}
|
||||
|
||||
result, err := source.Status(ctx)
|
||||
if err != nil {
|
||||
s.logSourceError("status", rsID, err)
|
||||
}
|
||||
|
||||
return result, err
|
||||
}
|
||||
|
||||
// Handle Config Endpoint
|
||||
func (s *Server) apiConfigShow(
|
||||
_ctx context.Context,
|
||||
@ -51,9 +27,9 @@ func (s *Server) apiConfigShow(
|
||||
_params httprouter.Params,
|
||||
) (response, error) {
|
||||
result := api.ConfigResponse{
|
||||
Asn: s.cfg.Server.Asn,
|
||||
BGPCommunities: s.cfg.UI.BGPCommunities,
|
||||
RejectReasons: s.cfg.UI.RoutesRejections.Reasons,
|
||||
BGPCommunities: s.cfg.UI.BGPCommunities,
|
||||
BGPBlackholeCommunities: s.cfg.UI.BGPBlackholeCommunities,
|
||||
RejectReasons: s.cfg.UI.RoutesRejections.Reasons,
|
||||
Noexport: api.Noexport{
|
||||
LoadOnDemand: s.cfg.UI.RoutesNoexports.LoadOnDemand,
|
||||
},
|
||||
|
@ -11,6 +11,7 @@ import (
|
||||
)
|
||||
|
||||
// Handle routes
|
||||
/*
|
||||
func (s *Server) apiRoutesList(
|
||||
ctx context.Context,
|
||||
_req *http.Request,
|
||||
@ -34,6 +35,7 @@ func (s *Server) apiRoutesList(
|
||||
|
||||
return result, err
|
||||
}
|
||||
*/
|
||||
|
||||
// Paginated Routes Response: Received routes
|
||||
func (s *Server) apiRoutesListReceived(
|
||||
|
@ -8,6 +8,7 @@ import (
|
||||
"github.com/julienschmidt/httprouter"
|
||||
|
||||
"github.com/alice-lg/alice-lg/pkg/api"
|
||||
"github.com/alice-lg/alice-lg/pkg/config"
|
||||
)
|
||||
|
||||
// Handle RouteServers List
|
||||
@ -41,3 +42,31 @@ func (s *Server) apiRouteServersList(
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Handle route server status
|
||||
func (s *Server) apiRouteServerStatusShow(
|
||||
ctx context.Context,
|
||||
_req *http.Request,
|
||||
params httprouter.Params,
|
||||
) (response, error) {
|
||||
rsID, err := validateSourceID(params.ByName("id"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
source := s.cfg.SourceInstanceByID(rsID)
|
||||
if source == nil {
|
||||
return nil, ErrSourceNotFound
|
||||
}
|
||||
|
||||
result, err := source.Status(ctx)
|
||||
if err != nil {
|
||||
s.logSourceError("status", rsID, err)
|
||||
return nil, err
|
||||
}
|
||||
if result != nil {
|
||||
result.Meta.Version = config.Version
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
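Once registered, the handler above serves the per-route-server status under `/api/v1/routeservers/:id/status`, with the Alice-LG version injected into the response metadata. A minimal client sketch; the listen address matches the example config and the route server ID is a placeholder:

```go
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// Placeholder route server ID; adjust to your deployment.
	url := "http://127.0.0.1:7340/api/v1/routeservers/rs1-example/status"
	res, err := http.Get(url)
	if err != nil {
		log.Fatal(err)
	}
	defer res.Body.Close()

	body, err := io.ReadAll(res.Body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(res.Status)
	fmt.Println(string(body)) // JSON status response of the selected route server
}
```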
|
@ -30,13 +30,16 @@ func (s *Server) apiLookupPrefixGlobal(
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Check what we want to query
|
||||
// Prefix -> fetch prefix
|
||||
// _ -> fetch neighbors and routes
|
||||
lookupPrefix := decoders.MaybePrefix(q)
|
||||
q, filterTokens := QueryString(q).ExtractFilters()
|
||||
|
||||
// Measure response time
|
||||
t0 := time.Now()
|
||||
// Get filters from query string
|
||||
queryFilters, err := api.FiltersFromTokens(filterTokens)
|
||||
if err != nil {
|
||||
return nil, &ErrValidationFailed{
|
||||
Param: "q",
|
||||
Reason: err.Error(),
|
||||
}
|
||||
}
|
||||
|
||||
// Get additional filter criteria
|
||||
filtersApplied, err := api.FiltersFromQuery(req.URL.Query())
|
||||
@ -44,14 +47,35 @@ func (s *Server) apiLookupPrefixGlobal(
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Merge query filters into applied filters
|
||||
filtersApplied = filtersApplied.Combine(queryFilters)
|
||||
|
||||
// Select the query strategy:
|
||||
// Prefix -> fetch prefix
|
||||
// _ -> fetch neighbors and routes
|
||||
//
|
||||
lookupPrefix := decoders.MaybePrefix(q)
|
||||
lookupEmptyQuery := false
|
||||
if q == "" && (filtersApplied.HasGroup(api.SearchKeyCommunities) ||
|
||||
filtersApplied.HasGroup(api.SearchKeyExtCommunities) ||
|
||||
filtersApplied.HasGroup(api.SearchKeyLargeCommunities)) {
|
||||
lookupPrefix = true
|
||||
lookupEmptyQuery = true
|
||||
}
|
||||
|
||||
// Measure response time
|
||||
t0 := time.Now()
|
||||
|
||||
// Perform query
|
||||
var routes api.LookupRoutes
|
||||
if lookupPrefix {
|
||||
q, err = validatePrefixQuery(q)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
if !lookupEmptyQuery {
|
||||
q, err = validatePrefixQuery(q)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
routes, err = s.routesStore.LookupPrefix(ctx, q)
|
||||
routes, err = s.routesStore.LookupPrefix(ctx, q, filtersApplied)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -66,7 +90,7 @@ func (s *Server) apiLookupPrefixGlobal(
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
routes, err = s.routesStore.LookupPrefixForNeighbors(ctx, neighbors)
|
||||
routes, err = s.routesStore.LookupPrefixForNeighbors(ctx, neighbors, filtersApplied)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -78,15 +102,27 @@ func (s *Server) apiLookupPrefixGlobal(
|
||||
imported := make(api.LookupRoutes, 0, totalResults)
|
||||
filtered := make(api.LookupRoutes, 0, totalResults)
|
||||
|
||||
// Now, as we have allocated even more space process routes by, splitting,
|
||||
// filtering and updating the available filters...
|
||||
// Check if we should calculate community filter
|
||||
// cardinalities.
|
||||
filterCutoff := s.cfg.Server.PrefixLookupCommunityFilterCutoff
|
||||
canFilterCommunities := totalResults <= filterCutoff
|
||||
|
||||
// In case there is a source filter applied, we can filter communities
|
||||
if filtersApplied.HasGroup(api.SearchKeySources) {
|
||||
canFilterCommunities = true
|
||||
}
|
||||
|
||||
filtersNotAvailable := []string{}
|
||||
if !canFilterCommunities {
|
||||
filtersNotAvailable = append(
|
||||
filtersNotAvailable, api.SearchKeyCommunities)
|
||||
}
|
||||
|
||||
// Now, as we have allocated even more space split routes,
|
||||
// and update the available filters...
|
||||
filtersAvailable := api.NewSearchFilters()
|
||||
for _, r := range routes {
|
||||
|
||||
if !filtersApplied.MatchRoute(r) {
|
||||
continue // Exclude route from results set
|
||||
}
|
||||
|
||||
switch r.State {
|
||||
case api.RouteStateFiltered:
|
||||
filtered = append(filtered, r)
|
||||
@ -94,7 +130,14 @@ func (s *Server) apiLookupPrefixGlobal(
|
||||
imported = append(imported, r)
|
||||
}
|
||||
|
||||
filtersAvailable.UpdateFromLookupRoute(r)
|
||||
// Update available filters for sources and asns,
|
||||
// conditionally for communities.
|
||||
filtersAvailable.UpdateSourcesFromLookupRoute(r)
|
||||
filtersAvailable.UpdateASNSFromLookupRoute(r)
|
||||
|
||||
if canFilterCommunities {
|
||||
filtersAvailable.UpdateCommunitiesFromLookupRoute(r)
|
||||
}
|
||||
}
|
||||
|
||||
// Remove applied filters from available
|
||||
@ -148,8 +191,9 @@ func (s *Server) apiLookupPrefixGlobal(
|
||||
Pagination: paginationFiltered,
|
||||
},
|
||||
FilteredResponse: api.FilteredResponse{
|
||||
FiltersAvailable: filtersAvailable,
|
||||
FiltersApplied: filtersApplied,
|
||||
FiltersAvailable: filtersAvailable,
|
||||
FiltersNotAvailable: filtersNotAvailable,
|
||||
FiltersApplied: filtersApplied,
|
||||
},
|
||||
}
|
||||
|
||||
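The hunk above only computes community filter cardinalities when the result set stays under `prefix_lookup_community_filter_cutoff`, or when the query is already narrowed to a specific route server via a source filter. A condensed sketch of that decision; the helper name and values are illustrative:

```go
package main

import "fmt"

// canFilterCommunities mirrors the cutoff logic: allow community
// filters for small result sets, or when a source filter is applied.
func canFilterCommunities(totalResults, cutoff int, hasSourceFilter bool) bool {
	if hasSourceFilter {
		return true
	}
	return totalResults <= cutoff
}

func main() {
	fmt.Println(canFilterCommunities(100, 123, false))   // true: below cutoff
	fmt.Println(canFilterCommunities(50000, 123, false)) // false: too many routes
	fmt.Println(canFilterCommunities(50000, 123, true))  // true: route server selected
}
```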
|
@ -22,6 +22,14 @@ func (err *ErrResourceNotFoundError) Error() string {
|
||||
return "resource not found"
|
||||
}
|
||||
|
||||
// ErrTimeout will be sent if the request took too long
|
||||
type ErrTimeout string
|
||||
|
||||
// Implement Error interface
|
||||
func (err ErrTimeout) Error() string {
|
||||
return string(err)
|
||||
}
|
||||
|
||||
// Variables
|
||||
var (
|
||||
ErrSourceNotFound = &ErrResourceNotFoundError{}
|
||||
@ -50,6 +58,7 @@ const (
|
||||
StatusError = http.StatusInternalServerError
|
||||
StatusResourceNotFound = http.StatusNotFound
|
||||
StatusValidationError = http.StatusBadRequest
|
||||
TimeoutError = http.StatusGatewayTimeout
|
||||
)
|
||||
|
||||
// Handle an error and create a error API response
|
||||
@ -62,26 +71,38 @@ func apiErrorResponse(
|
||||
tag := TagGenericError
|
||||
status := StatusError
|
||||
|
||||
switch e := err.(type) {
|
||||
case *ErrResourceNotFoundError:
|
||||
tag = TagResourceNotFound
|
||||
code = CodeResourceNotFound
|
||||
status = StatusResourceNotFound
|
||||
case *url.Error:
|
||||
if strings.Contains(message, "connection refused") {
|
||||
tag = TagConnectionRefused
|
||||
code = CodeConnectionRefused
|
||||
message = "Connection refused while dialing the API"
|
||||
} else if e.Timeout() {
|
||||
tag = TagConnectionTimeout
|
||||
code = CodeConnectionTimeout
|
||||
message = "Connection timed out when connecting to the backend API"
|
||||
}
|
||||
case *ErrValidationFailed:
|
||||
// TODO: This needs refactoring.
|
||||
if err == api.ErrTooManyRoutes {
|
||||
tag = TagValidationError
|
||||
code = CodeValidationError
|
||||
status = StatusValidationError
|
||||
message = e.Reason
|
||||
} else {
|
||||
|
||||
switch e := err.(type) {
|
||||
case ErrTimeout:
|
||||
tag = TagConnectionTimeout
|
||||
code = CodeConnectionTimeout
|
||||
status = TimeoutError
|
||||
case *ErrResourceNotFoundError:
|
||||
tag = TagResourceNotFound
|
||||
code = CodeResourceNotFound
|
||||
status = StatusResourceNotFound
|
||||
case *url.Error:
|
||||
if strings.Contains(message, "connection refused") {
|
||||
tag = TagConnectionRefused
|
||||
code = CodeConnectionRefused
|
||||
message = "Connection refused while dialing the API"
|
||||
} else if e.Timeout() {
|
||||
tag = TagConnectionTimeout
|
||||
code = CodeConnectionTimeout
|
||||
message = "Connection timed out when connecting to the backend API"
|
||||
}
|
||||
case *ErrValidationFailed:
|
||||
tag = TagValidationError
|
||||
code = CodeValidationError
|
||||
status = StatusValidationError
|
||||
message = e.Reason
|
||||
}
|
||||
}
|
||||
|
||||
return api.ErrorResponse{
|
||||
|
@ -8,16 +8,16 @@ import (
|
||||
|
||||
func TestApiRoutesPagination(t *testing.T) {
|
||||
routes := api.Routes{
|
||||
&api.Route{ID: "r01"},
|
||||
&api.Route{ID: "r02"},
|
||||
&api.Route{ID: "r03"},
|
||||
&api.Route{ID: "r04"},
|
||||
&api.Route{ID: "r05"},
|
||||
&api.Route{ID: "r06"},
|
||||
&api.Route{ID: "r07"},
|
||||
&api.Route{ID: "r08"},
|
||||
&api.Route{ID: "r09"},
|
||||
&api.Route{ID: "r10"},
|
||||
&api.Route{Network: "r01"},
|
||||
&api.Route{Network: "r02"},
|
||||
&api.Route{Network: "r03"},
|
||||
&api.Route{Network: "r04"},
|
||||
&api.Route{Network: "r05"},
|
||||
&api.Route{Network: "r06"},
|
||||
&api.Route{Network: "r07"},
|
||||
&api.Route{Network: "r08"},
|
||||
&api.Route{Network: "r09"},
|
||||
&api.Route{Network: "r10"},
|
||||
}
|
||||
|
||||
paginated, pagination := apiPaginateRoutes(routes, 0, 8)
|
||||
@ -36,13 +36,13 @@ func TestApiRoutesPagination(t *testing.T) {
|
||||
|
||||
// Check paginated slicing
|
||||
r := paginated[0]
|
||||
if r.ID != "r01" {
|
||||
t.Error("First route on page 0 should be r01, got:", r.ID)
|
||||
if r.Network != "r01" {
|
||||
t.Error("First route on page 0 should be r01, got:", r.Network)
|
||||
}
|
||||
|
||||
r = paginated[len(paginated)-1]
|
||||
if r.ID != "r08" {
|
||||
t.Error("Last route should be r08, but got:", r.ID)
|
||||
if r.Network != "r08" {
|
||||
t.Error("Last route should be r08, but got:", r.Network)
|
||||
}
|
||||
|
||||
// Second page
|
||||
@ -52,13 +52,13 @@ func TestApiRoutesPagination(t *testing.T) {
|
||||
}
|
||||
|
||||
r = paginated[0]
|
||||
if r.ID != "r09" {
|
||||
t.Error("First route on page 1 should be r09, got:", r.ID)
|
||||
if r.Network != "r09" {
|
||||
t.Error("First route on page 1 should be r09, got:", r.Network)
|
||||
}
|
||||
|
||||
r = paginated[len(paginated)-1]
|
||||
if r.ID != "r10" {
|
||||
t.Error("Last route should be r10, but got:", r.ID)
|
||||
if r.Network != "r10" {
|
||||
t.Error("Last route should be r10, but got:", r.Network)
|
||||
}
|
||||
|
||||
// Access out of bound page
|
||||
|
@ -50,10 +50,31 @@ func apiQueryFilterNextHopGateway(
|
||||
results := make(api.Routes, 0, len(routes))
|
||||
for _, r := range routes {
|
||||
if strings.HasPrefix(strings.ToLower(r.Network), queryString) ||
|
||||
strings.HasPrefix(strings.ToLower(r.Gateway), queryString) {
|
||||
strings.HasPrefix(strings.ToLower(*r.Gateway), queryString) {
|
||||
results = append(results, r)
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// QueryString wraps the q parameter from the query.
|
||||
// Extract the value and additional filters from the string
|
||||
type QueryString string
|
||||
|
||||
// ExtractFilters separates query and filters from string.
|
||||
func (q QueryString) ExtractFilters() (string, []string) {
|
||||
tokens := strings.Split(string(q), " ")
|
||||
query := []string{}
|
||||
filters := []string{}
|
||||
|
||||
for _, t := range tokens {
|
||||
if strings.HasPrefix(t, "#") {
|
||||
filters = append(filters, t)
|
||||
} else {
|
||||
query = append(query, t)
|
||||
}
|
||||
}
|
||||
|
||||
return strings.Join(query, " "), filters
|
||||
}
|
||||
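`ExtractFilters` splits the free-text query into the search term and `#`-prefixed filter tokens. A standalone sketch that mirrors the same splitting rule (re-implemented here only for illustration, since the real type lives in the HTTP server package shown above):

```go
package main

import (
	"fmt"
	"strings"
)

// extractFilters mirrors QueryString.ExtractFilters: tokens starting
// with '#' are treated as filters, the rest remain the search query.
func extractFilters(q string) (string, []string) {
	query := []string{}
	filters := []string{}
	for _, t := range strings.Split(q, " ") {
		if strings.HasPrefix(t, "#") {
			filters = append(filters, t)
		} else {
			query = append(query, t)
		}
	}
	return strings.Join(query, " "), filters
}

func main() {
	q, filters := extractFilters("10.0.0.0/24 #9033:65666:1")
	fmt.Println(q)       // 10.0.0.0/24
	fmt.Println(filters) // [#9033:65666:1]
}
```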
|
@ -6,6 +6,7 @@ import (
|
||||
"testing"
|
||||
|
||||
"github.com/alice-lg/alice-lg/pkg/api"
|
||||
"github.com/alice-lg/alice-lg/pkg/pools"
|
||||
)
|
||||
|
||||
func makeQueryRequest(q string) *http.Request {
|
||||
@ -19,22 +20,19 @@ func makeQueryRequest(q string) *http.Request {
|
||||
func makeQueryRoutes() api.Routes {
|
||||
routes := api.Routes{
|
||||
&api.Route{
|
||||
ID: "route_01",
|
||||
NeighborID: "n01",
|
||||
NeighborID: pools.Neighbors.Acquire("n01"),
|
||||
Network: "123.42.43.0/24",
|
||||
Gateway: "23.42.42.1",
|
||||
Gateway: pools.Gateways4.Acquire("23.42.42.1"),
|
||||
},
|
||||
&api.Route{
|
||||
ID: "route_02",
|
||||
NeighborID: "n01",
|
||||
NeighborID: pools.Neighbors.Acquire("n01"),
|
||||
Network: "142.23.0.0/16",
|
||||
Gateway: "42.42.42.1",
|
||||
Gateway: pools.Gateways4.Acquire("42.42.42.1"),
|
||||
},
|
||||
&api.Route{
|
||||
ID: "route_03",
|
||||
NeighborID: "n01",
|
||||
NeighborID: pools.Neighbors.Acquire("n01"),
|
||||
Network: "123.43.0.0/16",
|
||||
Gateway: "23.42.43.1",
|
||||
Gateway: pools.Gateways4.Acquire("23.42.43.1"),
|
||||
},
|
||||
}
|
||||
|
||||
@ -54,11 +52,11 @@ func TestApiQueryFilterNextHopGateway(t *testing.T) {
|
||||
}
|
||||
|
||||
// Check presence of route_01 and _03, matching prefix 123.
|
||||
if filtered[0].ID != "route_01" {
|
||||
t.Error("Expected route_01, got:", filtered[0].ID)
|
||||
if filtered[0].Network != "123.42.43.0/24" {
|
||||
t.Error("Expected 123.42.43.0/24 got:", filtered[0].Network)
|
||||
}
|
||||
if filtered[1].ID != "route_03" {
|
||||
t.Error("Expected route_03, got:", filtered[1].ID)
|
||||
if filtered[1].Network != "123.43.0.0/16" {
|
||||
t.Error("Expected 123.43.0.0/16, got:", filtered[1].Network)
|
||||
}
|
||||
|
||||
// Test another query matching the gateway only
|
||||
@ -71,7 +69,7 @@ func TestApiQueryFilterNextHopGateway(t *testing.T) {
|
||||
t.Error("Expected only one result")
|
||||
}
|
||||
|
||||
if filtered[0].ID != "route_02" {
|
||||
t.Error("Expected route_02 to match criteria, got:", filtered[0])
|
||||
if filtered[0].Network != "142.23.0.0/16" {
|
||||
t.Error("Expected 142.23.0.0/16 to match criteria, got:", filtered[0])
|
||||
}
|
||||
}
|
||||
|
@ -48,7 +48,7 @@ func NewErrEmptyParam(key string) *ErrValidationFailed {
|
||||
|
||||
var (
|
||||
// ErrQueryTooShort will be returned when the query
|
||||
// is less than 2 characters.
|
||||
// is too short.
|
||||
ErrQueryTooShort = &ErrValidationFailed{
|
||||
"q", "the query is too short",
|
||||
}
|
||||
@ -106,10 +106,10 @@ func validatePrefixQuery(value string) (string, error) {
|
||||
// Helper: Validate neighbors query. A valid query should have
|
||||
// at least 4 chars.
|
||||
func validateNeighborsQuery(value string) (string, error) {
|
||||
if len(value) < 3 {
|
||||
// Maybe make configurable,
|
||||
// A length of 3 would be sufficient for "DFN" and
|
||||
// other shorthands.
|
||||
if len(value) < 4 {
|
||||
// TODO: Maybe make configurable
|
||||
// Three letters tend to result in queries with too
|
||||
// many results, which then leads to gateway timeouts.
|
||||
return "", ErrQueryTooShort
|
||||
}
|
||||
return value, nil
|
||||
|
@ -54,6 +54,15 @@ func (s *Server) Start(ctx context.Context) {
|
||||
|
||||
httpTimeout := time.Duration(s.cfg.Server.HTTPTimeout) * time.Second
|
||||
log.Println("Web server HTTP timeout set to:", httpTimeout)
|
||||
log.Println("Listening on:", s.cfg.Server.Listen)
|
||||
|
||||
if s.cfg.Server.EnablePrefixLookup {
|
||||
log.Println("Prefix Lookup (Search): enabled")
|
||||
log.Println("Prefix Lookup Community Filter Cutoff:",
|
||||
s.cfg.Server.PrefixLookupCommunityFilterCutoff)
|
||||
} else {
|
||||
log.Println("Prefix Lookup (Search): disabled")
|
||||
}
|
||||
|
||||
s.Server = &http.Server{
|
||||
Addr: s.cfg.Server.Listen,
|
||||
|
pkg/pools/communities.go (new file, 127 lines)
@ -0,0 +1,127 @@
|
||||
package pools
|
||||
|
||||
import (
|
||||
"math"
|
||||
"reflect"
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"github.com/alice-lg/alice-lg/pkg/api"
|
||||
)
|
||||
|
||||
// CommunitiesPool is for deduplicating a single BGP community.
|
||||
// This works with large and standard communities. For extended
|
||||
// communities, use the ExtCommunityPool.
|
||||
type CommunitiesPool struct {
|
||||
root *Node[int, api.Community]
|
||||
sync.RWMutex
|
||||
}
|
||||
|
||||
// NewCommunitiesPool creates a new pool for a single BGP community
|
||||
func NewCommunitiesPool() *CommunitiesPool {
|
||||
return &CommunitiesPool{
|
||||
root: NewNode[int, api.Community](api.Community{}),
|
||||
}
|
||||
}
|
||||
|
||||
// Acquire a single bgp community
|
||||
func (p *CommunitiesPool) Acquire(c api.Community) api.Community {
|
||||
p.Lock()
|
||||
defer p.Unlock()
|
||||
if len(c) == 0 {
|
||||
return p.root.value
|
||||
}
|
||||
return p.root.traverse(c, c)
|
||||
}
|
||||
|
||||
// Read a single bgp community
|
||||
func (p *CommunitiesPool) Read(c api.Community) api.Community {
|
||||
p.RLock()
|
||||
defer p.RUnlock()
|
||||
if len(c) == 0 {
|
||||
return p.root.value
|
||||
}
|
||||
return p.root.read(c)
|
||||
}
|
||||
|
||||
// CommunitiesSetPool is for deduplicating a list of BGP communities
|
||||
// (Large and default. The ext communities representation right now
|
||||
// makes problems and need to be fixed. TODO.)
|
||||
type CommunitiesSetPool struct {
|
||||
root *Node[unsafe.Pointer, []api.Community]
|
||||
sync.Mutex
|
||||
}
|
||||
|
||||
// NewCommunitiesSetPool creates a new pool for lists
|
||||
// of BGP communities.
|
||||
func NewCommunitiesSetPool() *CommunitiesSetPool {
|
||||
return &CommunitiesSetPool{
|
||||
root: NewNode[unsafe.Pointer, []api.Community]([]api.Community{}),
|
||||
}
|
||||
}
|
||||
|
||||
// Acquire a list of bgp communities
|
||||
func (p *CommunitiesSetPool) Acquire(
|
||||
communities []api.Community,
|
||||
) []api.Community {
|
||||
p.Lock()
|
||||
defer p.Unlock()
|
||||
// Make identification list by using the pointer address
|
||||
// of the deduplicated community as ID
|
||||
ids := make([]unsafe.Pointer, len(communities))
|
||||
set := make([]api.Community, len(communities))
|
||||
for i, comm := range communities {
|
||||
ptr := Communities.Acquire(comm)
|
||||
ids[i] = reflect.ValueOf(ptr).UnsafePointer()
|
||||
set[i] = ptr
|
||||
}
|
||||
if len(ids) == 0 {
|
||||
return p.root.value
|
||||
}
|
||||
return p.root.traverse(set, ids)
|
||||
}
|
||||
|
||||
// ExtCommunitiesSetPool is for deduplicating a list of ext. BGP communities
|
||||
type ExtCommunitiesSetPool struct {
|
||||
root *Node[unsafe.Pointer, []api.ExtCommunity]
|
||||
sync.Mutex
|
||||
}
|
||||
|
||||
// NewExtCommunitiesSetPool creates a new pool for lists
|
||||
// of BGP communities.
|
||||
func NewExtCommunitiesSetPool() *ExtCommunitiesSetPool {
|
||||
return &ExtCommunitiesSetPool{
|
||||
root: NewNode[unsafe.Pointer, []api.ExtCommunity]([]api.ExtCommunity{}),
|
||||
}
|
||||
}
|
||||
|
||||
func extPrefixToInt(s string) int {
|
||||
v := 0
|
||||
for i, c := range s {
|
||||
v += int(math.Pow(1000.0, float64(i))) * int(c)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
// Acquire a list of ext bgp communities
|
||||
func (p *ExtCommunitiesSetPool) Acquire(
|
||||
communities []api.ExtCommunity,
|
||||
) []api.ExtCommunity {
|
||||
p.Lock()
|
||||
defer p.Unlock()
|
||||
|
||||
// Make identification list
|
||||
ids := make([]unsafe.Pointer, len(communities))
|
||||
for i, comm := range communities {
|
||||
r := extPrefixToInt(comm[0].(string))
|
||||
icomm := []int{r, comm[1].(int), comm[2].(int)}
|
||||
|
||||
// get community identifier
|
||||
ptr := ExtCommunities.Acquire(icomm)
|
||||
ids[i] = reflect.ValueOf(ptr).UnsafePointer()
|
||||
}
|
||||
if len(ids) == 0 {
|
||||
return p.root.value
|
||||
}
|
||||
return p.root.traverse(communities, ids)
|
||||
}
|
pkg/pools/communities_test.go (new file, 147 lines)
@ -0,0 +1,147 @@
|
||||
package pools
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/alice-lg/alice-lg/pkg/api"
|
||||
)
|
||||
|
||||
func TestAcquireCommunity(t *testing.T) {
|
||||
c1 := api.Community{2342, 5, 1}
|
||||
c2 := api.Community{2342, 5, 1}
|
||||
c3 := api.Community{2342, 5}
|
||||
|
||||
p := NewCommunitiesPool()
|
||||
|
||||
pc1 := p.Acquire(c1)
|
||||
pc2 := p.Acquire(c2)
|
||||
pc3 := p.Acquire(c3)
|
||||
|
||||
if fmt.Sprintf("%p", c1) == fmt.Sprintf("%p", c2) {
|
||||
t.Error("expected c1 !== c2")
|
||||
}
|
||||
|
||||
if fmt.Sprintf("%p", pc1) != fmt.Sprintf("%p", pc2) {
|
||||
t.Error("expected pc1 == pc2")
|
||||
}
|
||||
|
||||
fmt.Printf("c1: %p, c2: %p, c3: %p\n", c1, c2, c3)
|
||||
fmt.Printf("pc1: %p, pc2: %p, pc3: %p\n", pc1, pc2, pc3)
|
||||
|
||||
log.Println(c3, pc3)
|
||||
}
|
||||
|
||||
func TestCommunityRead(t *testing.T) {
|
||||
c1 := api.Community{1111, 5, 1}
|
||||
c2 := api.Community{1111, 5, 1}
|
||||
c3 := api.Community{1111, 5}
|
||||
|
||||
p := NewCommunitiesPool()
|
||||
|
||||
pc1 := p.Acquire(c1)
|
||||
pc2 := p.Read(c2)
|
||||
pc3 := p.Read(c3)
|
||||
|
||||
fmt.Printf("pc1: %p, pc2: %p, pc3: %p\n", pc1, pc2, pc3)
|
||||
|
||||
if fmt.Sprintf("%p", pc1) != fmt.Sprintf("%p", pc2) {
|
||||
t.Error("expected pc1 == pc2")
|
||||
}
|
||||
|
||||
if pc3 != nil {
|
||||
t.Error("expected pc3 == nil, got", pc3)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAcquireCommunitiesSets(t *testing.T) {
|
||||
c1 := []api.Community{
|
||||
{2342, 5, 1},
|
||||
{2342, 5, 2},
|
||||
{2342, 51, 1},
|
||||
}
|
||||
c2 := []api.Community{
|
||||
{2342, 5, 1},
|
||||
{2342, 5, 2},
|
||||
{2342, 51, 1},
|
||||
}
|
||||
c3 := []api.Community{
|
||||
{2341, 6, 1},
|
||||
{2341, 6, 2},
|
||||
{2341, 1, 1},
|
||||
}
|
||||
|
||||
p := NewCommunitiesSetPool()
|
||||
|
||||
pc1 := p.Acquire(c1)
|
||||
pc2 := p.Acquire(c2)
|
||||
pc3 := p.Acquire(c3)
|
||||
|
||||
if fmt.Sprintf("%p", c1) == fmt.Sprintf("%p", c2) {
|
||||
t.Error("expected c1 !== c2")
|
||||
}
|
||||
|
||||
if fmt.Sprintf("%p", pc1) != fmt.Sprintf("%p", pc2) {
|
||||
t.Error("expected pc1 == pc2")
|
||||
}
|
||||
|
||||
fmt.Printf("c1: %p, c2: %p, c3: %p\n", c1, c2, c3)
|
||||
fmt.Printf("pc1: %p, pc2: %p, pc3: %p\n", pc1, pc2, pc3)
|
||||
}
|
||||
|
||||
func TestSetCommunityIdentity(t *testing.T) {
|
||||
set := []api.Community{
|
||||
{2341, 6, 1},
|
||||
{2341, 6, 2},
|
||||
{2341, 1, 1},
|
||||
}
|
||||
|
||||
pset := CommunitiesSets.Acquire(set)
|
||||
pval := Communities.Acquire(api.Community{2341, 6, 2})
|
||||
|
||||
fmt.Printf("set: %p, pset[1]: %p, pval: %p\n", set, pset[1], pval)
|
||||
|
||||
p1 := reflect.ValueOf(pset[1]).UnsafePointer()
|
||||
p2 := reflect.ValueOf(pval).UnsafePointer()
|
||||
|
||||
if p1 != p2 {
|
||||
t.Error("expected pset[1] == pval")
|
||||
}
|
||||
}
|
||||
|
||||
func TestAcquireExtCommunitiesSets(t *testing.T) {
|
||||
c1 := []api.ExtCommunity{
|
||||
{"ro", 5, 1},
|
||||
{"ro", 5, 2},
|
||||
{"rt", 51, 1},
|
||||
}
|
||||
c2 := []api.ExtCommunity{
|
||||
{"ro", 5, 1},
|
||||
{"ro", 5, 2},
|
||||
{"rt", 51, 1},
|
||||
}
|
||||
c3 := []api.ExtCommunity{
|
||||
{"ro", 6, 1},
|
||||
{"rt", 6, 2},
|
||||
{"xyz", 1, 1},
|
||||
}
|
||||
|
||||
p := NewExtCommunitiesSetPool()
|
||||
|
||||
pc1 := p.Acquire(c1)
|
||||
pc2 := p.Acquire(c2)
|
||||
pc3 := p.Acquire(c3)
|
||||
|
||||
if fmt.Sprintf("%p", c1) == fmt.Sprintf("%p", c2) {
|
||||
t.Error("expected c1 !== c2")
|
||||
}
|
||||
|
||||
if fmt.Sprintf("%p", pc1) != fmt.Sprintf("%p", pc2) {
|
||||
t.Error("expected pc1 == pc2")
|
||||
}
|
||||
|
||||
fmt.Printf("c1: %p, c2: %p, c3: %p\n", c1, c2, c3)
|
||||
fmt.Printf("pc1: %p, pc2: %p, pc3: %p\n", pc1, pc2, pc3)
|
||||
}
|
pkg/pools/lists.go (new file, 74 lines)
@ -0,0 +1,74 @@
|
||||
package pools
|
||||
|
||||
import (
|
||||
"sync"
|
||||
)
|
||||
|
||||
// A IntListPool can be used to deduplicate
|
||||
// lists of integers. Like an AS path or BGP communities.
|
||||
//
|
||||
// A Tree datastructure is used.
|
||||
type IntListPool struct {
|
||||
root *Node[int, []int]
|
||||
counter uint64
|
||||
sync.Mutex
|
||||
}
|
||||
|
||||
// NewIntListPool creates a new int list pool
|
||||
func NewIntListPool() *IntListPool {
|
||||
return &IntListPool{
|
||||
root: NewNode[int, []int]([]int{}),
|
||||
}
|
||||
}
|
||||
|
||||
// Acquire int list from pool
|
||||
func (p *IntListPool) Acquire(list []int) []int {
|
||||
p.Lock()
|
||||
defer p.Unlock()
|
||||
|
||||
if len(list) == 0 {
|
||||
return p.root.value // root
|
||||
}
|
||||
return p.root.traverse(list, list)
|
||||
}
|
||||
|
||||
// A StringListPool can be used for deduplicating lists
|
||||
// of strings. (This is a variant of an int list, as string
|
||||
// values are converted to int.
|
||||
type StringListPool struct {
|
||||
root *Node[int, []string]
|
||||
values map[string]int
|
||||
head int
|
||||
sync.Mutex
|
||||
}
|
||||
|
||||
// NewStringListPool creates a new string list.
|
||||
func NewStringListPool() *StringListPool {
|
||||
return &StringListPool{
|
||||
head: 1,
|
||||
values: map[string]int{},
|
||||
root: NewNode[int, []string]([]string{}),
|
||||
}
|
||||
}
|
||||
|
||||
// Acquire the string list pointer from the pool.
|
||||
func (p *StringListPool) Acquire(list []string) []string {
|
||||
if len(list) == 0 {
|
||||
return p.root.value
|
||||
}
|
||||
|
||||
// Make identifier list
|
||||
id := make([]int, len(list))
|
||||
for i, s := range list {
|
||||
// Resolve string value into int
|
||||
v, ok := p.values[s]
|
||||
if !ok {
|
||||
p.head++
|
||||
p.values[s] = p.head
|
||||
v = p.head
|
||||
}
|
||||
id[i] = v
|
||||
}
|
||||
|
||||
return p.root.traverse(list, id)
|
||||
}
|
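`IntListPool` returns one canonical slice for equal inputs, so identical AS paths across many routes share a single backing array; `StringListPool` does the same for string lists by mapping each string to an int identifier first. A usage sketch against the exported constructor (the AS numbers are made up):

```go
package main

import (
	"fmt"

	"github.com/alice-lg/alice-lg/pkg/pools"
)

func main() {
	p := pools.NewIntListPool()

	// Two equal AS paths from different routes...
	a := p.Acquire([]int{9033, 3320, 13335})
	b := p.Acquire([]int{9033, 3320, 13335})

	// ...resolve to the same deduplicated slice.
	fmt.Println(fmt.Sprintf("%p", a) == fmt.Sprintf("%p", b)) // true
	fmt.Println(a)                                            // [9033 3320 13335]
}
```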
pkg/pools/lists_test.go (new file, 43 lines)
@ -0,0 +1,43 @@
|
||||
package pools
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestAcquireIntList(t *testing.T) {
|
||||
a := []int{23, 42, 1337, 65535, 1}
|
||||
b := []int{23, 42, 1337, 65535, 1}
|
||||
c := []int{23, 42, 1338, 65535, 2}
|
||||
|
||||
p := NewIntListPool()
|
||||
|
||||
r1 := p.Acquire(a)
|
||||
p.Acquire(c)
|
||||
r2 := p.Acquire(b)
|
||||
|
||||
log.Println("r1", r1)
|
||||
log.Println("r2", r2)
|
||||
|
||||
if fmt.Sprintf("%p", a) == fmt.Sprintf("%p", b) {
|
||||
t.Error("lists should not be same pointer", fmt.Sprintf("%p %p", a, b))
|
||||
}
|
||||
if fmt.Sprintf("%p", r1) != fmt.Sprintf("%p", r2) {
|
||||
t.Error("lists should be same pointer", fmt.Sprintf("%p %p", r1, r2))
|
||||
}
|
||||
|
||||
t.Log(fmt.Sprintf("Ptr: %p %p => %p %p", a, b, r1, r2))
|
||||
}
|
||||
|
||||
func TestAcquireStringList(t *testing.T) {
|
||||
q := []string{"foo", "bar", "bgp"}
|
||||
w := []string{"foo", "bar", "bgp"}
|
||||
e := []string{"foo", "bpf"}
|
||||
|
||||
p2 := NewStringListPool()
|
||||
x1 := p2.Acquire(q)
|
||||
p2.Acquire(e)
|
||||
x2 := p2.Acquire(w)
|
||||
fmt.Printf("Ptr: %p %p => %p %p \n", q, w, x1, x2)
|
||||
}
|
pkg/pools/node.go (new file, 63 lines)
@ -0,0 +1,63 @@
package pools

// Node is a generic tree node
type Node[T comparable, V any] struct {
	children map[T]*Node[T, V] // map of children
	value    V
	final    bool
}

// NewNode creates a new tree node
func NewNode[T comparable, V any](value V) *Node[T, V] {
	return &Node[T, V]{
		children: map[T]*Node[T, V]{},
		value:    value,
		final:    false,
	}
}

// traverse inserts a new node into the tree if required
// or returns the object if it already exists.
func (n *Node[T, V]) traverse(value V, tail []T) V {
	id := tail[0]
	tail = tail[1:]

	// Seek for identifier in children
	child, ok := n.children[id]
	if !ok {
		var zero V
		child = NewNode[T, V](zero)
		n.children[id] = child
	}

	// Set obj if required
	if len(tail) == 0 {
		if !child.final {
			child.value = value
			child.final = true
		}
		return child.value
	}

	return child.traverse(value, tail)
}

// read returns the object if it exists or nil if not.
func (n *Node[T, V]) read(tail []T) V {
	id := tail[0]
	tail = tail[1:]

	// Seek for identifier in children
	child, ok := n.children[id]
	if !ok {
		var zero V
		return zero
	}

	// Set obj if required
	if len(tail) == 0 {
		return child.value
	}

	return child.read(tail)
}
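The traversal above consumes one identifier per tree level; the first list to reach a leaf becomes the canonical value (`final`), and later equal lists simply get that value back. A worked sketch of that behavior, written as a test because `Node` and `traverse` are internal to `pkg/pools` (this test is not part of the repository):

```go
package pools

import "testing"

// TestNodeTraverseDedup exercises the generic node directly,
// the way IntListPool does internally.
func TestNodeTraverseDedup(t *testing.T) {
	root := NewNode[int, []int]([]int{})

	// First insert walks 10 -> 20 -> 30 and stores the slice at the leaf.
	a := root.traverse([]int{10, 20, 30}, []int{10, 20, 30})
	// A second, equal identifier list returns the stored slice.
	b := root.traverse([]int{10, 20, 30}, []int{10, 20, 30})

	if &a[0] != &b[0] {
		t.Error("expected both calls to return the same deduplicated slice")
	}
	// Empty identifier lists are handled by the callers, which return
	// root.value directly; traverse assumes at least one element.
}
```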
pkg/pools/pools.go (new file, 74 lines)
@ -0,0 +1,74 @@
// Package pools provides deduplication pools for strings
// and lists of ints and strings.
package pools

import "log"

// Default pools: These pools are defined globally
// and are defined per intended usage

// RouteServers stores route server IDs
var RouteServers *StringPool

// Neighbors stores neighbor IDs
var Neighbors *StringPool

// Networks4 stores network ip v4 addresses
var Networks4 *StringPool

// Networks6 stores network ip v6 addresses
var Networks6 *StringPool

// Interfaces stores interfaces like: eth0, bond0 etc...
var Interfaces *StringPool

// Gateways4 store ip v4 gateway addresses
var Gateways4 *StringPool

// Gateways6 store ip v6 gateway addresses
var Gateways6 *StringPool

// Origins is a store for 'IGP'
var Origins *StringPool

// ASPaths stores lists of ASNs
var ASPaths *IntListPool

// Types stores a list of types (['BGP', 'univ'])
var Types *StringListPool

// Communities stores (large and standard) BGP communities
var Communities *CommunitiesPool

// ExtCommunities stores extended BGP communities
var ExtCommunities *CommunitiesPool

// CommunitiesSets store a list of BGP communities
var CommunitiesSets *CommunitiesSetPool

// ExtCommunitiesSets stores a list of extended communities
var ExtCommunitiesSets *ExtCommunitiesSetPool

// LargeCommunitiesSets store a list of large BGP communities
var LargeCommunitiesSets *CommunitiesSetPool

// Initialize global pools
func init() {
	log.Println("initializing memory pools")

	RouteServers = NewStringPool()
	Neighbors = NewStringPool()
	Networks4 = NewStringPool()
	Networks6 = NewStringPool()
	Interfaces = NewStringPool()
	Gateways4 = NewStringPool()
	Gateways6 = NewStringPool()
	Origins = NewStringPool()
	ASPaths = NewIntListPool()
	Types = NewStringListPool()
	Communities = NewCommunitiesPool()
	ExtCommunities = NewCommunitiesPool()
	CommunitiesSets = NewCommunitiesSetPool()
	ExtCommunitiesSets = NewExtCommunitiesSetPool()
	LargeCommunitiesSets = NewCommunitiesSetPool()
}
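These globals are what the source backends use while parsing: instead of keeping its own copy of a neighbor ID or gateway, each parsed route acquires the shared pointer, as the birdwatcher and GoBGP hunks further down show. A condensed sketch; the IDs and addresses are made up:

```go
package main

import (
	"fmt"

	"github.com/alice-lg/alice-lg/pkg/api"
	"github.com/alice-lg/alice-lg/pkg/pools"
)

func main() {
	// Two routes from the same neighbor share the pooled ID and gateway.
	r1 := &api.Route{
		NeighborID: pools.Neighbors.Acquire("R194_123"),
		Network:    "192.0.2.0/24",
		Gateway:    pools.Gateways4.Acquire("203.0.113.1"),
	}
	r2 := &api.Route{
		NeighborID: pools.Neighbors.Acquire("R194_123"),
		Network:    "198.51.100.0/24",
		Gateway:    pools.Gateways4.Acquire("203.0.113.1"),
	}

	fmt.Println(r1.NeighborID == r2.NeighborID) // true: same *string
	fmt.Println(r1.Gateway == r2.Gateway)       // true: same *string
}
```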
pkg/pools/string.go (new file, 67 lines)
@ -0,0 +1,67 @@
package pools

import "sync"

// StringPool is a pool for strings.
// This will most likely be a pool for IP addresses.
type StringPool struct {
	values map[string]*string

	counter map[string]uint
	top     uint

	sync.RWMutex
}

// NewStringPool creates a new string pool
func NewStringPool() *StringPool {
	return &StringPool{
		values:  map[string]*string{},
		counter: map[string]uint{},
	}
}

// Acquire a pointer to a string value
func (p *StringPool) Acquire(s string) *string {
	p.Lock()
	defer p.Unlock()
	// Deduplicate value
	ptr, ok := p.values[s]
	if !ok {
		p.values[s] = &s
		ptr = &s
	}
	p.counter[s] = p.top
	return ptr
}

// Get retrieves a pointer to a string, if present.
// Otherwise returns nil.
func (p *StringPool) Get(s string) *string {
	p.RLock()
	defer p.RUnlock()

	// Get value
	ptr, ok := p.values[s]
	if !ok {
		return nil
	}
	return ptr
}

// GarbageCollect releases all values that have not been
// acquired again since the last collection.
func (p *StringPool) GarbageCollect() uint {
	p.Lock()
	defer p.Unlock()
	var released uint = 0
	for k, cnt := range p.counter {
		if cnt < p.top {
			delete(p.counter, k)
			delete(p.values, k)
			released++
		}
	}
	p.top++ // Next generation
	return released
}
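`Acquire` stamps each value with the current generation and `GarbageCollect` drops everything that was not re-acquired since the previous collection before starting a new generation. A usage sketch of that refresh cycle (the addresses are placeholders):

```go
package main

import (
	"fmt"

	"github.com/alice-lg/alice-lg/pkg/pools"
)

func main() {
	p := pools.NewStringPool()

	// Generation 0: both gateways are in use.
	p.Acquire("203.0.113.1")
	p.Acquire("203.0.113.2")
	fmt.Println(p.GarbageCollect()) // 0: nothing is stale yet

	// Generation 1: only one gateway shows up again.
	p.Acquire("203.0.113.1")
	fmt.Println(p.GarbageCollect()) // 1: "203.0.113.2" is released
}
```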
pkg/pools/string_test.go (new file, 52 lines)
@ -0,0 +1,52 @@
|
||||
package pools
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestAcquireString(t *testing.T) {
|
||||
p := NewStringPool()
|
||||
s1 := p.Acquire("hello")
|
||||
s2 := p.Acquire("hello")
|
||||
s3 := p.Acquire("world")
|
||||
s1 = p.Acquire("hello")
|
||||
|
||||
if s1 != s2 {
|
||||
t.Error("expected s1 == s2")
|
||||
}
|
||||
t.Log(fmt.Sprintf("s1, s2: %x %x", s1, s2))
|
||||
|
||||
if s2 == s3 {
|
||||
t.Error("expected s2 !== s3")
|
||||
}
|
||||
t.Log(fmt.Sprintf("s1, s2: %x %x", s1, s2))
|
||||
}
|
||||
|
||||
func TestGarbageCollectString(t *testing.T) {
|
||||
p := NewStringPool()
|
||||
|
||||
// Gen 1
|
||||
p.Acquire("hello")
|
||||
p.Acquire("world")
|
||||
|
||||
r := p.GarbageCollect()
|
||||
if r > 0 {
|
||||
t.Error("first run should not collect anything.")
|
||||
}
|
||||
|
||||
p.Acquire("hello")
|
||||
p.Acquire("foo")
|
||||
r = p.GarbageCollect()
|
||||
if r != 1 {
|
||||
t.Error("expected 1 released value")
|
||||
}
|
||||
|
||||
for k := range p.values {
|
||||
if k == "world" {
|
||||
t.Error("did not expect to find world here")
|
||||
}
|
||||
}
|
||||
t.Log(p.values)
|
||||
t.Log(p.counter)
|
||||
}
|
@ -20,4 +20,6 @@ type Config struct {
|
||||
AltPipeProtocolPrefix string `ini:"alt_pipe_protocol_prefix"`
|
||||
AltPipeProtocolSuffix string `ini:"alt_pipe_protocol_suffix"`
|
||||
NeighborsRefreshTimeout int `ini:"neighbors_refresh_timeout"`
|
||||
|
||||
StreamParserThrottle int
|
||||
}
|
||||
|
@ -13,6 +13,7 @@ import (
|
||||
|
||||
"github.com/alice-lg/alice-lg/pkg/api"
|
||||
"github.com/alice-lg/alice-lg/pkg/decoders"
|
||||
"github.com/alice-lg/alice-lg/pkg/pools"
|
||||
)
|
||||
|
||||
// Convert server time string to time
|
||||
@ -230,6 +231,8 @@ func parseNeighborsShort(bird ClientResponse, config Config) (api.NeighborsStatu
|
||||
|
||||
// Parse route bgp info
|
||||
func parseRouteBgpInfo(data interface{}) *api.BGPInfo {
|
||||
gwpool := pools.Gateways4 // Let's see
|
||||
|
||||
bgpData, ok := data.(map[string]interface{})
|
||||
if !ok {
|
||||
// Info is missing
|
||||
@ -244,15 +247,20 @@ func parseRouteBgpInfo(data interface{}) *api.BGPInfo {
|
||||
localPref, _ := strconv.Atoi(decoders.String(bgpData["local_pref"], "0"))
|
||||
med, _ := strconv.Atoi(decoders.String(bgpData["med"], "0"))
|
||||
|
||||
// Using pools has a bit of a performance impact. While parsing
|
||||
// ~600000 routes without deduplication, this takes roughly 14 seconds.
|
||||
// With pools this is now 19 seconds.
|
||||
bgp := &api.BGPInfo{
|
||||
Origin: decoders.String(bgpData["origin"], "unknown"),
|
||||
AsPath: asPath,
|
||||
NextHop: decoders.String(bgpData["next_hop"], "unknown"),
|
||||
Origin: pools.Origins.Acquire(
|
||||
decoders.String(bgpData["origin"], "unknown")),
|
||||
AsPath: pools.ASPaths.Acquire(asPath),
|
||||
NextHop: gwpool.Acquire(
|
||||
decoders.String(bgpData["next_hop"], "unknown")),
|
||||
LocalPref: localPref,
|
||||
Med: med,
|
||||
Communities: communities,
|
||||
ExtCommunities: extCommunities,
|
||||
LargeCommunities: largeCommunities,
|
||||
Communities: pools.CommunitiesSets.Acquire(communities),
|
||||
ExtCommunities: pools.ExtCommunitiesSets.Acquire(extCommunities),
|
||||
LargeCommunities: pools.LargeCommunitiesSets.Acquire(largeCommunities),
|
||||
}
|
||||
return bgp
|
||||
}
|
||||
@ -292,10 +300,12 @@ func parseExtBgpCommunities(data interface{}) []api.ExtCommunity {
|
||||
log.Println("Ignoring malformed ext community:", cdata)
|
||||
continue
|
||||
}
|
||||
val1, _ := strconv.Atoi(cdata[1].(string))
|
||||
val2, _ := strconv.Atoi(cdata[2].(string))
|
||||
communities = append(communities, api.ExtCommunity{
|
||||
cdata[0],
|
||||
cdata[1],
|
||||
cdata[2],
|
||||
val1,
|
||||
val2,
|
||||
})
|
||||
}
|
||||
|
||||
@ -308,6 +318,8 @@ func parseRouteData(
|
||||
config Config,
|
||||
keepDetails bool,
|
||||
) *api.Route {
|
||||
gwpool := pools.Gateways4 // Let's see
|
||||
|
||||
age := parseRelativeServerTime(rdata["age"], config)
|
||||
rtype := decoders.StringList(rdata["type"])
|
||||
bgpInfo := parseRouteBgpInfo(rdata["bgp"])
|
||||
@ -323,19 +335,25 @@ func parseRouteData(
|
||||
}
|
||||
|
||||
gateway := decoders.String(rdata["gateway"], "unknown gateway")
|
||||
learntFrom := decoders.String(rdata["learnt_from"], "")
|
||||
if learntFrom == "" {
|
||||
learntFrom = gateway
|
||||
}
|
||||
|
||||
route := &api.Route{
|
||||
ID: decoders.String(rdata["network"], "unknown"),
|
||||
NeighborID: decoders.String(rdata["from_protocol"], "unknown neighbor"),
|
||||
// ID: decoders.String(rdata["network"], "unknown"),
|
||||
|
||||
Network: decoders.String(rdata["network"], "unknown net"),
|
||||
Interface: decoders.String(rdata["interface"], "unknown interface"),
|
||||
NeighborID: pools.Neighbors.Acquire(
|
||||
decoders.String(rdata["from_protocol"], "unknown neighbor")),
|
||||
Network: decoders.String(rdata["network"], "unknown net"),
|
||||
Interface: pools.Interfaces.Acquire(
|
||||
decoders.String(rdata["interface"], "unknown interface")),
|
||||
Metric: decoders.Int(rdata["metric"], -1),
|
||||
Primary: decoders.Bool(rdata["primary"], false),
|
||||
LearntFrom: decoders.String(rdata["learnt_from"], gateway),
|
||||
Gateway: gateway,
|
||||
LearntFrom: gwpool.Acquire(learntFrom),
|
||||
Gateway: gwpool.Acquire(gateway),
|
||||
Age: age,
|
||||
Type: rtype,
|
||||
Type: pools.Types.Acquire(rtype),
|
||||
BGP: bgpInfo,
|
||||
|
||||
Details: &details,
|
||||
|
@ -118,16 +118,16 @@ func (b *GenericBirdwatcher) filterProtocolsPipe(
|
||||
|
||||
func (b *GenericBirdwatcher) filterRoutesByPeerOrLearntFrom(
|
||||
routes api.Routes,
|
||||
peer string,
|
||||
learntFrom string,
|
||||
peerPtr *string,
|
||||
learntFromPtr *string,
|
||||
) api.Routes {
|
||||
resultRoutes := make(api.Routes, 0, len(routes))
|
||||
|
||||
// Choose routes with next_hop == gateway of this neighbor
|
||||
for _, route := range routes {
|
||||
if (route.Gateway == peer) ||
|
||||
(route.Gateway == learntFrom) ||
|
||||
(route.LearntFrom == peer) {
|
||||
if (route.Gateway == peerPtr) ||
|
||||
(route.Gateway == learntFromPtr) ||
|
||||
(route.LearntFrom == peerPtr) {
|
||||
resultRoutes = append(resultRoutes, route)
|
||||
}
|
||||
}
|
||||
@ -147,12 +147,12 @@ func (b *GenericBirdwatcher) filterRoutesByDuplicates(
|
||||
|
||||
routesMap := make(map[string]*api.Route) // for O(1) access
|
||||
for _, route := range routes {
|
||||
routesMap[route.ID] = route
|
||||
routesMap[route.Network] = route
|
||||
}
|
||||
|
||||
// Remove routes from "routes" that are contained within filterRoutes
|
||||
for _, filterRoute := range filterRoutes {
|
||||
delete(routesMap, filterRoute.ID)
|
||||
delete(routesMap, filterRoute.Network)
|
||||
}
|
||||
|
||||
for _, route := range routesMap {
|
||||
@ -259,49 +259,3 @@ func (b *GenericBirdwatcher) NeighborsStatus(ctx context.Context) (
|
||||
}
|
||||
return response, nil // dereference for now
|
||||
}
|
||||
|
||||
// LookupPrefix makes a routes lookup
|
||||
func (b *GenericBirdwatcher) LookupPrefix(
|
||||
ctx context.Context,
|
||||
prefix string,
|
||||
) (*api.RoutesLookupResponse, error) {
|
||||
// Get RS info
|
||||
rs := &api.RouteServer{
|
||||
ID: b.config.ID,
|
||||
Name: b.config.Name,
|
||||
}
|
||||
|
||||
// Query prefix on RS
|
||||
bird, err := b.client.GetJSON(ctx, "/routes/prefix?prefix="+prefix)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Parse API status
|
||||
apiStatus, err := parseAPIStatus(bird, b.config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Parse routes
|
||||
routes, _ := parseRoutes(bird, b.config, true)
|
||||
|
||||
// Add corresponding neighbor and source rs to result
|
||||
results := api.LookupRoutes{}
|
||||
for _, src := range routes {
|
||||
route := &api.LookupRoute{
|
||||
RouteServer: rs,
|
||||
Route: src,
|
||||
}
|
||||
results = append(results, route)
|
||||
}
|
||||
|
||||
// Make result
|
||||
response := &api.RoutesLookupResponse{
|
||||
Response: api.Response{
|
||||
Meta: apiStatus,
|
||||
},
|
||||
Routes: results,
|
||||
}
|
||||
return response, nil
|
||||
}
|
||||
|
@ -10,6 +10,7 @@ import (
|
||||
|
||||
"github.com/alice-lg/alice-lg/pkg/api"
|
||||
"github.com/alice-lg/alice-lg/pkg/decoders"
|
||||
"github.com/alice-lg/alice-lg/pkg/pools"
|
||||
)
|
||||
|
||||
// MultiTableBirdwatcher implements a birdwatcher with
|
||||
@ -365,6 +366,7 @@ func (src *MultiTableBirdwatcher) fetchRequiredRoutes(
|
||||
// Perform route deduplication
|
||||
importedRoutes := api.Routes{}
|
||||
if len(receivedRoutes) > 0 {
|
||||
// TODO: maybe we can utilize the ptr here
|
||||
peer := receivedRoutes[0].Gateway
|
||||
learntFrom := receivedRoutes[0].LearntFrom
|
||||
|
||||
@ -534,18 +536,18 @@ func (src *MultiTableBirdwatcher) NeighborsSummary(
|
||||
// from the birdwatcher backend.
|
||||
func (src *MultiTableBirdwatcher) Routes(
|
||||
ctx context.Context,
|
||||
neighbourID string,
|
||||
neighborID string,
|
||||
) (*api.RoutesResponse, error) {
|
||||
response := &api.RoutesResponse{}
|
||||
// Fetch required routes first (received and filtered)
|
||||
// However: Store in separate cache for faster access
|
||||
required, err := src.fetchRequiredRoutes(ctx, neighbourID)
|
||||
required, err := src.fetchRequiredRoutes(ctx, neighborID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Optional: NoExport
|
||||
_, notExported, err := src.fetchNotExportedRoutes(ctx, neighbourID)
|
||||
_, notExported, err := src.fetchNotExportedRoutes(ctx, neighborID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -682,8 +684,8 @@ func (src *MultiTableBirdwatcher) AllRoutes(
|
||||
// We load the filtered routes asynchronously with workers.
|
||||
type fetchFilteredReq struct {
|
||||
protocolID string
|
||||
peer string
|
||||
learntFrom string
|
||||
peer *string
|
||||
learntFrom *string
|
||||
}
|
||||
reqQ := make(chan fetchFilteredReq, 1000)
|
||||
resQ := make(chan api.Routes, 1000)
|
||||
@ -710,11 +712,16 @@ func (src *MultiTableBirdwatcher) AllRoutes(
|
||||
}()
|
||||
}
|
||||
|
||||
gwpool := pools.Gateways4
|
||||
|
||||
// Fill request queue
|
||||
go func() {
|
||||
for protocolID, protocolsData := range protocolsBgp["protocols"].(map[string]interface{}) {
|
||||
peer := protocolsData.(map[string]interface{})["neighbor_address"].(string)
|
||||
learntFrom := decoders.String(protocolsData.(map[string]interface{})["learnt_from"], peer)
|
||||
peer := gwpool.Acquire(
|
||||
protocolsData.(map[string]interface{})["neighbor_address"].(string))
|
||||
learntFrom := gwpool.Acquire(
|
||||
decoders.String(protocolsData.(map[string]interface{})["learnt_from"], *peer))
|
||||
|
||||
reqQ <- fetchFilteredReq{
|
||||
protocolID: protocolID,
|
||||
peer: peer,
|
||||
|
@ -16,77 +16,58 @@ func (src *SingleTableBirdwatcher) fetchReceivedRoutes(
|
||||
ctx context.Context,
|
||||
neighborID string,
|
||||
) (*api.Meta, api.Routes, error) {
|
||||
// Query birdwatcher
|
||||
bird, err := src.client.GetJSON(ctx, "/routes/protocol/"+neighborID)
|
||||
res, err := src.client.GetEndpoint(ctx, "/routes/protocol/"+neighborID)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
meta, routes, err := parseRoutesResponseStream(res.Body, src.config)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Use api status from first request
|
||||
apiStatus, err := parseAPIStatus(bird, src.config)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Parse the routes
|
||||
received, err := parseRoutes(bird, src.config, true)
|
||||
if err != nil {
|
||||
log.Println("WARNING Could not retrieve received routes:", err)
|
||||
log.Println("Is the 'routes_protocol' module active in birdwatcher?")
|
||||
return apiStatus, nil, err
|
||||
}
|
||||
|
||||
return apiStatus, received, nil
|
||||
return meta, routes, nil
|
||||
}
|
||||
|
||||
func (src *SingleTableBirdwatcher) fetchFilteredRoutes(
|
||||
ctx context.Context,
|
||||
neighborID string,
|
||||
) (*api.Meta, api.Routes, error) {
|
||||
// Query birdwatcher
|
||||
bird, err := src.client.GetJSON(ctx, "/routes/filtered/"+neighborID)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Use api status from first request
|
||||
apiStatus, err := parseAPIStatus(bird, src.config)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Parse the routes
|
||||
filtered, err := parseRoutes(bird, src.config, true)
|
||||
res, err := src.client.GetEndpoint(ctx, "/routes/filtered/"+neighborID)
|
||||
if err != nil {
|
||||
log.Println("WARNING Could not retrieve filtered routes:", err)
|
||||
log.Println("Is the 'routes_filtered' module active in birdwatcher?")
|
||||
return apiStatus, nil, err
|
||||
return nil, nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
meta, routes, err := parseRoutesResponseStream(res.Body, src.config)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
return apiStatus, filtered, nil
|
||||
return meta, routes, nil
|
||||
}
|
||||
|
||||
func (src *SingleTableBirdwatcher) fetchNotExportedRoutes(
|
||||
ctx context.Context,
|
||||
neighborID string,
|
||||
) (*api.Meta, api.Routes, error) {
|
||||
// Query birdwatcher
|
||||
bird, _ := src.client.GetJSON(ctx, "/routes/noexport/"+neighborID)
|
||||
res, err := src.client.GetEndpoint(ctx, "/routes/noexport/"+neighborID)
|
||||
if err != nil {
|
||||
log.Println("WARNING Could not retrieve routes not exported:", err)
|
||||
log.Println("Is the 'routes_noexport' module active in birdwatcher?")
|
||||
return nil, nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
// Use api status from first request
|
||||
apiStatus, err := parseAPIStatus(bird, src.config)
|
||||
meta, routes, err := parseRoutesResponseStream(res.Body, src.config)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Parse the routes
|
||||
notExported, err := parseRoutes(bird, src.config, true)
|
||||
if err != nil {
|
||||
log.Println("WARNING Could not retrieve routes not exported:", err)
|
||||
log.Println("Is the 'routes_noexport' module active in birdwatcher?")
|
||||
}
|
||||
|
||||
return apiStatus, notExported, nil
|
||||
return meta, routes, nil
|
||||
}
|
||||
|
||||
// RoutesRequired is a specialized request to fetch:
|
||||
@ -316,40 +297,38 @@ func (src *SingleTableBirdwatcher) AllRoutes(
|
||||
) (*api.RoutesResponse, error) {
|
||||
// First fetch all routes from the master table
|
||||
mainTable := src.GenericBirdwatcher.config.MainTable
|
||||
birdImported, err := src.client.GetJSON(ctx, "/routes/table/"+mainTable)
|
||||
|
||||
// Routes received
|
||||
res, err := src.client.GetEndpoint(ctx, "/routes/table/"+mainTable)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
meta, birdImported, err := parseRoutesResponseStream(res.Body, src.config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Then fetch all filtered routes from the master table
|
||||
birdFiltered, err := src.client.GetJSON(ctx, "/routes/table/"+mainTable+"/filtered")
|
||||
// Routes filtered
|
||||
res, err = src.client.GetEndpoint(ctx, "/routes/table/"+mainTable+"/filtered")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer res.Body.Close()
|
||||
|
||||
// Use api status from second request
|
||||
apiStatus, err := parseAPIStatus(birdFiltered, src.config)
|
||||
_, birdFiltered, err := parseRoutesResponseStream(res.Body, src.config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response := &api.RoutesResponse{
|
||||
Response: api.Response{
|
||||
Meta: apiStatus,
|
||||
Meta: meta,
|
||||
},
|
||||
Imported: birdImported,
|
||||
Filtered: birdFiltered,
|
||||
}
|
||||
|
||||
// Parse the routes
|
||||
imported := parseRoutesData(birdImported["routes"].([]interface{}), src.config, false)
|
||||
// Sort routes for deterministic ordering
|
||||
// sort.Sort(imported)
|
||||
response.Imported = imported
|
||||
|
||||
// Parse the routes
|
||||
filtered := parseRoutesData(birdFiltered["routes"].([]interface{}), src.config, false)
|
||||
// Sort routes for deterministic ordering
|
||||
// sort.Sort(filtered)
|
||||
response.Filtered = filtered
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
@ -3,6 +3,7 @@ package birdwatcher
|
||||
import (
|
||||
"encoding/json"
|
||||
"io"
|
||||
"time"
|
||||
|
||||
"github.com/alice-lg/alice-lg/pkg/api"
|
||||
)
|
||||
@ -15,6 +16,8 @@ func parseRoutesResponseStream(
|
||||
meta := &api.Meta{}
|
||||
routes := api.Routes{}
|
||||
|
||||
throttle := time.Duration(config.StreamParserThrottle) * time.Nanosecond
|
||||
|
||||
for {
|
||||
t, err := dec.Token()
|
||||
if err == io.EOF {
|
||||
@ -65,6 +68,10 @@ func parseRoutesResponseStream(
|
||||
if err := dec.Decode(&rdata); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Wait a bit, so our CPUs do not go up in flames.
|
||||
time.Sleep(throttle)
|
||||
|
||||
route := parseRouteData(rdata, config, false)
|
||||
routes = append(routes, route)
|
||||
}
|
||||
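`stream_parser_throttle` is consumed here as nanoseconds of sleep per parsed route, which caps the CPU usage of the streaming parser at the cost of a slightly longer refresh. A quick back-of-the-envelope check for the example value of 2342 from the config, against the ~600000 routes figure mentioned in the parsing comments above:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// stream_parser_throttle = 2342 (nanoseconds of sleep per route)
	throttle := time.Duration(2342) * time.Nanosecond

	// For a table of ~600,000 routes this adds roughly 1.4s of sleep
	// on top of the parse time.
	routes := 600000
	fmt.Println(time.Duration(routes) * throttle) // 1.4052s
}
```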
|
@@ -11,6 +11,7 @@ import (
"github.com/osrg/gobgp/pkg/packet/bgp"

"github.com/alice-lg/alice-lg/pkg/api"
"github.com/alice-lg/alice-lg/pkg/pools"
"github.com/alice-lg/alice-lg/pkg/sources/gobgp/apiutil"
)

@@ -84,10 +85,11 @@ func (gobgp *GoBGP) parsePathIntoRoute(
) (*api.Route, error) {

route := api.Route{}
route.ID = fmt.Sprintf("%s_%s", path.SourceId, prefix)
route.NeighborID = PeerHashWithASAndAddress(path.SourceAsn, path.NeighborIp)
// route.ID = fmt.Sprintf("%s_%s", path.SourceId, prefix)
route.NeighborID = pools.Neighbors.Acquire(
PeerHashWithASAndAddress(path.SourceAsn, path.NeighborIp))
route.Network = prefix
route.Interface = "Unknown"
route.Interface = pools.Interfaces.Acquire("unknown")
route.Age = time.Since(time.Unix(path.Age.GetSeconds(), int64(path.Age.GetNanos())))
route.Primary = path.Best

@@ -106,18 +108,18 @@ func (gobgp *GoBGP) parsePathIntoRoute(
case *bgp.PathAttributeMultiExitDisc:
route.BGP.Med = int(attr.Value)
case *bgp.PathAttributeNextHop:
route.Gateway = attr.Value.String()
route.BGP.NextHop = attr.Value.String()
route.Gateway = pools.Gateways4.Acquire(attr.Value.String())
route.BGP.NextHop = pools.Gateways4.Acquire(attr.Value.String())
case *bgp.PathAttributeLocalPref:
route.BGP.LocalPref = int(attr.Value)
case *bgp.PathAttributeOrigin:
switch attr.Value {
case bgp.BGP_ORIGIN_ATTR_TYPE_IGP:
route.BGP.Origin = "IGP"
route.BGP.Origin = pools.Origins.Acquire("IGP")
case bgp.BGP_ORIGIN_ATTR_TYPE_EGP:
route.BGP.Origin = "EGP"
route.BGP.Origin = pools.Origins.Acquire("EGP")
case bgp.BGP_ORIGIN_ATTR_TYPE_INCOMPLETE:
route.BGP.Origin = "Incomplete"
route.BGP.Origin = pools.Origins.Acquire("Incomplete")
}
case *bgp.PathAttributeAsPath:
for _, aspth := range attr.Value {

@@ -155,6 +157,11 @@ func (gobgp *GoBGP) parsePathIntoRoute(
}
}

route.BGP.AsPath = pools.ASPaths.Acquire(route.BGP.AsPath)
route.BGP.Communities = pools.CommunitiesSets.Acquire(route.BGP.Communities)
route.BGP.ExtCommunities = pools.ExtCommunitiesSets.Acquire(route.BGP.ExtCommunities)
route.BGP.LargeCommunities = pools.LargeCommunitiesSets.Acquire(route.BGP.LargeCommunities)

route.Metric = (route.BGP.LocalPref + route.BGP.Med)

return &route, nil
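The pools.*.Acquire calls introduced above deduplicate attribute values across routes; the actual pools live in pkg/pools and also intern AS paths and community sets. A minimal sketch of the idea for the string case only, assuming an Acquire/Get pair like the one used elsewhere in this diff (this is not the project's implementation):

```go
package main

import (
	"fmt"
	"sync"
)

// StringPool hands out one canonical *string per distinct value, so that
// many routes sharing the same gateway, origin or neighbor ID point at a
// single allocation instead of carrying their own copy.
type StringPool struct {
	mu     sync.RWMutex
	values map[string]*string
}

func NewStringPool() *StringPool {
	return &StringPool{values: map[string]*string{}}
}

// Acquire returns the canonical pointer, creating it on first use.
func (p *StringPool) Acquire(s string) *string {
	p.mu.Lock()
	defer p.mu.Unlock()
	if ptr, ok := p.values[s]; ok {
		return ptr
	}
	v := s
	p.values[s] = &v
	return &v
}

// Get returns the canonical pointer, or nil if the value was never
// acquired, mirroring the Get/Acquire split used in the store tests
// later in this diff.
func (p *StringPool) Get(s string) *string {
	p.mu.RLock()
	defer p.mu.RUnlock()
	return p.values[s]
}

func main() {
	neighbors := NewStringPool()
	a := neighbors.Acquire("ID7254_AS31334")
	b := neighbors.Acquire("ID7254_AS31334")
	fmt.Println(a == b) // true: both routes share one canonical pointer
}
```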
@@ -10,7 +10,6 @@ import (
"github.com/alice-lg/alice-lg/pkg/sources"

"context"
"fmt"
"io"
"log"
"time"

@@ -319,14 +318,6 @@ func (gobgp *GoBGP) RoutesNotExported(
return &routes, nil
}

// LookupPrefix searches for a prefix
func (gobgp *GoBGP) LookupPrefix(
ctx context.Context,
prefix string,
) (*api.RoutesLookupResponse, error) {
return nil, fmt.Errorf("not implemented: LookupPrefix")
}

// AllRoutes returns a routes dump (filtered, received),
// which is used to learn all prefixes to build
// up a local store for searching.

@@ -9,6 +9,7 @@ import (

"github.com/alice-lg/alice-lg/pkg/api"
"github.com/alice-lg/alice-lg/pkg/decoders"
"github.com/alice-lg/alice-lg/pkg/pools"
)

// Decode the api status response from the openbgpd

@@ -42,7 +43,7 @@ func decodeNeighbor(n interface{}) (*api.Neighbor, error) {
prefixes := decoders.MapGet(stats, "prefixes", map[string]interface{}{})

neighbor := &api.Neighbor{
ID: decoders.MapGetString(nb, "remote_addr", "invalid_id"),
// ID: decoders.MapGetString(nb, "remote_addr", "invalid_id"),
Address: decoders.MapGetString(nb, "remote_addr", "invalid_address"),
ASN: decoders.IntFromString(decoders.MapGetString(nb, "remote_as", ""), -1),
State: decodeState(decoders.MapGetString(nb, "state", "unknown")),

@@ -179,12 +180,12 @@ func decodeRoute(details map[string]interface{}) (*api.Route, error) {

// Make bgp info
bgpInfo := &api.BGPInfo{
Origin: origin,
AsPath: asPath,
NextHop: trueNextHop,
Communities: communities,
ExtCommunities: extendedCommunities,
LargeCommunities: largeCommunities,
Origin: pools.Origins.Acquire(origin),
AsPath: pools.ASPaths.Acquire(asPath),
NextHop: pools.Gateways4.Acquire(trueNextHop),
Communities: pools.CommunitiesSets.Acquire(communities),
ExtCommunities: pools.ExtCommunitiesSets.Acquire(extendedCommunities),
LargeCommunities: pools.LargeCommunitiesSets.Acquire(largeCommunities),
LocalPref: localPref,
}

@@ -195,13 +196,12 @@ func decodeRoute(details map[string]interface{}) (*api.Route, error) {
rawDetails := json.RawMessage(detailsJSON)

r := &api.Route{
ID: prefix,
NeighborID: neighborID,
NeighborID: pools.Neighbors.Acquire(neighborID),
Network: prefix,
Gateway: trueNextHop,
Gateway: pools.Gateways4.Acquire(trueNextHop),
BGP: bgpInfo,
Age: lastUpdate,
Type: []string{origin},
Type: pools.Types.Acquire([]string{origin}),
Primary: isPrimary,
Details: &rawDetails,
}

@@ -242,11 +242,13 @@ func decodeExtendedCommunities(c interface{}) api.ExtCommunities {
for _, com := range details {
tokens := strings.SplitN(com, " ", 2)
if len(tokens) != 2 {
log.Println("can not decode ext. community:", com)
continue
}
nums := decoders.IntListFromStrings(
strings.SplitN(tokens[1], ":", 2))
if len(nums) != 2 {
log.Println("can not decode ext. community:", com)
continue
}
comms = append(comms, []interface{}{tokens[0], nums[0], nums[1]})
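The malformed-community cases exercised by the test in the next file hinge on the two-step tokenization in decodeExtendedCommunities shown above. A minimal, self-contained sketch of that parsing step, using strconv in place of the project's decoders.IntListFromStrings helper (an approximation, not the actual code):

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// parseExtCommunity splits an OpenBGPD extended community string such as
// "rt 65000:11000" into its type token and two numeric parts. It mirrors
// the two SplitN calls in the hunk above; anything that does not yield
// exactly two numbers is rejected.
func parseExtCommunity(com string) ([]interface{}, bool) {
	tokens := strings.SplitN(com, " ", 2)
	if len(tokens) != 2 {
		return nil, false
	}
	parts := strings.SplitN(tokens[1], ":", 2)
	if len(parts) != 2 {
		return nil, false
	}
	a, errA := strconv.Atoi(parts[0])
	b, errB := strconv.Atoi(parts[1])
	if errA != nil || errB != nil {
		return nil, false
	}
	return []interface{}{tokens[0], a, b}, true
}

func main() {
	for _, com := range []string{"rt 65000:11000", "ro 2::42", "foo"} {
		if c, ok := parseExtCommunity(com); ok {
			fmt.Println("decoded:", c)
		} else {
			fmt.Println("skipped:", com)
		}
	}
}
```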
@@ -54,7 +54,8 @@ func TestDecodeRoutes(t *testing.T) {

// Check first route
r := routes[0]
if r.Network != "23.42.1.0/24" {
ip := "23.42.1.0/24"
if r.Network != ip {
t.Error("unexpected network:", r.Network)
}
// Community decoding

@@ -73,6 +74,7 @@ func TestDecodeRoutes(t *testing.T) {
if r.BGP.ExtCommunities[1][2] != 11000 {
t.Error("unexpected community:", r.BGP.ExtCommunities[0])
}
t.Log(r.BGP.ExtCommunities)

if r.BGP.AsPath[0] != 1111 {
t.Error("unexpected as_path:", r.BGP.AsPath)

@@ -90,3 +92,26 @@ func TestDecodeExtendedCommunities(t *testing.T) {
t.Fatal("unexpected result:", comms[0])
}
}

func TestDecodeMalformedExtendedCommunities(t *testing.T) {
data := []interface{}{
"0x8000000000000000",
"8000000000000000",
"rt 1239", "generic :123", "generic ro-23:123",
"generic 123123192399281398193489:asd",
"[0] 0x8000000000000000",
"[0] 0x800000000:0000000",
"foo bar:23:42",
"foo 2342:bar",
"foo 23:bar:42",
"foo",
"b 9223372036854775808",
922337203685477580,
"ro 2::42",
"generic rt a:b"}
comms := decodeExtendedCommunities(data)
t.Log(comms)
if len(comms) > 0 {
t.Error("expected empty communities")
}
}

@@ -9,7 +9,7 @@ import (
func TestFilterReceivedRoutes(t *testing.T) {
routes := api.Routes{
&api.Route{
ID: "1.2.3.4",
Network: "1.2.3.4",
BGP: &api.BGPInfo{
LargeCommunities: api.Communities{
api.Community{9999, 23, 23},

@@ -18,7 +18,7 @@ func TestFilterReceivedRoutes(t *testing.T) {
},
},
&api.Route{
ID: "5.6.6.6",
Network: "5.6.6.6",
BGP: &api.BGPInfo{
LargeCommunities: api.Communities{
api.Community{9999, 23, 23},

@@ -28,7 +28,7 @@ func TestFilterReceivedRoutes(t *testing.T) {
},
},
&api.Route{
ID: "5.6.7.8",
Network: "5.6.7.8",
BGP: &api.BGPInfo{
LargeCommunities: api.Communities{
api.Community{9999, 23, 23},

@@ -43,7 +43,7 @@ func TestFilterReceivedRoutes(t *testing.T) {
}
filtered := filterReceivedRoutes(c, routes)

if filtered[0].ID != "5.6.7.8" {
if filtered[0].Network != "5.6.7.8" {
t.Error("unexpected route:", filtered[0])
}
}

@@ -51,7 +51,7 @@
func TestFilterRejectedRoutes(t *testing.T) {
routes := api.Routes{
&api.Route{
ID: "5.6.7.8",
Network: "5.6.7.8",
BGP: &api.BGPInfo{
LargeCommunities: api.Communities{
api.Community{9999, 23, 23},

@@ -60,7 +60,7 @@ func TestFilterRejectedRoutes(t *testing.T) {
},
},
&api.Route{
ID: "1.2.3.4",
Network: "1.2.3.4",
BGP: &api.BGPInfo{
LargeCommunities: api.Communities{
api.Community{9999, 23, 23},

@@ -69,7 +69,7 @@ func TestFilterRejectedRoutes(t *testing.T) {
},
},
&api.Route{
ID: "5.6.6.6",
Network: "5.6.6.6",
BGP: &api.BGPInfo{
LargeCommunities: api.Communities{
api.Community{9999, 23, 23},

@@ -89,7 +89,7 @@ func TestFilterRejectedRoutes(t *testing.T) {
t.Error("expected two filtered routes")
}

if filtered[0].ID != "1.2.3.4" {
if filtered[0].Network != "1.2.3.4" {
t.Error("unexpected route:", filtered[0])
}
}

pkg/sources/openbgpd/testdata/rib.json (vendored, 3 changes)
@@ -25,7 +25,8 @@
],
"extended_communities": [
"[0] 11000:0",
"rt 65000:11000"
"rt 65000:11000",
"[0] 0x8000000000000000"
]
},
{
@@ -66,13 +66,21 @@ func (r *RoutesBackend) CountRoutesAt(
// list of neighbors identified by ID.
func (r *RoutesBackend) FindByNeighbors(
ctx context.Context,
neighborIDs []string,
query []*api.NeighborQuery,
filters *api.SearchFilters,
) (api.LookupRoutes, error) {
result := api.LookupRoutes{}

r.routes.Range(func(k, rs interface{}) bool {
for _, route := range rs.(api.LookupRoutes) {
if isMemberOf(neighborIDs, route.NeighborID) {
for _, q := range query {
if !route.MatchNeighborQuery(q) {
continue
}
if !filters.MatchRoute(route) {
continue
}

result = append(result, route)
}
}

@@ -86,29 +94,42 @@ func (r *RoutesBackend) FindByNeighbors(
func (r *RoutesBackend) FindByPrefix(
ctx context.Context,
prefix string,
filters *api.SearchFilters,
limit uint,
) (api.LookupRoutes, error) {
// We make our compare case insensitive
var (
count uint
limitExceeded bool
)

prefix = strings.ToLower(prefix)
result := api.LookupRoutes{}
hasPrefix := prefix != ""
r.routes.Range(func(k, rs interface{}) bool {
if limit > 0 && count >= limit {
limitExceeded = true
return false
}
for _, route := range rs.(api.LookupRoutes) {
// Naiive string filtering:
if strings.HasPrefix(strings.ToLower(route.Network), prefix) {
result = append(result, route)
if hasPrefix && !strings.HasPrefix(strings.ToLower(route.Network), prefix) {
continue
}
if !filters.MatchRoute(route) {
continue
}
result = append(result, route)
count++
if limit > 0 && count >= limit {
limitExceeded = true
return false
}
}
return true
})
if limitExceeded {
return nil, api.ErrTooManyRoutes
}
return result, nil
}

// isMemberOf checks if a key is present in
// a list of strings.
func isMemberOf(list []string, key string) bool {
for _, v := range list {
if v == key {
return true
}
}
return false
}
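With the limit plumbed into the in-memory backend above, callers have to handle api.ErrTooManyRoutes explicitly. A hypothetical calling pattern follows; the helper name and the limit of 500 are made up for illustration, while FindByPrefix, NewSearchFilters and ErrTooManyRoutes are taken from the hunks above.

```go
package example

import (
	"context"

	"github.com/alice-lg/alice-lg/pkg/api"
	"github.com/alice-lg/alice-lg/pkg/store/backends/memory"
)

// searchPrefix is a hypothetical wrapper around the in-memory backend
// shown above; b is assumed to be populated via SetRoutes.
func searchPrefix(
	ctx context.Context,
	b *memory.RoutesBackend,
	prefix string,
) (api.LookupRoutes, error) {
	// Cap the result set at 500 matches; a zero limit disables the cap.
	routes, err := b.FindByPrefix(ctx, prefix, api.NewSearchFilters(), 500)
	if err == api.ErrTooManyRoutes {
		// Too many matches: surface the error so the UI can ask for a
		// narrower query instead of rendering a partial table.
		return nil, err
	}
	return routes, err
}
```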
@@ -7,9 +7,44 @@ import (
"testing"
"time"

"github.com/alice-lg/alice-lg/pkg/api"
"github.com/alice-lg/alice-lg/pkg/pools"
"github.com/alice-lg/alice-lg/pkg/store/testdata"
)

func TestFindByNeighbors(t *testing.T) {
ctx := context.Background()

rs1 := testdata.LoadTestLookupRoutes("rs1", "routeserver1")
rs2 := testdata.LoadTestLookupRoutes("rs2", "routeserver2")

b := NewRoutesBackend()
b.SetRoutes(ctx, "rs1", rs1)
b.SetRoutes(ctx, "rs2", rs2)

q := &api.NeighborQuery{
NeighborID: pools.Neighbors.Get("ID7254_AS31334"),
SourceID: pools.RouteServers.Get("rs1"),
}

routes, err := b.FindByNeighbors(
ctx,
[]*api.NeighborQuery{q},
api.NewSearchFilters())
if err != nil {
t.Fatal(err)
}

if len(routes) != 1 {
t.Error("Route lookup returned unexpected length", len(routes))
}

route := routes[0]
if *route.NeighborID != "ID7254_AS31334" {
t.Error("Route lookup has wrong neighbor ID")
}
}

func TestConcurrentRoutesAccess(t *testing.T) {
ctx := context.Background()

@@ -23,11 +58,20 @@ func TestConcurrentRoutesAccess(t *testing.T) {
b.SetRoutes(ctx, "rs1", rs1)
b.SetRoutes(ctx, "rs2", rs2)

n1 := &api.NeighborQuery{
NeighborID: pools.Neighbors.Get("ID7254_AS31334"),
SourceID: pools.RouteServers.Get("rs1"),
}
n2 := &api.NeighborQuery{
NeighborID: pools.Neighbors.Get("ID163_AS31078"),
SourceID: pools.RouteServers.Get("rs2"),
}

// Current: ~327 ms, With sync.Map: 80 ms... neat
for i := 0; i < 200000; i++ {
wg.Add(1)
go func() {
b.FindByNeighbors(ctx, []string{"ID7254_AS31334", "ID163_AS31078"})
b.FindByNeighbors(ctx, []*api.NeighborQuery{n1, n2}, api.NewSearchFilters())
wg.Done()
}()
}
@@ -139,7 +139,7 @@ func (b *RoutesBackend) persist(
_, err := tx.Exec(
ctx,
qry,
route.Route.ID,
route.Route.Network,
sourceID,
route.Neighbor.ID,
route.Route.Network,

@@ -213,7 +213,8 @@ func (b *RoutesBackend) CountRoutesAt(
// list of neighbors identified by ID.
func (b *RoutesBackend) FindByNeighbors(
ctx context.Context,
neighborIDs []string,
neighbors []*api.NeighborQuery,
filters *api.SearchFilters,
) (api.LookupRoutes, error) {
tx, err := b.pool.BeginTx(ctx, pgx.TxOptions{
IsoLevel: pgx.ReadCommitted,

@@ -223,23 +224,22 @@ func (b *RoutesBackend) FindByNeighbors(
}
defer tx.Rollback(ctx)

vals := make([]interface{}, len(neighborIDs))
for i := range neighborIDs {
vals[i] = neighborIDs[i]
}
vars := make([]string, len(neighborIDs))
for i := range neighborIDs {
vars[i] = fmt.Sprintf("$%d", i+1)
}
listQry := strings.Join(vars, ",")
vals := make([]interface{}, 0, len(neighbors))
vars := 0

qrys := []string{}
for _, src := range b.sources {
tbl := b.routesTable(src.ID)

for _, neighborQuery := range neighbors {
tbl := b.routesTable(*neighborQuery.SourceID)
param := fmt.Sprintf("$%d", vars+1)
vals = append(vals, *neighborQuery.NeighborID)

qry := `
SELECT route FROM ` + tbl + `
WHERE neighbor_id IN (` + listQry + `)`
WHERE neighbor_id = ` + param
qrys = append(qrys, qry)

vars++
}

qry := strings.Join(qrys, " UNION ")

@@ -249,13 +249,15 @@ func (b *RoutesBackend) FindByNeighbors(
return nil, err
}

return fetchRoutes(rows)
return fetchRoutes(rows, filters, 0)
}

// FindByPrefix will return the prefixes matching a pattern
func (b *RoutesBackend) FindByPrefix(
ctx context.Context,
prefix string,
filters *api.SearchFilters,
limit uint,
) (api.LookupRoutes, error) {
tx, err := b.pool.BeginTx(ctx, pgx.TxOptions{
IsoLevel: pgx.ReadCommitted,

@@ -279,11 +281,16 @@ func (b *RoutesBackend) FindByPrefix(
if err != nil {
return nil, err
}
return fetchRoutes(rows)
return fetchRoutes(rows, filters, limit)
}

// Private fetchRoutes will load the queried result set
func fetchRoutes(rows pgx.Rows) (api.LookupRoutes, error) {
func fetchRoutes(
rows pgx.Rows,
filters *api.SearchFilters,
limit uint,
) (api.LookupRoutes, error) {
var count uint
cmd := rows.CommandTag()
results := make(api.LookupRoutes, 0, cmd.RowsAffected())
for rows.Next() {

@@ -291,7 +298,14 @@ func fetchRoutes(rows pgx.Rows) (api.LookupRoutes, error) {
if err := rows.Scan(&route); err != nil {
return nil, err
}
if !filters.MatchRoute(route) {
continue
}
results = append(results, route)
count++
if limit > 0 && count >= limit {
return nil, api.ErrTooManyRoutes
}
}
return results, nil
}
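In the Postgres backend above, the single IN (...) query over every source table is replaced by one positional parameter per (source, neighbor) pair, joined with UNION. A small runnable sketch of that assembly; the routes_&lt;sourceID&gt; table name is an assumption standing in for b.routesTable(sourceID).

```go
package main

import (
	"fmt"
	"strings"
)

// neighborQuery is a simplified stand-in for *api.NeighborQuery.
type neighborQuery struct {
	SourceID   string
	NeighborID string
}

// buildQuery mirrors the assembly in FindByNeighbors above: one positional
// parameter per neighbor query, each against its own per-source table.
func buildQuery(neighbors []neighborQuery) (string, []interface{}) {
	vals := make([]interface{}, 0, len(neighbors))
	qrys := []string{}
	for i, nq := range neighbors {
		tbl := "routes_" + nq.SourceID // assumption: real code uses routesTable()
		qrys = append(qrys, fmt.Sprintf(
			"SELECT route FROM %s WHERE neighbor_id = $%d", tbl, i+1))
		vals = append(vals, nq.NeighborID)
	}
	return strings.Join(qrys, " UNION "), vals
}

func main() {
	qry, vals := buildQuery([]neighborQuery{
		{SourceID: "rs1", NeighborID: "n24"},
		{SourceID: "rs2", NeighborID: "n25"},
	})
	fmt.Println(qry)  // one UNION branch per (source, neighbor) pair
	fmt.Println(vals) // [n24 n25]
}
```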
@@ -7,6 +7,7 @@ import (

"github.com/alice-lg/alice-lg/pkg/api"
"github.com/alice-lg/alice-lg/pkg/config"
"github.com/alice-lg/alice-lg/pkg/pools"
)

func TestRoutesTable(t *testing.T) {

@@ -34,19 +35,24 @@ func TestCountRoutesAt(t *testing.T) {
ID: "n23",
},
Route: &api.Route{
ID: "r1.2.3.4",
Network: "1.2.3.0/24",
},
}
b.initTable(ctx, tx, "rs1")
b.persist(ctx, tx, "rs1", r, now)
if err := b.persist(ctx, tx, "rs1", r, now); err != nil {
t.Fatal(err)
}

r.Route.ID = "r4242"
b.persist(ctx, tx, "rs1", r, now)
r.Route.Network = "1.2.6.1/24"
if err := b.persist(ctx, tx, "rs1", r, now); err != nil {
t.Fatal(err)
}

r.Route.ID = "r4243"
r.State = "imported"
b.persist(ctx, tx, "rs1", r, now)
r.Route.Network = "1.2.5.5/24"
if err := b.persist(ctx, tx, "rs1", r, now); err != nil {
t.Fatal(err)
}

if err := tx.Commit(ctx); err != nil {
t.Fatal(err)

@@ -86,22 +92,20 @@ func TestFindByNeighbors(t *testing.T) {
ID: "n23",
},
Route: &api.Route{
ID: "r1.2.3.4",
Network: "1.2.3.0/24",
Network: "1.2.3.0/24",
NeighborID: pools.Neighbors.Acquire("n23"),
},
}
b.initTable(ctx, tx, "rs1")
b.initTable(ctx, tx, "rs2")
b.persist(ctx, tx, "rs1", r, now)

r.Route.ID = "r4242"
r.Network = "1.4.5.0/24"
b.persist(ctx, tx, "rs1", r, now)

r.Route.ID = "r4243"
r.Neighbor.ID = "n24"
b.persist(ctx, tx, "rs1", r, now)

r.Route.ID = "r4244"
r.Neighbor.ID = "n25"
b.persist(ctx, tx, "rs2", r, now)

@@ -109,9 +113,19 @@ func TestFindByNeighbors(t *testing.T) {
t.Fatal(err)
}

routes, err := b.FindByNeighbors(ctx, []string{
"n24", "n25",
})
nq1 := &api.NeighborQuery{
NeighborID: pools.Neighbors.Acquire("n24"),
SourceID: pools.RouteServers.Acquire("rs1"),
}
nq2 := &api.NeighborQuery{
NeighborID: pools.Neighbors.Acquire("n25"),
SourceID: pools.RouteServers.Acquire("rs2"),
}

routes, err := b.FindByNeighbors(
ctx,
[]*api.NeighborQuery{nq1, nq2},
api.NewSearchFilters())
if err != nil {
t.Fatal(err)
}

@@ -144,7 +158,6 @@ func TestFindByPrefix(t *testing.T) {
ID: "n23",
},
Route: &api.Route{
ID: "r1.2.3.4",
Network: "1.2.3.0/24",
},
}

@@ -153,16 +166,13 @@ func TestFindByPrefix(t *testing.T) {
b.initTable(ctx, tx, "rs2")
b.persist(ctx, tx, "rs1", r, now)

r.Route.ID = "r4242"
r.Route.Network = "1.2.4.0/24"
b.persist(ctx, tx, "rs1", r, now)

r.Route.ID = "r4243"
r.Route.Network = "1.2.5.0/24"
r.Neighbor.ID = "n24"
b.persist(ctx, tx, "rs2", r, now)

r.Route.ID = "r4244"
r.Route.Network = "5.5.5.0/24"
r.Neighbor.ID = "n25"
b.persist(ctx, tx, "rs1", r, now)

@@ -171,7 +181,7 @@ func TestFindByPrefix(t *testing.T) {
t.Fatal(err)
}

routes, err := b.FindByPrefix(ctx, "1.2.")
routes, err := b.FindByPrefix(ctx, "1.2.", api.NewSearchFilters(), 0)
if err != nil {
t.Fatal(err)
}

@@ -180,6 +190,6 @@ func TestFindByPrefix(t *testing.T) {
t.Error("unexpected routes:", routes)
}

routes, _ = b.FindByPrefix(ctx, "5.5.")
routes, _ = b.FindByPrefix(ctx, "5.5.", api.NewSearchFilters(), 0)
t.Log(routes)
}
@@ -247,8 +247,6 @@ func (s *NeighborsStore) lookupNeighborsAt(
sourceID string,
query string,
) (api.Neighbors, error) {

results := api.Neighbors{}
neighbors, err := s.backend.GetNeighborsAt(ctx, sourceID)
if err != nil {
return nil, err

@@ -262,6 +260,7 @@ func (s *NeighborsStore) lookupNeighborsAt(
}
}

results := api.Neighbors{}
for _, neighbor := range neighbors {
if asn >= 0 && neighbor.ASN == asn { // only executed if valid AS query is detected
results = append(results, neighbor)
@@ -9,9 +9,27 @@ import (

"github.com/alice-lg/alice-lg/pkg/api"
"github.com/alice-lg/alice-lg/pkg/config"
"github.com/alice-lg/alice-lg/pkg/pools"
"github.com/alice-lg/alice-lg/pkg/sources"
)

// newNeighborQuery creates a new NeighborQuery
func newNeighborQuery(neighborID string, sourceID string) *api.NeighborQuery {
ptrNeighborID := pools.Neighbors.Get(neighborID)
if ptrNeighborID == nil {
return nil
}
ptrSourceID := pools.RouteServers.Get(sourceID)
if ptrSourceID == nil {
return nil
}

return &api.NeighborQuery{
NeighborID: ptrNeighborID,
SourceID: ptrSourceID,
}
}

// RoutesStoreBackend interface
type RoutesStoreBackend interface {
// SetRoutes updates the routes in the store after a refresh.

@@ -33,13 +51,16 @@ type RoutesStoreBackend interface {
// announced by the neighbor at a given source
FindByNeighbors(
ctx context.Context,
neighborIDs []string,
neighbors []*api.NeighborQuery,
filters *api.SearchFilters,
) (api.LookupRoutes, error)

// FindByPrefix
FindByPrefix(
ctx context.Context,
prefix string,
filters *api.SearchFilters,
limit uint,
) (api.LookupRoutes, error)
}

@@ -50,6 +71,7 @@ type RoutesStore struct {
backend RoutesStoreBackend
sources *SourcesStore
neighbors *NeighborsStore
limit uint
}

// NewRoutesStore makes a new store instance

@@ -73,6 +95,7 @@ func NewRoutesStore(

log.Println("Routes refresh interval set to:", refreshInterval)
log.Println("Routes refresh parallelism:", refreshParallelism)
log.Println("Routes store query limit:", cfg.Server.RoutesStoreQueryLimit)

// Store refresh information per store
sources := NewSourcesStore(cfg, refreshInterval, refreshParallelism)

@@ -80,6 +103,7 @@ func NewRoutesStore(
backend: backend,
sources: sources,
neighbors: neighbors,
limit: cfg.Server.RoutesStoreQueryLimit,
}
return store
}

@@ -182,8 +206,8 @@ func (s *RoutesStore) updateSource(
"accepted and", len(res.Filtered), "filtered routes for:", src.Name)

// Prepare imported routes for lookup
srcRS := &api.RouteServer{
ID: src.ID,
srcRS := &api.LookupRouteServer{
ID: pools.RouteServers.Acquire(src.ID),
Name: src.Name,
}
imported := res.Imported.ToLookupRoutes("imported", srcRS, neighbors)

@@ -310,8 +334,9 @@ func (s *RoutesStore) CacheTTL(
func (s *RoutesStore) LookupPrefix(
ctx context.Context,
prefix string,
filters *api.SearchFilters,
) (api.LookupRoutes, error) {
return s.backend.FindByPrefix(ctx, prefix)
return s.backend.FindByPrefix(ctx, prefix, filters, s.limit)
}

// LookupPrefixForNeighbors returns all routes for

@@ -319,12 +344,18 @@ func (s *RoutesStore) LookupPrefix(
func (s *RoutesStore) LookupPrefixForNeighbors(
ctx context.Context,
neighbors api.NeighborsLookupResults,
filters *api.SearchFilters,
) (api.LookupRoutes, error) {
neighborIDs := []string{}
for _, rs := range neighbors {
for _, neighbor := range rs {
neighborIDs = append(neighborIDs, neighbor.ID)
query := make([]*api.NeighborQuery, 0, len(neighbors))

for sourceID, sourceNeighbors := range neighbors {
for _, neighbor := range sourceNeighbors {
q := newNeighborQuery(neighbor.ID, sourceID)
if q == nil {
continue
}
query = append(query, q)
}
}
return s.backend.FindByNeighbors(ctx, neighborIDs)
return s.backend.FindByNeighbors(ctx, query, filters)
}
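A hypothetical usage sketch for the reworked store API above: neighbor-scoped lookups now go through NeighborQuery values built from the pools, and neighbors unknown to the pools are silently skipped by newNeighborQuery. The pkg/store import path and the surrounding helper are assumptions for illustration; the method signatures come from the hunks above.

```go
package example

import (
	"context"

	"github.com/alice-lg/alice-lg/pkg/api"
	"github.com/alice-lg/alice-lg/pkg/store"
)

// routesForNeighbors is a hypothetical caller of the store shown above.
func routesForNeighbors(
	ctx context.Context,
	s *store.RoutesStore,
	neighbors api.NeighborsLookupResults,
) (api.LookupRoutes, error) {
	// Unknown neighbor or source IDs never make it into the backend
	// query, so the result only covers neighbors the store has seen.
	return s.LookupPrefixForNeighbors(ctx, neighbors, api.NewSearchFilters())
}
```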
@@ -8,6 +8,7 @@ import (

"github.com/alice-lg/alice-lg/pkg/api"
"github.com/alice-lg/alice-lg/pkg/config"
"github.com/alice-lg/alice-lg/pkg/pools"
"github.com/alice-lg/alice-lg/pkg/store/backends/memory"
"github.com/alice-lg/alice-lg/pkg/store/testdata"
)

@@ -28,8 +29,8 @@ func importRoutes(
ID: "ID7254_AS31334",
},
}
srcRS := &api.RouteServer{
ID: src.ID,
srcRS := &api.LookupRouteServer{
ID: pools.RouteServers.Acquire(src.ID),
Name: src.Name,
}
imported := res.Imported.ToLookupRoutes("imported", srcRS, neighbors)

@@ -121,7 +122,10 @@ func TestLookupPrefix(t *testing.T) {
store := makeTestRoutesStore()
query := "193.200."

results, err := store.LookupPrefix(context.Background(), query)
results, err := store.LookupPrefix(
context.Background(),
query,
api.NewSearchFilters())
if err != nil {
t.Fatal(err)
}

@@ -151,11 +155,15 @@ func TestLookupPrefixForNeighbors(t *testing.T) {
},
},
}
pools.Neighbors.Acquire("ID163_AS31078")

store := makeTestRoutesStore()

// Query
results, err := store.LookupPrefixForNeighbors(context.Background(), neighbors)
results, err := store.LookupPrefixForNeighbors(
context.Background(),
neighbors,
api.NewSearchFilters())
if err != nil {
t.Fatal(err)
}

pkg/store/testdata/testdata.go (vendored, 11 changes)
@@ -6,6 +6,7 @@ import (
"log"

"github.com/alice-lg/alice-lg/pkg/api"
"github.com/alice-lg/alice-lg/pkg/pools"
)

//go:embed routes_response.json

@@ -18,6 +19,12 @@ func RoutesResponse() *api.RoutesResponse {
if err != nil {
log.Panic("could not unmarshal response test data:", err)
}
for _, route := range response.Imported {
route.NeighborID = pools.Neighbors.Acquire(*route.NeighborID)
}
for _, route := range response.Filtered {
route.NeighborID = pools.Neighbors.Acquire(*route.NeighborID)
}
return response
}

@@ -35,8 +42,8 @@ func LoadTestLookupRoutes(srcID, srcName string) api.LookupRoutes {
ID: "ID7254_AS31334",
},
}
rs := &api.RouteServer{
ID: srcID,
rs := &api.LookupRouteServer{
ID: pools.RouteServers.Acquire(srcID),
Name: srcName,
}
imported := res.Imported.ToLookupRoutes("imported", rs, neighbors)
@@ -3,24 +3,24 @@
"version": "0.1.0",
"private": true,
"dependencies": {
"@fortawesome/fontawesome-svg-core": "^6.1.1",
"@fortawesome/free-regular-svg-icons": "^6.1.1",
"@fortawesome/free-solid-svg-icons": "^6.1.1",
"@fortawesome/react-fontawesome": "^0.1.18",
"@testing-library/jest-dom": "^5.16.4",
"@testing-library/react": "^13.3.0",
"@testing-library/user-event": "^14.2.1",
"axios": "^0.27.2",
"big-integer": "^1.6.51",
"@fortawesome/fontawesome-svg-core": "6",
"@fortawesome/free-regular-svg-icons": "6",
"@fortawesome/free-solid-svg-icons": "6",
"@fortawesome/react-fontawesome": "^0.2",
"@testing-library/jest-dom": "5",
"@testing-library/react": "13",
"@testing-library/user-event": "14",
"axios": "1",
"big-integer": "1",
"bootstrap": "3",
"moment": "^2.29.4",
"react": "^18.2.0",
"react-dom": "^18.2.0",
"moment": "2",
"react": "18",
"react-dom": "18",
"react-router-dom": "6",
"react-scripts": "5.0.1",
"react-spinners": "^0.13.3",
"sass": "^1.53.0",
"web-vitals": "^2.1.4"
"react-scripts": "5",
"react-spinners": "^0.13",
"sass": "1",
"web-vitals": "3"
},
"scripts": {
"start": "react-scripts start",

@@ -46,5 +46,17 @@
"last 1 firefox version",
"last 1 safari version"
]
},
"jest": {
"transform": {
"^.+\\.[t|j]sx?$": "babel-jest"
},
"transformIgnorePatterns": [
"node_modules/(?!axios)/"
]
},
"devDependencies": {
"@babel/plugin-proposal-private-property-in-object": "^7.21.11",
"@babel/plugin-transform-private-property-in-object": "^7.23.4"
}
}
@@ -37,6 +37,47 @@ import SearchGlobalPage
import NotFoundPage
from 'app/pages/NotFoundPage';

/**
 * Select the current page to show in the application
 * using the router.
 */
const Routing = () => (
<Routes>
<Route index element={<StartPage />} />

{/* RouteServers */}
<Route
path="routeservers/:routeServerId"
element={<RouteServerPage />}>

<Route index element={<NeighborsPage />} />

{/* Neighbors */}
<Route
path="neighbors/:neighborId/routes"
element={<RoutesPage />} />
{/* DEPRECATION NOTICE: The 'protocols' route will be */}
{/* removed and is only here for backwards compatibility */}
<Route
path="protocols/:neighborId/routes"
element={<RoutesPage />} />

</Route>

{/* Search */}
<Route path="search" element={<SearchGlobalPage />} />

{/* Fallback */}
<Route path="*" element={<NotFoundPage />} />
</Routes>
);

/**
 * The application main entry point.
 * Instanciate global providers and the router.
 */
const Main = () => {
return (
<ErrorsProvider>

@@ -45,27 +86,7 @@ const Main = () => {
<ContentProvider>
<BrowserRouter>
<Layout>
<Routes>
<Route index element={<StartPage />} />

{/* RouteServers */}
<Route path="routeservers/:routeServerId"
element={<RouteServerPage />}>

<Route index
element={<NeighborsPage />} />
<Route path="protocols/:neighborId/routes"
element={<RoutesPage />} />

</Route>

{/* Search */}
<Route path="search"
element={<SearchGlobalPage />} />

{/* Fallback */}
<Route path="*" element={<NotFoundPage />} />
</Routes>
<Routing />
</Layout>
</BrowserRouter>
</ContentProvider>

ui/src/app/components/asns/AsPath.js (new file, 12 lines)
@@ -0,0 +1,12 @@
import AsnLink
from 'app/components/asns/AsnLink';

/**
 * Render an AS path as a list of links to ASNs.
 */
const AsPath = ({ asns }) => asns.map((asn, i) => (
[<AsnLink key={i} asn={asn} />, " "]
));

export default AsPath;

ui/src/app/components/asns/AsnLink.js (new file, 14 lines)
@@ -0,0 +1,14 @@
/**
 * Wrap an ASNumber with a link to bgp.tools for more information.
 */
const AsnLink = ({ asn }) => {
// const baseUrl = "https://irrexplorer.nlnog.net/asn/AS";
const baseUrl = "https://bgp.tools/as/";
const url = `${baseUrl}${asn}`;
return (
<a href={url} target="_blank" rel="noreferrer">{asn}</a>
);
}

export default AsnLink;

ui/src/app/components/content/Content.test.js (new file, 26 lines)
@@ -0,0 +1,26 @@
import { useEffect } from 'react';

import { render, screen } from '@testing-library/react';

import Content from 'app/components/content/Content';
import { ContentProvider } from 'app/context/content';
import { updateContent } from 'api';

test("render Content with test context", () => {

const App = () => {
useEffect(() => {
updateContent({"cid": "test123"});
});

return (
<ContentProvider>
<Content id="cid" />
</ContentProvider>
);
};

render(<App />);
expect(screen.queryByText("test123")).not.toBe(null);
});
@@ -9,8 +9,11 @@ import { parseServerTime }
/**
 * DateTime formats the provided datetime
 */
const DateTime = ({value, format="LLLL"}) => {
const time = parseServerTime(value);
const DateTime = ({value, format="LLLL", utc=false}) => {
let time = parseServerTime(value);
if (utc) {
time = time.utc();
}
return (<>{time.format(format)}</>);
}

ui/src/app/components/datetime/DateTime.test.js (new file, 12 lines)
@@ -0,0 +1,12 @@
import {render, screen} from '@testing-library/react';

import DateTime from 'app/components/datetime/DateTime';

test("render a parsed server time as date time", () => {
const t = "2022-05-06T23:42:11.123Z";
render(<p data-testid="result"><DateTime value={t} utc={true}/></p>);

const result = screen.getByTestId("result");
expect(result.innerHTML).toBe("Friday, May 6, 2022 11:42 PM");
});

ui/src/app/components/datetime/RelativeTime.test.js (new file, 15 lines)
@@ -0,0 +1,15 @@
import {render, screen} from '@testing-library/react';
import moment from 'moment';

import RelativeTime from 'app/components/datetime/RelativeTime';

test("render a relative time", () => {
const t = moment().subtract(11, 'hours');
render(<p data-testid="result"><RelativeTime value={t} /></p>);

const result = screen.getByTestId("result");
expect(result.innerHTML).toBe("11 hours ago");
});
@@ -3,9 +3,13 @@ import moment from 'moment'
/**
 * Render a relative timestamp
 */
const RelativeTimestamp = ({value, suffix}) => {
const RelativeTimestamp = ({value, suffix, now}) => {
if (!now) {
now = moment().utc();
} else {
now = moment(now);
}
const tsMs = value / 1000.0 / 1000.0; // nano -> micro -> milli
const now = moment.utc()
const rel = now.subtract(tsMs, 'ms');
return (
<>{rel.fromNow(suffix)}</>

ui/src/app/components/datetime/RelativeTimestamp.test.js (new file, 14 lines)
@@ -0,0 +1,14 @@
import {render, screen} from '@testing-library/react';

import RelativeTimestamp from 'app/components/datetime/RelativeTimestamp';

test("render a relative timestamp", () => {
const t = 15 * 60 * 1000 * 1000 * 1000; // 15 min
render(<p data-testid="result"><RelativeTimestamp value={t} /></p>);

const result = screen.getByTestId("result");
expect(result.innerHTML).toBe("15 minutes ago");
});

ui/src/app/components/datetime/RelativeTimestampFormat.js (new file, 24 lines)
@@ -0,0 +1,24 @@
import moment from 'moment'

/**
 * Render the formated 'absolute' time when given a
 * relative timestamp (in nanoseconds).
 *
 * The timestamp is the duration from now to the absolute
 * date time in the past.
 */
const RelativeTimestampFormat = ({value, format, now}) => {
if (!now) {
now = moment().utc();
} else {
now = moment(now);
}
const tsMs = value / 1000.0 / 1000.0; // nano -> micro -> milli
const abs = now.subtract(tsMs, 'ms');
return (
<>{abs.format(format)}</>
);
}

export default RelativeTimestampFormat;
@@ -0,0 +1,18 @@
import {render, screen} from '@testing-library/react';
import moment from 'moment';

import RelativeTimestampFormat from 'app/components/datetime/RelativeTimestampFormat';

test("render a formatted relative timestamp", () => {
const now = moment.utc();
const time = now.clone().subtract(10, 'hours');
const t = (now - time) * 1000 * 1000;
render(<p data-testid="result"><RelativeTimestampFormat value={t} /></p>);
const result = screen.getByTestId("result");

const expected = time.format();
expect(result.innerHTML).toBe(expected);
});

ui/src/app/components/datetime/time.test.js (new file, 12 lines)
@@ -0,0 +1,12 @@
import {parseServerTime} from 'app/components/datetime/time';

test("parse server time", () => {
const t = "2023-10-24T23:42:11.3333333333Z";
const result = parseServerTime(t).utc();
expect(result).not.toBe(null);

expect(result.month()).toBe(9);
expect(result.year()).toBe(2023);
expect(result.date()).toBe(24);
});
@@ -73,7 +73,7 @@ const Error = ({error, onDismiss}) => {
{rs && <span> of <b>{rs.name}</b></span>}
{errorStatus}.
</p>
<p>If this problem persist, we suggest you
<p>If this problem persists, we suggest you
try again later.</p>
</div>
);
@@ -3,11 +3,14 @@ import { useMemo
}
from 'react';

import { useQuery }
from 'app/context/query';
import { useReadableCommunity }
from 'app/context/bgp';
import { FILTER_GROUP_COMMUNITIES
, FILTER_GROUP_EXT_COMMUNITIES
, FILTER_GROUP_LARGE_COMMUNITIES
, useFilters
, useCommunitiesFilters
, useExtCommunitiesFilters
, useLargeCommunitiesFilters

@@ -24,6 +27,10 @@ const AppliedCommunity = ({group, filter, onRemove}) => {
const removeFilter = useCallback(() => {
onRemove([group, filter.value]);
}, [filter, group, onRemove]);
const [{q}] = useQuery();
let query = q || '';
const repr = filter.value.join(':');
const canRemove = !query.includes(repr);

return (
<tr>

@@ -31,7 +38,7 @@ const AppliedCommunity = ({group, filter, onRemove}) => {
<BgpCommunityLabel community={filter.value} />
</td>
<td>
<ButtonRemoveFilter onClick={removeFilter} />
{canRemove &&<ButtonRemoveFilter onClick={removeFilter} />}
</td>
</tr>
);

@@ -86,6 +93,7 @@ const useUpdateFilters = (filter) => {

const CommunitiesSelect = () => {
const { filters } = useFilters();
const { apply, remove } = useUpdateFilters();

const communitiesFilters = useCommunitiesFilters();

@@ -101,21 +109,16 @@ const CommunitiesSelect = () => {
remove[group](value);
}, [remove]);

// Nothing to do if we don't have filters
// Nothing to do if we don't have filters or if the community
// filter is disable because of a large result set.
const filtersNotAvailable = filters.notAvailable;
const isDisabled = filtersNotAvailable.includes("communities");

const hasAvailable =
communitiesFilters.filters.available.length > 0 ||
extCommunitiesFilters.filters.available.length > 0 ||
largeCommunitiesFilters.filters.available.length > 0;

const hasApplied =
communitiesFilters.filters.applied.length > 0 ||
extCommunitiesFilters.filters.applied.length > 0 ||
largeCommunitiesFilters.filters.applied.length > 0;

if (!hasApplied && !hasAvailable) {
return null; // nothing to do here.
}

const communitiesAvailable =
communitiesFilters.filters.available.sort((a, b) => {
return (a.value[0] - b.value[0]) * 100000 + (a.value[1] - b.value[1]);

@@ -171,6 +174,11 @@ const CommunitiesSelect = () => {
{appliedCommunities}
{appliedExtCommunities}
{appliedLargeCommunities}
{isDisabled && <div className="text-muted">
Due to a large number of results, selecting BGP communities
becomes available only after selecting a route server or
a neighbor.
</div>}
{hasAvailable &&
<tr>
<td className="select-container" colSpan="2">
@@ -1,4 +1,5 @@
import { useEffect } from 'react';

export const ModalHeader = ({children, onDismiss}) => {
return (

@@ -37,6 +38,19 @@ export const Modal = ({
onDismiss,
className="",
}) => {
// When escape is pressed, the modal is dismissed
useEffect(() => {
let handler = (e) => {
if (e.key === "Escape" || e.key === "Esc") {
onDismiss();
}
};
document.addEventListener("keyup", handler);
return () => {
document.removeEventListener("keyup", handler);
};
});

return (
<div className={className}>
<div className="modal modal-open modal-show fade in" role="dialog">
@@ -5,6 +5,7 @@ import axios
import { useEffect
, useState
, useCallback
, useMemo
}
from 'react';
import { Link }

@@ -12,10 +13,14 @@ import { Link }

import { useErrorHandler }
from 'app/context/errors';
import { useRouteServers }
import { useRouteServers, useRouteServer }
from 'app/context/route-servers';

/**
 * Show the name of the route server and display
 * the type and version. In case the route server is not
 * available, show an error message.
 */
const Status = ({routeServerId}) => {
const [status, setStatus] = useState({
backend: "",

@@ -33,15 +38,17 @@ const Status = ({routeServerId}) => {
});
}, [routeServerId, handleError]);

if (error && error.code >= 100 && error.code < 200) {
const errorInfo = error?.response?.data;

if (errorInfo && errorInfo.tag === "CONNECTION_REFUSED") {
return (
<div className="routeserver-status">
<div className="api-error">
unreachable
route server unreachable
</div>
</div>
);
} else if (error?.response?.data?.tag === "GENERIC_ERROR") {
} else if (errorInfo && errorInfo.tag === "GENERIC_ERROR") {
return (
<div className="routeserver-status">
<div className="api-error">

@@ -49,11 +56,11 @@ const Status = ({routeServerId}) => {
</div>
</div>
);
} else if (error) {
} else if (errorInfo) {
return (
<div className="routeserver-status">
<div className="api-error">
{error.response?.data?.tag}
{errorInfo.tag}
</div>
</div>
);

@@ -109,7 +116,20 @@ const GroupSelect = ({groups, selected, onSelect}) => {
<GroupSelectOption key={group} group={group} onSelect={selectGroup} />
);

const dropdownClass = `dropdown ${expanded && 'open'}`;
// Partition options into n coulumns with a maximum
// of 10 rows per column.
const maxRows = 10;
const n = Math.ceil(options.length / maxRows);
const columns = [];
for (let i = 0; i < n; i++) {
columns.push(options.slice(i * maxRows, (i + 1) * maxRows));
}

let dropdownClass = "rs-group-dropdown";
if (expanded) {
dropdownClass += " open";
}

return (
<div className="routeservers-groups-select">

@@ -123,38 +143,69 @@ const GroupSelect = ({groups, selected, onSelect}) => {
{selected}
<span className="caret"></span>
</button>
<ul className="dropdown-menu"
aria-labelledby="select-routeservers-group">
{options}
</ul>
<div className="dropdown-options">
{columns.map((options, i) => (
<ul key={i}>
{options}
</ul>
))}
</div>

</div>
</div>
);
}

/**
 * useGroupSelect holds the state of the group selector,
 * it accepts the list of routes and returns the selected group.
 */
const useRouteServerGroup = () => {
const routeServers = useRouteServers();
const current = useRouteServer();
const [selectedGroup, setSelectedGroup] = useState(null);

useEffect(() => {
let selected = routeServers[0]?.group;
if (current) {
selected = current.group;
}
setSelectedGroup(selected);
}, [routeServers, current])

const group = useMemo(() =>
routeServers.filter((rs) => rs.group === selectedGroup),
[routeServers, selectedGroup]);

return [group, selectedGroup, setSelectedGroup];
}

/**
 * useRouteServerGroups gets all groups
 */
const useRouteServerGroups = () => {
const routeServers = useRouteServers();
const groups = useMemo(() => {
let groups = [];
for (const rs of routeServers) {
if (groups.indexOf(rs.group) === -1) {
groups.push(rs.group);
}
}
return groups;
}, [routeServers]);

return groups;
}

/**
 * Routeservers shows a list of routeservers for navigation
 */
const RouteServers = () => {
const routeServers = useRouteServers();
const [selectedGroup, setSelectedGroup] = useState(null);

let groups = [];
for (const rs of routeServers) {
if (groups.indexOf(rs.group) === -1) {
groups.push(rs.group);
}
}

useEffect(() => {
setSelectedGroup(routeServers[0]?.group);
}, [routeServers])

if (selectedGroup === null) {
return null; // nothing to display yet
}

const groupRs = routeServers.filter((rs) => rs.group === selectedGroup);
const groups = useRouteServerGroups();
const [routeServers, selectedGroup, setSelectedGroup] = useRouteServerGroup();

return (
<div className="routeservers-list">

@@ -163,7 +214,7 @@ const RouteServers = () => {
selected={selectedGroup}
onSelect={setSelectedGroup} />
<ul>
{groupRs.map((rs) =>
{routeServers.map((rs) =>
<li key={rs.id}>
<Link to={`/routeservers/${rs.id}`}
className="routeserver-id">{rs.name}</Link>
@@ -19,7 +19,7 @@ const LocalRelatedPeersTabs = () => {
}

const peerUrl = (n) =>
`/routeservers/${routeServerId}/protocols/${n.id}/routes`;
`/routeservers/${routeServerId}/neighbors/${n.id}/routes`;

const relatedPeers = peers.map((p) => (
<li key={p.id}
@@ -34,6 +34,8 @@ import { isUpState }
from 'app/components/neighbors/state';
import RelativeTimestamp
from 'app/components/datetime/RelativeTimestamp';
import AsnLink
from 'app/components/asns/AsnLink';

/**
 * Default: Sort by ASN, ascending order.

@@ -143,7 +145,7 @@ const RoutesLink = ({neighbor, children}) => {
if (!isUpState(neighbor.state)) {
return <>{children}</>;
};
const url = `/routeservers/${routeServerId}/protocols/${neighbor.id}/routes`;
const url = `/routeservers/${routeServerId}/neighbors/${neighbor.id}/routes`;
return (
<Link to={url}>{children}</Link>
);

@@ -238,6 +240,12 @@ const ColPlain = ({neighbor, column}) => {
);
}

const ColAsn = ({neighbor}) => {
return (
<td><AsnLink asn={neighbor.asn} /></td>
);
}

const ColNotAvailable = () => {
return <td>-</td>;
}

@@ -247,7 +255,7 @@ const NeighborColumn = ({neighbor, column}) => {
const rs = useRouteServer();
const widgets = {
// Special cases
"asn": ColPlain,
"asn": ColAsn,
"state": ColPlain,

"Uptime": ColUptime,
@@ -49,7 +49,7 @@ const PeerLink = ({to, children}) => {

let peerUrl;
if (isUpState(neighbor.state)) {
peerUrl = `/routeservers/${rid}/protocols/${pid}/routes`;
peerUrl = `/routeservers/${rid}/neighbors/${pid}/routes`;
} else {
peerUrl = `/routeservers/${rid}#sessions-down`;
}
ui/src/app/components/routes/Age.js (new file, 56 lines)
@@ -0,0 +1,56 @@
import { useMemo }
from 'react';

import { useApiStatus }
from 'app/context/api-status';

import RelativeTimestampFormat
from 'app/components/datetime/RelativeTimestampFormat';
import RelativeTimestamp
from 'app/components/datetime/RelativeTimestamp';

export const RouteAgeDetails = ({route}) => {
const api = useApiStatus();

return useMemo(() =>
<>
<RelativeTimestampFormat
value={route.age}
now={api.receivedAt}
format="YYYY-MM-DD HH:mm:ss"/> UTC
<b> (<RelativeTimestamp
value={route.age}
now={api.receivedAt}
suffix={true} />)
</b>
</>,
[route.age, api.receivedAt]
);
}

export const RouteAgeRelative = ({route}) => {
const api = useApiStatus();

return useMemo(() =>
<RelativeTimestamp
value={route.age}
now={api.receivedAt}
suffix={true} />,
[route.age, api.receivedAt]
);
}

export const RouteAgeAbsolute = ({route}) => {
const api = useApiStatus();

return useMemo(() =>
<><RelativeTimestampFormat
value={route.age}
now={api.receivedAt}
format="YYYY-MM-DD HH:mm:ss"/> UTC</>
, [route.age, api.receivedAt]
);
}
@@ -6,6 +6,8 @@ import { useRoutesReceived
, useRoutesNotExported
}
from 'app/context/routes';
import { isTimeoutError }
from 'app/context/errors';

/**

@@ -51,6 +53,18 @@ const EmptyResults = () => {

// Maybe this has something to do with a filter
if (!hasContent && hasQuery && isRequested) {
if (isTimeoutError(received?.error)) {
return (
<div className="card info-result-empty">
<h4 className="text-danger">The query took too long to process.</h4>
<p>
Unfortunately, it looks like the query matches a lot of routes.<br />
Please try to refine your query to be more specific.
</p>
</div>
);
}

return (
<div className="card info-result-empty">
<h4>No routes matching your query.</h4>
ui/src/app/components/routes/FlagIcon.js (new file, 21 lines)
@@ -0,0 +1,21 @@
import { FontAwesomeIcon }
from '@fortawesome/react-fontawesome';

/**
 * Display a route flag icon indicator with a tooltip.
 *
 * @param icon - The icon to display.
 * @param tooltip - The tooltip to display.
 */
const FlagIcon = ({icon, tooltip}) => {
return (
<>
<i><FontAwesomeIcon icon={icon} /></i>
<div>{tooltip}</div>
</>
);
}

export default FlagIcon;

ui/src/app/components/routes/FlagIcon.test.js (new file, 23 lines)
@@ -0,0 +1,23 @@
import { render, screen }
from '@testing-library/react';

import { faCircle }
from '@fortawesome/free-solid-svg-icons';

import FlagIcon
from './FlagIcon';

/**
 * Test rendering of the flag icon component.
 */
test('renders flag icon', () => {
render(
<div data-testid="icon">
<FlagIcon icon={faCircle} tooltip="A flag icon" />
</div>
);

// Check that the tooltip is in the document.
expect(screen.getByText('A flag icon')).toBeInTheDocument();
});
@@ -4,7 +4,6 @@ import { useCallback }

import { useRouteDetails }
from 'app/context/routes';

import { Modal
, ModalHeader
, ModalBody

@@ -13,11 +12,13 @@ import { Modal
from 'app/components/modal/Modal';
import BgpCommunitiyLabel
from 'app/components/routes/BgpCommunityLabel';

import { RouteAgeDetails }
from 'app/components/routes/Age';
import AsPath
from 'app/components/asns/AsPath';

const RouteDetailsModal = () => {
const [ route, setRoute ] = useRouteDetails();

const onDismiss = useCallback(() => setRoute(null), [setRoute]);

const attrs = route?.bgp;

@@ -49,6 +50,12 @@ const RouteDetailsModal = () => {
<ModalBody>
<table className="table table-nolines">
<tbody>
<tr>
<th>Age:</th>
<td>
<RouteAgeDetails route={route} />
</td>
</tr>
<tr>
<th>Origin:</th><td>{attrs.origin}</td>
</tr>

@@ -59,12 +66,13 @@ const RouteDetailsModal = () => {
<th>Next Hop:</th><td>{attrs.next_hop}</td>
</tr>
<tr>
<th>MED</th>
<th>MED:</th>
<td>{attrs.med}</td>
</tr>
{attrs.as_path &&
<tr>
<th>AS Path:</th><td>{attrs.as_path.join(' ')}</td>
<th>AS Path:</th>
<td><AsPath asns={attrs.as_path} /></td>
</tr>}
{communities.length > 0 &&
<tr>
@ -24,13 +24,32 @@ import BlackholeIndicator
  from 'app/components/routes/flags/BlackholeIndicator';
import RejectCandidateIndicator
  from 'app/components/routes/flags/RejectCandidateIndicator';
import AsPath
  from 'app/components/asns/AsPath';
import AsnLink
  from 'app/components/asns/AsnLink';
import { RouteAgeRelative }
  from 'app/components/routes/Age';

// Helper: Lookup value in route path
export const getAttr = (r, path) => {
  return path.split(".").reduce((acc, elem) => acc[elem], r);
}

// Linking: Create link targets as a function of the route
// Link to the route server
const linkRouteServer = (route) =>
  `/routeservers/${route?.routeserver?.id}`;

// Create a link to the routes page of a neighbor
const linkNeighborRoutes = (route) => {
  const rs = route?.routeserver?.id;
  const neighbor = route?.neighbor_id;
  return `/routeservers/${rs}/neighbors/${neighbor}/routes`;
}

// Default column: Show the attribute and bind the
// onClick attribute.
export const ColDefault = ({onClick, route, column}) => {
  return (
    <td>
@ -39,6 +58,20 @@ export const ColDefault = ({onClick, route, column}) => {
  );
}

// ColLink provides a cell with a linkable target.
// The attribute `to` is a function of the `route`
// attribute, returning the url.
export const ColLink = ({to, route, column}) => {
  const href = to(route);
  return (
    <td>
      <a href={href} target="_blank" rel="noreferrer">
        {getAttr(route, column)}
      </a>
    </td>
  );
}

// Include filter and noexport reason in this column.
export const ColNetwork = ({onClick, route}) => {
  return (
@ -58,18 +91,32 @@ export const ColAsPath = ({route}) => {
  if(!asns){
    asns = [];
  }
  const baseUrl = "https://irrexplorer.nlnog.net/asn/AS"
  let asnLinks = asns.map((asn, i) => {
    return (<a key={`${asn}_${i}`} href={baseUrl + asn} target="_blank" rel="noreferrer">{asn} </a>);
  });

  return (
    <td>
      {asnLinks}
      <AsPath asns={asns} />
    </td>
  );
}

// ASN
export const ColAsn = ({route, column, onClick}) => {
  const asn = getAttr(route, column);
  return (
    <td className="col-route-asn">
      <AsnLink asn={asn} />
    </td>
  );
}

// Route Age
export const ColAge = ({route, onClick}) => {
  return (
    <td className="route-age">
      <span onClick={onClick}><RouteAgeRelative route={route} /></span>
    </td>
  );
}

export const ColFlags = ({route}) => {
  return (
    <td className="col-route-flags">
@ -83,19 +130,35 @@ export const ColFlags = ({route}) => {
  );
}


export const ColRouteServer = ({route, column}) =>
  <ColLink to={linkRouteServer} route={route} column={column} />;


export const ColNeighbor = ({route, column}) =>
  <ColLink to={linkNeighborRoutes} route={route} column={column} />;


const RouteColumn = ({onClick, column, route}) => {
  const widgets = {
  const cells = {
    "network": ColNetwork,
    "flags": ColFlags,
    "bgp.as_path": ColAsPath,

    "Flags": ColFlags,
    "ASPath": ColAsPath,
    "asn": ColAsn,
    "neighbor.asn": ColAsn,

    "routeserver.name": ColRouteServer,
    "neighbor.description": ColNeighbor,

    "age": ColAge,
  };

  let Widget = widgets[column] || ColDefault;
  let Cell = cells[column] || ColDefault;
  return (
    <Widget
    <Cell
      column={column}
      route={route}
      onClick={onClick} />
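Editor's note: getAttr and the cells mapping determine how each configured column key is rendered, falling back to ColDefault for unknown keys. A small, self-contained usage sketch (values are hypothetical):

// getAttr (copied from the diff) resolves a dotted path against the route.
const getAttr = (r, path) =>
  path.split(".").reduce((acc, elem) => acc[elem], r);

const route = { neighbor: { description: "Example IXP Peer" } };
console.log(getAttr(route, "neighbor.description"));
// -> "Example IXP Peer"

// Column keys from the diff and the cells they map to:
//   "routeserver.name"     -> ColRouteServer (links to /routeservers/<id>)
//   "neighbor.description" -> ColNeighbor    (links to the neighbor's routes page)
//   "age"                  -> ColAge         (relative route age)
//   anything else          -> ColDefault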
@ -1,36 +1,66 @@

import { FontAwesomeIcon }
  from '@fortawesome/react-fontawesome';
import { faCircle }
  from '@fortawesome/free-solid-svg-icons';

import { useRouteServer }
  from 'app/context/route-servers';
import { matchCommunityRange
       , useBlackholeCommunities
       }
  from 'app/context/bgp';

import FlagIcon
  from 'app/components/routes/FlagIcon';

/**
 * BlackholeIndicator
 * Displays a blackhole indicator if the route is a blackhole.
 */
const BlackholeIndicator = ({route}) => {
  const routeServer = useRouteServer(); // blackholes are stored per RS
  const blackholeCommunities = useBlackholeCommunities();

  const blackholes = routeServer?.blackholes || [];
  const communities = route?.bgp?.communities || [];
  const nextHop = route?.bgp?.next_hop;
  const routeStandard = route?.bgp?.communities || [];
  const routeExtended = route?.bgp?.ext_communities || [];
  const routeLarge = route?.bgp?.large_communities || [];

  // Check if next hop is a known blackhole
  let isBlackhole = blackholes.includes(nextHop);

  // Check if BGP community 65535:666 is set
  for (const c of communities) {
    if (c[0] === 65535 && c[1] === 666) {
      isBlackhole = true;
      break;
  // Check standard communities
  for (const c of blackholeCommunities.standard) {
    for (const r of routeStandard) {
      if (matchCommunityRange(r, c)) {
        isBlackhole = true;
        break;
      }
    }
  }
  // Check large communities
  for (const c of blackholeCommunities.large) {
    for (const r of routeLarge) {
      if (matchCommunityRange(r, c)) {
        isBlackhole = true;
        break;
      }
    }
  }
  // Check extended
  for (const c of blackholeCommunities.extended) {
    for (const r of routeExtended) {
      if (matchCommunityRange(r, c)) {
        isBlackhole = true;
        break;
      }
    }
  }

  if (isBlackhole) {
    return(
      <span className="route-prefix-flag blackhole-route is-blackhole-route">
        <FontAwesomeIcon icon={faCircle} />
        <div>Blackhole</div>
        <FlagIcon icon={faCircle} tooltip="Blackhole" />
      </span>
    );
  }
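Editor's note: matchCommunityRange is imported from app/context/bgp and is not shown in this diff. Purely as a hypothetical sketch of element-wise matching with a "*" wildcard (an assumption borrowed from how the rpki.invalid entries later in this diff use "*"), it might behave roughly like this; the real implementation may well differ:

// Hypothetical sketch only; the real helper lives in app/context/bgp.
// routeCommunity: a community from the route, e.g. [65535, 666] or [1234, 1111, 1]
// configCommunity: the configured community, possibly with "*" wildcards, e.g. ["65535", "666"]
const matchCommunityRange = (routeCommunity, configCommunity) => {
  if (routeCommunity.length !== configCommunity.length) {
    return false;
  }
  return configCommunity.every((part, i) =>
    part === "*" || routeCommunity[i].toString() === part.toString());
};

// Example: a route carrying 65535:666 would match a configured ["65535", "666"].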
@ -1,15 +1,20 @@

import { FontAwesomeIcon }
  from '@fortawesome/react-fontawesome';
import { faStar }
  from '@fortawesome/free-solid-svg-icons';

import FlagIcon
  from 'app/components/routes/FlagIcon';

/**
 * Show a primary route indicator icon
 *
 * @param route - The route object
 */
const PrimaryIndicator = ({route}) => {
  if (route.primary) {
    return(
      <span className="route-prefix-flag primary-route is-primary-route">
        <FontAwesomeIcon icon={faStar} />
        <div>Best Route</div>
        <FlagIcon icon={faStar} tooltip="Best Route" />
      </span>
    );
  }
28 ui/src/app/components/routes/flags/PrimaryIndicator.test.js Normal file
@ -0,0 +1,28 @@

import { render, screen }
  from '@testing-library/react';

import PrimaryIndicator
  from './PrimaryIndicator';

/**
 * Test rendering the primary indicator
 */
test('renders primary indicator', () => {
  // Routes for testing: primary and not primary
  const primaryRoute = {
    primary: true,
  };
  const notPrimaryRoute = {
    primary: false,
  };

  // Render the non primary route indicator
  render(<PrimaryIndicator route={notPrimaryRoute} />);
  expect(screen.queryByText('Best Route')).not.toBeInTheDocument();

  // Render the primary indicator
  render(<PrimaryIndicator route={primaryRoute} />);
  expect(screen.getByText('Best Route')).toBeInTheDocument();
});
@ -1,24 +1,28 @@

import { FontAwesomeIcon }
  from '@fortawesome/react-fontawesome';
import { faCircleExclamation }
  from '@fortawesome/free-solid-svg-icons';

import { useRejectCandidate }
  from 'app/context/bgp';

import FlagIcon
  from 'app/components/routes/FlagIcon';

/**
 * RejectCandidateIndicator
 * Displays a flag if the route is a reject candidate.
 *
 * @param route - The route to check
 */
const RejectCandidateIndicator = ({route}) => {
  const isRejectCandidate = useRejectCandidate(route);
  if (!isRejectCandidate) {
    return null;
  }

  const cls = `route-prefix-flag reject-candidate-route`;
  return (
    <span className={cls}>
      <FontAwesomeIcon icon={faCircleExclamation} />
      <div>Reject Candidate</div>
    <span className="route-prefix-flag reject-candidate-route">
      <FlagIcon icon={faCircleExclamation} tooltip="Reject Candidate" />
    </span>
  );
}
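Editor's note: useRejectCandidate is also provided by app/context/bgp and is not shown here. The test below configures reject_candidates.communities as a nested map keyed by the three parts of a large community; a hypothetical sketch of the lookup that shape suggests (the real hook reads the config from context and may support wildcards):

// Hypothetical sketch of the lookup implied by the test's config shape.
// config.reject_candidates.communities = { 1111: { 1234: { 1: "reject-candidate-2" } } }
const isRejectCandidate = (config, route) => {
  const candidates = config?.reject_candidates?.communities || {};
  const communities = route?.bgp?.large_communities || [];
  return communities.some(([a, b, c]) =>
    Boolean(candidates?.[a]?.[b]?.[c]));
};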
@ -0,0 +1,46 @@
import { render, screen }
  from '@testing-library/react';

import RejectCandidateIndicator
  from './RejectCandidateIndicator';

import { ConfigContext }
  from 'app/context/config';


// Mock config with reject candidate community
const config = {
  reject_candidates: {
    communities: {
      1111: {
        1234: {
          1: "reject-candidate-2",
        },
      },
    },
  },
};

/**
 * Test the RejectCandidateIndicator component with
 * a route matching the configured reject candidate community.
 */
test('renders reject candidate indicator', () => {
  const route = {
    bgp: {
      large_communities: [
        [1111, 1234, 1],
      ],
    },
  };

  // Render the component
  render(
    <ConfigContext.Provider value={config}>
      <RejectCandidateIndicator route={route} />
    </ConfigContext.Provider>
  );

  // Check that the indicator is rendered
  expect(screen.getByText('Reject Candidate')).toBeInTheDocument();
});
@ -1,6 +1,4 @@

import { FontAwesomeIcon }
  from '@fortawesome/react-fontawesome';
import { faCircleCheck
       , faCircleMinus
       , faCircleQuestion
@ -12,6 +10,9 @@ import { faCircle }
import { useConfig }
  from 'app/context/config';

import FlagIcon
  from 'app/components/routes/FlagIcon';

const RpkiIndicator = ({route}) => {
  const { rpki } = useConfig();

@ -19,45 +20,44 @@ const RpkiIndicator = ({route}) => {
  if (rpki.enabled === false) { return null; }

  // Check for BGP large communities as configured in the alice.conf
  // FIXME: why are we using strings here?! ['1234', '123', '1'].
  const rpkiValid = rpki.valid;
  const rpkiUnknown = rpki.unknown;
  const rpkiNotChecked = rpki.not_checked;
  const rpkiInvalid = rpki.invalid;

  const communities = route?.bgp?.large_communities || [];

  const matchCommunity = (com, coms) =>
    coms.some((match) =>
      (com[0].toFixed() === match[0] &&
       com[1].toFixed() === match[1] &&
       com[2].toFixed() === match[2]));

  for (const com of communities) {
    // RPKI VALID
    if (com[0].toFixed() === rpkiValid[0] &&
        com[1].toFixed() === rpkiValid[1] &&
        com[2].toFixed() === rpkiValid[2]) {
    if (matchCommunity(com, rpkiValid)) {
      return (
        <span className="route-prefix-flag rpki-route rpki-valid">
          <FontAwesomeIcon icon={faCircleCheck} />
          <div>RPKI Valid</div>
          <FlagIcon icon={faCircleCheck} tooltip="RPKI Valid" />
        </span>
      );
    }

    // RPKI UNKNOWN
    if (com[0].toFixed() === rpkiUnknown[0] &&
        com[1].toFixed() === rpkiUnknown[1] &&
        com[2].toFixed() === rpkiUnknown[2]) {
    if (matchCommunity(com, rpkiUnknown)) {
      return (
        <span className="route-prefix-flag rpki-route rpki-unknown">
          <FontAwesomeIcon icon={faCircleQuestion} />
          <div>RPKI Unknown</div>
          <FlagIcon icon={faCircleQuestion} tooltip="RPKI Unknown" />
        </span>
      );
    }

    // RPKI NOT CHECKED
    if (com[0].toFixed() === rpkiNotChecked[0] &&
        com[1].toFixed() === rpkiNotChecked[1] &&
        com[2].toFixed() === rpkiNotChecked[2]) {
    if (matchCommunity(com, rpkiNotChecked)) {
      return (
        <span className="route-prefix-flag rpki-route rpki-not-checked">
          <FontAwesomeIcon icon={faCircle} />
          <div>RPKI not checked</div>
          <FlagIcon icon={faCircle} tooltip="RPKI Not Checked" />
        </span>
      );
    }
@ -66,20 +66,23 @@ const RpkiIndicator = ({route}) => {
    // Depending on the configuration this can either be a
    // single flag or a range with a given reason.
    let rpkiInvalidReason = 0;
    if (com[0].toFixed() === rpkiInvalid[0] &&
        com[1].toFixed() === rpkiInvalid[1]) {
    for (const invalid of rpkiInvalid) {
      if (com[0].toFixed() === invalid[0] &&
          com[1].toFixed() === invalid[1]) {

      // This needs to be considered invalid, now try to detect why
      if (rpkiInvalid.length > 3 && rpkiInvalid[3] === "*") {
        // Check if token falls within range
        const start = parseInt(rpkiInvalid[2], 10);
        if (com[2] >= start) {
          rpkiInvalidReason = com[2];
        }
      } else {
        if (com[2].toFixed() === rpkiInvalid[2]) {
          rpkiInvalidReason = 1;
        // This needs to be considered invalid, now try to detect why
        if (invalid.length > 3 && invalid[3] === "*") {
          // Check if token falls within range
          const start = parseInt(invalid[2], 10);
          if (com[2] >= start) {
            rpkiInvalidReason = com[2];
          }
        } else {
          if (com[2].toFixed() === invalid[2]) {
            rpkiInvalidReason = 1;
          }
        }
        break; // We found a match, stop searching
      }
    }

@ -88,8 +91,7 @@ const RpkiIndicator = ({route}) => {
    const cls = `route-prefix-flag rpki-route rpki-invalid rpki-invalid-${rpkiInvalidReason}`
    return (
      <span className={cls}>
        <FontAwesomeIcon icon={faCircleMinus} />
        <div>RPKI Invalid</div>
        <FlagIcon icon={faCircleMinus} tooltip="RPKI Invalid" />
      </span>
    );
  }
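Editor's note: a short worked example of the invalid-range branch above, using a hypothetical rpki.invalid entry with a "*" wildcard (values are illustrative only, mirroring the logic in the diff):

// Hypothetical config entry and route community:
const invalid = ["1234", "1111", "100", "*"]; // one entry of rpki.invalid
const com = [1234, 1111, 105];                // large community on the route

let rpkiInvalidReason = 0;
if (com[0].toFixed() === invalid[0] && com[1].toFixed() === invalid[1]) {
  if (invalid.length > 3 && invalid[3] === "*") {
    // Wildcard range: anything at or above the range start counts as invalid.
    const start = parseInt(invalid[2], 10);
    if (com[2] >= start) {
      rpkiInvalidReason = com[2];
    }
  }
}
console.log(rpkiInvalidReason);
// -> 105, yielding the class "route-prefix-flag rpki-route rpki-invalid rpki-invalid-105"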
102 ui/src/app/components/routes/flags/RpkiIndicator.test.js Normal file
@ -0,0 +1,102 @@
import { render, screen }
  from '@testing-library/react';

import { ConfigContext }
  from 'app/context/config';
import RpkiIndicator
  from './RpkiIndicator';

// Provide config context with rpki settings
const config = {
  rpki: {
    enabled: true,
    valid: [["1234", "1111", "1"]],
    unknown: [["1234", "1111", "0"]],
    not_checked: [["1234", "1111", "10"]],
    invalid: [["1234", "1111", "100"]],
  },
};


/**
 * Test rendering the RpkiIndicator component.
 */
test('renders RpkiIndicator with a valid route', () => {
  // Render the RpkiIndicator component for a valid prefix
  const route = {
    bgp: {
      large_communities: [
        [1234, 1111, 1],
      ],
    },
  };
  render(
    <ConfigContext.Provider value={config}>
      <RpkiIndicator route={route} />
    </ConfigContext.Provider>
  );
  expect(screen.getByText('RPKI Valid')).toBeInTheDocument();
});

/**
 * Test rendering the RpkiIndicator component with
 * an rpki unknown route.
 */
test('renders RpkiIndicator with an unknown route', () => {
  // Render the RpkiIndicator component for an unknown prefix
  const route = {
    bgp: {
      large_communities: [
        [1234, 1111, 0],
      ],
    },
  };
  render(
    <ConfigContext.Provider value={config}>
      <RpkiIndicator route={route} />
    </ConfigContext.Provider>
  );
  expect(screen.getByText('RPKI Unknown')).toBeInTheDocument();
});

/**
 * Test rendering the RpkiIndicator component with
 * an rpki not checked route.
 */
test('renders RpkiIndicator with a not checked route', () => {
  // Render the RpkiIndicator component for a not checked prefix
  const route = {
    bgp: {
      large_communities: [
        [1234, 1111, 10],
      ],
    },
  };
  render(
    <ConfigContext.Provider value={config}>
      <RpkiIndicator route={route} />
    </ConfigContext.Provider>
  );
  expect(screen.getByText('RPKI Not Checked')).toBeInTheDocument();
});

/**
 * Test rendering the RpkiIndicator component with an
 * rpki invalid route.
 */
test('renders RpkiIndicator with an invalid route', () => {
  // Render the RpkiIndicator component for an invalid prefix
  const route = {
    bgp: {
      large_communities: [
        [1234, 1111, 100],
      ],
    },
  };
  render(
    <ConfigContext.Provider value={config}>
      <RpkiIndicator route={route} />
    </ConfigContext.Provider>
  );
  expect(screen.getByText('RPKI Invalid')).toBeInTheDocument();
});
Some files were not shown because too many files have changed in this diff.