Compare commits

...

640 Commits

Author SHA1 Message Date
Annika Hannig
6a3947bc17
Merge pull request #168 from nicko170/main
Add IAA and NZIX to README
2024-09-27 14:21:09 +02:00
Nick Pratley
d0b11b1688 Add IAA and NZIX to README 2024-09-25 11:08:54 +10:00
Annika Hannig
4a05471da0
Update .goreleaser.yaml
removed deprecated 'replacements'
2024-02-12 21:00:28 +01:00
Annika Hannig
8ce5bcd22e added release date 2024-02-12 17:53:54 +01:00
Annika Hannig
965313c62b updated changelog 2024-02-12 17:39:55 +01:00
Annika Hannig
4e88ca340c fixed typo 2024-02-08 16:22:27 +01:00
Annika Hannig
369d3e117c set default values for community filter cutoff and query limit 2024-02-08 15:13:38 +01:00
Annika Hannig
a8f6abee84 updated text 2024-02-08 15:07:27 +01:00
Annika Hannig
f427012e6f validate ext. community in search query 2024-01-31 16:26:24 +01:00
Annika Hannig
bb457e1ba9 updated changelog 2024-01-31 16:01:38 +01:00
Annika Hannig
7ab7436c85 updated example config 2024-01-30 11:46:38 +01:00
Annika Hannig
b046db6911
Merge pull request #126 from jof/jof/update-docs
Update README.md with updated links and Explanations summary
2024-01-30 11:03:11 +01:00
Annika Hannig
88f7c47884
Merge pull request #128 from jof/jof/update-example
Update example config
2024-01-30 11:00:57 +01:00
Annika Hannig
d6fed41e8d
Merge pull request #127 from jof/jof/update-spelling
Update spelling
2024-01-30 10:59:04 +01:00
Annika Hannig
65443abea0 updated dev dependencies 2024-01-30 10:56:09 +01:00
Annika Hannig
c8306988e6 removed unused import 2024-01-30 10:55:56 +01:00
Annika Hannig
d800360ea2 updated tests 2024-01-30 10:52:51 +01:00
Jonathan Lassoff
b5dec11f7c Update spelling 2024-01-30 01:43:58 -08:00
Jonathan Lassoff
f2a75fdaba Update example config 2024-01-30 01:43:46 -08:00
Jonathan Lassoff
94d5de9c97 Update README.md with updated links and Explanations summary 2024-01-30 01:43:23 -08:00
Annika Hannig
bac0119f1b
Merge pull request #125 from jof/jof/neighbour_rename
Update neighbor spelling
2024-01-30 10:40:33 +01:00
Jonathan Lassoff
b5299ebcd3 Update neighbor spelling 2024-01-30 01:32:24 -08:00
Annika Hannig
57765c6cfd fixed text inconsistencies 2024-01-26 17:03:40 +01:00
Annika Hannig
bb16957745 fixed non query context 2024-01-26 15:50:03 +01:00
Annika Hannig
4db0f92816 link to bgp tools for asns, updated tables and modal 2024-01-26 15:43:43 +01:00
Annika Hannig
0853cb83de filter from query can not be removed - yet 2024-01-26 14:31:27 +01:00
Annika Hannig
5cc6b7a0b9 removed debug 2024-01-26 14:31:05 +01:00
Annika Hannig
719da070ee global community search and examples 2024-01-26 14:19:43 +01:00
Annika Hannig
3951704b92 provide community search backend 2024-01-26 14:19:18 +01:00
Annika Hannig
dff2773826 disable legacy endpoint 2024-01-26 14:19:02 +01:00
Annika Hannig
f72c4a3518 allow passing bgp community filters in query text 2024-01-18 13:21:38 +01:00
Annika Hannig
bd78a05f37 parse filters text 2024-01-18 11:23:59 +01:00
Annika Hannig
498ce65af5 Merge branch 'develop' into feature/community-query-search 2024-01-18 10:19:37 +01:00
Annika Hannig
98847ba0cb use pointers as identifier. 2024-01-16 17:07:55 +01:00
Annika Hannig
1b600447a8 experimental query 2024-01-16 16:51:39 +01:00
Annika Hannig
b21c47c8de move filtering to store 2024-01-16 16:33:22 +01:00
Annika Hannig
852d2d7a6d fixed linting error 2024-01-16 14:27:09 +01:00
Annika Hannig
aa3fe6dd2c use internal id 2024-01-16 13:40:10 +01:00
Annika Hannig
85c5d19d4f pools with generics 2024-01-16 12:22:57 +01:00
Annika Hannig
d6fa635377 use pool for communities 2024-01-15 22:13:51 +01:00
Annika Hannig
07092b51bc fixed wrongfully assumed v6 prefix 2024-01-15 14:00:42 +01:00
Annika Hannig
130db6c0f4 inform user when communities filter is not availble 2024-01-15 11:42:48 +01:00
Annika Hannig
c272856047 updated config example with community filter cutoff 2024-01-15 11:16:34 +01:00
Annika Hannig
6fe7856808 configure community filter cutoff 2024-01-15 11:16:13 +01:00
Annika Hannig
01491ec0b4 for large result sets, do not calculate communities filters 2024-01-15 10:53:20 +01:00
Annika Hannig
f59eb1fe94 fixed route server group selection on page reload 2024-01-12 16:19:39 +01:00
Annika Hannig
585a4a6b49 added build dependency 2024-01-12 11:37:49 +01:00
Annika Hannig
6c5c7ea14f FIX: In some cases the search returned routes for neighbors not queried for. 2024-01-12 11:34:05 +01:00
Annika Hannig
e6c4b42bfc updated memory routes backend 2024-01-12 11:20:25 +01:00
Annika Hannig
5bd48aa9ac removed dead code 2024-01-12 10:47:46 +01:00
Annika Hannig
e6537752d3 tidy mod 2024-01-11 15:05:50 +01:00
Annika Hannig
1c9375d797 updated formatting 2024-01-11 15:03:05 +01:00
Annika Hannig
0d14d22cf6 minimum version bump 2024-01-11 15:00:30 +01:00
Annika Hannig
b79bbcb160 go version update 2024-01-11 14:59:57 +01:00
Annika Hannig
4c6cc454bb updated version 2024-01-11 14:55:47 +01:00
Annika Hannig
bb16436c95 updated go version 2024-01-11 14:55:09 +01:00
Annika Hannig
1d05b4605a updated dependencies 2024-01-11 14:28:24 +01:00
Annika Hannig
871dcd6673 updated go dependencies 2024-01-11 14:26:22 +01:00
Annika Hannig
0432c83899 added route age to modal (Issue #121) 2023-12-13 15:11:52 +01:00
Annika Hannig
254a0c9bf2
fixed linter error 2023-11-28 15:08:47 +01:00
Annika Hannig
eb4cc3159a
fixed rpki enable parsing 2023-11-28 14:47:49 +01:00
Annika Hannig
35240d08c6
fixed config 2023-11-28 14:34:12 +01:00
Annika Hannig
23e5539688
use links 2023-11-28 14:33:52 +01:00
Annika Hannig
cd8fdfa610 Merge branch 'develop' of ssh://github.com/alice-lg/alice-lg into develop 2023-11-24 15:30:07 +01:00
Annika Hannig
7e92e99be3
Merge branch 'develop' of ssh://github.com/alice-lg/alice-lg into develop 2023-11-23 17:24:43 +01:00
Annika Hannig
985e8ad180
updated test config 2023-11-23 17:24:04 +01:00
Annika Hannig
5cac4ee02b
added comment 2023-11-23 17:23:36 +01:00
Annika Hannig
6a4f9935e6
use rpki array 2023-11-23 16:59:29 +01:00
Annika Hannig
c131ca1a0b
use default asn for rpki fallback config 2023-11-23 15:32:31 +01:00
Annika Hannig
fd0700ed6f
updated config tests; updated rpki config 2023-11-23 15:17:51 +01:00
Annika Hannig
ad8a55815c
added check 2023-11-23 14:40:21 +01:00
Annika Hannig
ca3fe5d6a7
updated test 2023-11-23 14:39:19 +01:00
Annika Hannig
4e7330b4f3
fixed test 2023-11-22 16:58:25 +01:00
Annika Hannig
be5568d530
expand patterns 2023-11-22 16:33:17 +01:00
Annika Hannig
084f595a86 added comments 2023-11-20 14:27:00 +01:00
Annika Hannig
108504b9f9 use neighbors instead of protocols in url 2023-11-20 12:28:11 +01:00
Annika Hannig
085640170d updated documentation 2023-11-20 12:15:24 +01:00
Annika Hannig
3c13d05414
Merge pull request #119 from itzwam/patch-1
Update config.go to Support PostgresURL as ENV Var
2023-11-20 12:12:49 +01:00
Annika Hannig
88b31ece52 updated changelog 2023-11-20 12:07:09 +01:00
Annika Hannig
d0bed6e6ce
updated documentation 2023-05-26 12:22:56 +02:00
Annika Hannig
775b423152
added stream parser throttle to config 2023-05-26 11:52:25 +02:00
Annika Hannig
65f5edc83b
use throttle from config 2023-05-26 11:51:52 +02:00
Annika Hannig
ac5b8b8287
set stream parser throttle 2023-05-26 11:50:07 +02:00
Annika Hannig
d0779320e3
Merge branch 'develop' of ssh://github.com/alice-lg/alice-lg into develop 2023-05-26 11:36:16 +02:00
Annika Hannig
8d02af4b78 add parser delay to prevent excessive cpu use 2023-05-26 11:31:46 +02:00
Annika Hannig
86ce9b2681
fixed missing colon 2023-05-26 11:21:15 +02:00
Annika Hannig
7675418a2a
color tuning 2023-05-17 16:02:00 +02:00
Annika Hannig
07b8b7c993
use a multi column dropdown 2023-05-17 15:51:27 +02:00
Annika Hannig
f644edc500
also consider the gateway error a timeout 2023-05-15 15:58:03 +02:00
Annika Hannig
3d588023b0 Merge branch 'develop' of ssh://github.com/alice-lg/alice-lg into develop 2023-05-15 14:57:04 +02:00
Annika Hannig
ec963a67db
fixed missing error return 2023-05-15 14:10:22 +02:00
Annika Hannig
17a9cc67f2
fixed error display 2023-05-15 14:09:52 +02:00
Annika Hannig
4c1909a659 Merge branch 'develop' of ssh://github.com/alice-lg/alice-lg into develop 2023-05-15 12:15:50 +02:00
Annika Hannig
0449e563d8
use streaming parser 2023-05-15 12:14:01 +02:00
toinux
e8509f74d5
WTF is that char 2023-05-14 17:13:04 +01:00
toinux
ddb01405df
Update config.go - libpq handle env correctly, just passtrough with an "null" URLstring if url is none 2023-05-14 17:06:40 +01:00
toinux
ff00fc5677
Update config.go to Support PostgresURL as ENV Var 2023-05-14 16:18:34 +01:00
Annika Hannig
4f88a05f4d updated changelog 2023-05-12 14:39:26 +02:00
Annika Hannig
c32cb948d3 added timeout error 2023-05-12 14:37:28 +02:00
Annika Hannig
9c96ccd888 timeout error handling 2023-05-12 14:36:55 +02:00
Annika Hannig
a3c276a3e0 handle errors 2023-05-12 14:36:42 +02:00
Annika Hannig
e2b6272c71 check if error is a timeout 2023-05-12 14:36:17 +02:00
Annika Hannig
0873b84e68 fixed exception with 'undefined' value. 2023-05-12 13:20:12 +02:00
Annika Hannig
9a0b5b5d7d fixed typo 2023-05-12 12:54:20 +02:00
Annika Hannig
71469cf4c9 ensure compat 2023-04-14 13:10:46 +02:00
Annika Hannig
dc9baafbdb dependency updates 2023-04-14 13:07:45 +02:00
Annika Hannig
555eec64af updated changelog 2023-04-14 13:02:55 +02:00
Annika Hannig
44a67c1f89 export context for testing,
added tests for indicator icons,
use wrapped indicator icon for backward compatibility.
2023-04-14 13:01:22 +02:00
Annika Hannig
56a8fd65cc dependency upgrade 2023-04-13 14:13:15 +02:00
Annika Hannig
dd6bf20f8d changed default to memory 2023-04-13 13:58:01 +02:00
Annika Hannig
758bfa4c5e added more test and test skeleton 2023-01-26 11:40:39 +01:00
Annika Hannig
4455ade49e test relative timestamp 2023-01-26 10:53:11 +01:00
Annika Hannig
e8c2a9c7fd test relative time 2023-01-26 10:47:35 +01:00
Annika Hannig
1ea457d9f1 fixed ext community parsing 2023-01-19 12:46:58 +01:00
Annika Hannig
e9c57c811f update test, set fix timezone 2023-01-19 12:20:03 +01:00
Annika Hannig
9f3b8d6399 test updateContent API 2023-01-19 11:52:34 +01:00
Annika Hannig
da3913a25e fixed ui test 2023-01-19 11:34:29 +01:00
Annika Hannig
4084504c80 updated dependencies 2023-01-19 11:21:12 +01:00
Annika Hannig
cf90b3e903 updated dependencies 2023-01-19 11:07:35 +01:00
Annika Hannig
16c6eabe13 fixed status response 2023-01-18 15:58:13 +01:00
Annika Hannig
d768d3edac fixed tests 2023-01-18 15:42:48 +01:00
Annika Hannig
1535c5f7e9 updated changelog 2023-01-18 15:31:52 +01:00
Annika Hannig
bf95c539ce improved add filter performance 2022-11-25 15:38:38 +01:00
Annika Hannig
cbdc1f5d67 better strtoint 2022-11-25 15:02:22 +01:00
Annika Hannig
97dbe0c4c8 route id is just a duplicate 2022-11-25 14:05:48 +01:00
Annika Hannig
022e8a2fd5 do not use ID 2022-11-25 14:05:30 +01:00
Annika Hannig
6e3a433c82 do not deduplicate networks 2022-11-25 12:28:41 +01:00
Annika Hannig
50f155917a fixed test, deduplicate testdata neighborIDs 2022-11-25 12:28:24 +01:00
Annika Hannig
e30d98f0cf use pointer for neighbor filtering 2022-11-25 11:43:19 +01:00
Annika Hannig
9955f3feeb added string lookup 2022-11-25 11:37:19 +01:00
Annika Hannig
b47769817e Merge branch 'feature/blackhole-communities' into develop 2022-11-24 17:59:18 +01:00
Annika Hannig
9c8093e630 updated parsing 2022-11-24 17:59:12 +01:00
Annika Hannig
d335a22666 use blackhole community ranges 2022-11-24 17:58:33 +01:00
Annika Hannig
ee8faf706d match blackhole community ranges 2022-11-24 17:58:20 +01:00
Annika Hannig
b5b5148dec added blackhole community config parsing 2022-11-24 16:12:13 +01:00
Annika Hannig
27a6b9806d added blackhole communities ranges to config 2022-11-17 15:21:52 +01:00
Annika Hannig
a04d065423 initial blackhole communities config 2022-11-17 14:39:56 +01:00
Annika Hannig
8b4b14b7a9 test parse ext. communities form openbgpd 2022-11-17 14:34:58 +01:00
Annika Hannig
11e1508a16 added full gobgp example 2022-11-17 14:34:26 +01:00
Annika Hannig
a6da9c1b0a added gobgp config 2022-11-17 14:33:52 +01:00
Annika Hannig
c66d1973e4 updated changelog 2022-11-16 17:13:55 +01:00
Annika Hannig
4a87daf97f fixed ext. community test case 2022-11-16 17:07:16 +01:00
Annika Hannig
91562472ec
Merge pull request #108 from alice-lg/dependabot/npm_and_yarn/ui/loader-utils-2.0.4
Bump loader-utils from 2.0.3 to 2.0.4 in /ui
2022-11-16 17:02:50 +01:00
dependabot[bot]
8a67a2d7be
Bump loader-utils from 2.0.3 to 2.0.4 in /ui
Bumps [loader-utils](https://github.com/webpack/loader-utils) from 2.0.3 to 2.0.4.
- [Release notes](https://github.com/webpack/loader-utils/releases)
- [Changelog](https://github.com/webpack/loader-utils/blob/v2.0.4/CHANGELOG.md)
- [Commits](https://github.com/webpack/loader-utils/compare/v2.0.3...v2.0.4)

---
updated-dependencies:
- dependency-name: loader-utils
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-11-16 15:51:08 +00:00
Annika Hannig
ac6cae3aa6
Merge pull request #107 from alice-lg/dependabot/npm_and_yarn/ui/loader-utils-2.0.4
Bump loader-utils from 2.0.3 to 2.0.4 in /ui
2022-11-16 16:31:47 +01:00
Annika Hannig
4db8affe13 fixed parsind of extended community search filters 2022-11-16 16:30:44 +01:00
dependabot[bot]
0328bacd49
Bump loader-utils from 2.0.3 to 2.0.4 in /ui
Bumps [loader-utils](https://github.com/webpack/loader-utils) from 2.0.3 to 2.0.4.
- [Release notes](https://github.com/webpack/loader-utils/releases)
- [Changelog](https://github.com/webpack/loader-utils/blob/v2.0.4/CHANGELOG.md)
- [Commits](https://github.com/webpack/loader-utils/compare/v2.0.3...v2.0.4)

---
updated-dependencies:
- dependency-name: loader-utils
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-11-16 15:28:14 +00:00
Annika Hannig
4db3f39baa dismiss modal when esc is pressed 2022-11-16 14:26:38 +01:00
Annika Hannig
4193bcb107 Merge branch 'develop' into feature/memory-deduplication 2022-11-16 13:57:52 +01:00
Annika Hannig
2b7efe725f deduplicate extended communities 2022-11-16 13:42:51 +01:00
Annika Hannig
afc402f3cc added pool for ext. communities 2022-11-16 13:30:44 +01:00
Annika Hannig
0c589507f0 use ext communities key 2022-11-16 13:04:26 +01:00
Annika Hannig
76c4f1fb6f updated usage of routes with pools 2022-11-16 11:52:59 +01:00
Annika Hannig
3a5c2e235c updated performance evaluation 2022-11-16 11:23:07 +01:00
Annika Hannig
f649350867 use map instead of iterating a list 2022-11-16 11:20:54 +01:00
Annika Hannig
6a45a4d961 use communities pools 2022-11-16 11:12:09 +01:00
Annika Hannig
b10634d666 added communities pool 2022-11-16 10:51:49 +01:00
Annika Hannig
a8c3c11fd2 use pools in routes decoding 2022-11-16 10:25:02 +01:00
Annika Hannig
83c3d857cc added neighbors pool 2022-11-16 10:07:15 +01:00
Annika Hannig
0648f8e095 added default pools for routes 2022-11-16 10:03:33 +01:00
Annika Hannig
9da371065d added string and int lists 2022-11-15 15:36:14 +01:00
Annika Hannig
9622f5745b initial pointer list 2022-11-15 11:07:43 +01:00
Annika Hannig
8c21b29bfd added int list pool 2022-11-15 10:32:14 +01:00
Annika Hannig
59f3141f2a initial string deduplication and integer list 2022-11-14 22:54:33 +01:00
Annika Hannig
f97a4a7bc8 initial string pool 2022-11-14 19:37:37 +01:00
Annika Hannig
64e5b1874b fixed font size 2022-11-11 14:43:58 +01:00
Annika Hannig
8f5ea78429 fixed layout 2022-11-11 14:43:45 +01:00
Annika Hannig
39339b9969 updated readme 2022-11-11 10:36:04 +01:00
Annika Hannig
9064364325 fixed spelling errors 2022-11-10 12:15:52 +01:00
Annika Hannig
5bb0d0aaf2 simplified using gofmt 2022-11-10 12:07:02 +01:00
Annika Hannig
12d66146e8 added clarifications and breaking change 2022-11-10 12:01:26 +01:00
Annika Hannig
0a6155035d
Merge pull request #105 from alice-lg/dependabot/npm_and_yarn/ui/loader-utils-2.0.3
Bump loader-utils from 2.0.2 to 2.0.3 in /ui
2022-11-10 11:51:56 +01:00
Annika Hannig
691c6adb31
Merge pull request #106 from alice-lg/dependabot/npm_and_yarn/ui/terser-5.15.1
Bump terser from 5.14.1 to 5.15.1 in /ui
2022-11-10 11:51:36 +01:00
dependabot[bot]
3ad8f2da78
Bump terser from 5.14.1 to 5.15.1 in /ui
Bumps [terser](https://github.com/terser/terser) from 5.14.1 to 5.15.1.
- [Release notes](https://github.com/terser/terser/releases)
- [Changelog](https://github.com/terser/terser/blob/master/CHANGELOG.md)
- [Commits](https://github.com/terser/terser/compare/v5.14.1...v5.15.1)

---
updated-dependencies:
- dependency-name: terser
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-11-10 10:44:23 +00:00
dependabot[bot]
6e67454feb
Bump loader-utils from 2.0.2 to 2.0.3 in /ui
Bumps [loader-utils](https://github.com/webpack/loader-utils) from 2.0.2 to 2.0.3.
- [Release notes](https://github.com/webpack/loader-utils/releases)
- [Changelog](https://github.com/webpack/loader-utils/blob/v2.0.3/CHANGELOG.md)
- [Commits](https://github.com/webpack/loader-utils/compare/v2.0.2...v2.0.3)

---
updated-dependencies:
- dependency-name: loader-utils
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2022-11-10 10:44:16 +00:00
Annika Hannig
e613494e3c added release workflow 2022-11-10 11:35:23 +01:00
Annika Hannig
ea5bac3927 removed legacy package-lock 2022-11-10 11:31:11 +01:00
Annika Hannig
158dd73854 install dependencies before test 2022-11-10 11:29:17 +01:00
Annika Hannig
2f322305fe fixed test 2022-11-10 11:26:37 +01:00
Annika Hannig
cc9c919bbb fixed linter errors 2022-11-10 11:23:57 +01:00
Annika Hannig
468371737b fixed linter 2022-11-10 11:07:33 +01:00
Annika Hannig
82afece002 fixed test name 2022-11-10 11:06:18 +01:00
Annika Hannig
d9513f4c32 only run on create release 2022-11-10 11:04:52 +01:00
Annika Hannig
8ae09b11f7 do not run on PRs 2022-11-10 11:01:53 +01:00
Annika Hannig
10d07a9f34 added ui build stub 2022-11-10 11:00:37 +01:00
Annika Hannig
8ba9c09b80 applied gofmt 2022-11-10 10:58:15 +01:00
Annika Hannig
ef3acbae17 added node dev dependency, fixed postgres env 2022-11-10 10:51:17 +01:00
Annika Hannig
9339d0d296 added test CI workflow 2022-11-10 10:48:49 +01:00
Annika Hannig
e5aae9eaf8 initial goreleaser build 2022-11-10 10:42:42 +01:00
Annika Hannig
15eb5cc295 updated docker build 2022-11-10 10:42:33 +01:00
Annika Hannig
a412e3d56f ignore ui build files 2022-11-10 10:16:50 +01:00
Annika Hannig
e24e8635e8 fixed tests 2022-11-10 10:05:04 +01:00
Annika Hannig
b763d97956 go mod tidy 2022-11-10 10:04:55 +01:00
Annika Hannig
b996f8ed0f updated default credentials 2022-11-10 10:04:40 +01:00
Annika Hannig
88ba3ac7e1 updated default credentials 2022-11-10 10:04:25 +01:00
Annika Hannig
c484ff2a0b improved table memoization and performance 2022-08-02 10:51:01 +02:00
Annika Hannig
e21d9b238e updated readme 2022-08-02 09:34:43 +02:00
Annika Hannig
4e2e36970f fixed another crash with missing data 2022-07-28 10:34:46 +02:00
Annika Hannig
5dcb66965a ignore missing results for now 2022-07-28 10:23:20 +02:00
Annika Hannig
926a75fb0b better message when results are missing 2022-07-28 10:18:57 +02:00
Annika Hannig
5ec9187102 fixed crash when BGP communities are missing 2022-07-28 10:11:48 +02:00
Annika Hannig
f1fa264043 started with makefile update 2022-07-27 18:54:55 +02:00
Annika Hannig
240a49065d added helper makefile 2022-07-27 18:39:43 +02:00
Annika Hannig
c1621589a2 added store status to response meta 2022-07-27 18:00:31 +02:00
Annika Hannig
2dccc56367 added helper makefile 2022-07-27 17:08:24 +02:00
Annika Hannig
989f03a4aa added trivial test 2022-07-27 16:03:00 +02:00
Annika Hannig
7bbc38c7cf added comment 2022-07-26 17:04:34 +02:00
Annika Hannig
334ced87db moved libs out more 2022-07-26 17:02:16 +02:00
Annika Hannig
e35f4ad17c Merge branch 'feature/client-ui' into develop 2022-07-26 16:54:14 +02:00
Annika Hannig
6ccf8e6261 rs status handling 2022-07-26 16:48:42 +02:00
Annika Hannig
b844af4097 better response error 2022-07-26 16:39:55 +02:00
Annika Hannig
fb9747b76d show group select dropdown 2022-07-26 16:26:51 +02:00
Annika Hannig
2b0d4a7d31 added changelog 2022-07-26 15:07:07 +02:00
Annika Hannig
f5d4766896 utils are bad lol 2022-07-26 15:06:52 +02:00
Annika Hannig
a106a84b36 added theme compatibility 2022-07-25 23:04:07 +02:00
Annika Hannig
b2bb0b3d5d extension API 2022-07-25 22:51:25 +02:00
Annika Hannig
c70f067845 use local gitignore 2022-07-25 20:54:00 +02:00
Annika Hannig
a68897ffc8 fixed error handler 2022-07-25 20:46:49 +02:00
Annika Hannig
0a81590b56 refactored errors provider 2022-07-25 20:46:24 +02:00
Annika Hannig
d2fb819723 removed legacy react client 2022-07-25 18:50:38 +02:00
Annika Hannig
e762f11257 use and serve build 2022-07-25 18:48:19 +02:00
Annika Hannig
acc37d53d8 simplified code 2022-07-25 18:20:39 +02:00
Annika Hannig
04681d5bbb move loading indicator 2022-07-25 18:09:57 +02:00
Annika Hannig
0d5f2f9484 fixed empty filter error 2022-07-25 17:59:36 +02:00
Annika Hannig
3b6c76a37f show result summary 2022-07-25 17:49:38 +02:00
Annika Hannig
8a867675ca search filters now working 2022-07-25 17:13:40 +02:00
Annika Hannig
f5668ba5a2 improved query state and locations 2022-07-25 15:47:43 +02:00
Annika Hannig
8e96efdf23 improved validation error responses 2022-07-25 15:02:08 +02:00
Annika Hannig
2d506c4c51 show error from server 2022-07-25 11:41:07 +02:00
Annika Hannig
eae42bd93a initial search 2022-07-22 16:40:31 +02:00
Annika Hannig
266b97c368 filter routes with communities 2022-07-22 13:31:40 +02:00
Annika Hannig
6cf502800e load and merge filters 2022-07-21 12:01:49 +02:00
Annika Hannig
5b6dcb37f6 imported filter state 2022-07-21 11:09:51 +02:00
Annika Hannig
4f525d8dc9 better placement of waiting text 2022-07-21 10:44:27 +02:00
Annika Hannig
7ccf57d548 refactored bgp communities 2022-07-21 10:33:14 +02:00
Annika Hannig
0c87b8924f initial details modal 2022-07-20 17:11:37 +02:00
Annika Hannig
462938db68 load routes, show spinner, load not exported on demand, refactored scroll to anchor 2022-07-20 16:36:20 +02:00
Annika Hannig
b230411b37
fixed font awesome icons 2022-07-19 12:58:08 +00:00
Annika Hannig
2f5f98c330 imported blackhole idicator, todo: fix icons 2022-07-19 11:47:55 +02:00
Annika Hannig
ec15e9f8df added route flag indicators, todo: fix icons 2022-07-19 11:41:52 +02:00
Annika Hannig
0d1a3bed3f fixed onClick 2022-07-19 11:27:11 +02:00
Annika Hannig
1b122e67c7 initial routes rendering 2022-07-19 11:24:29 +02:00
Annika Hannig
6950593e8d added pagination 2022-07-19 10:44:06 +02:00
Annika Hannig
479c4d608b load and provide routes 2022-07-18 23:21:49 +02:00
Annika Hannig
fdf4304f7a ported related peers card 2022-07-18 18:51:38 +02:00
Annika Hannig
ec07decba0 ported local related peers 2022-07-18 18:11:50 +02:00
Annika Hannig
30bec0f37e refactored contexts 2022-07-18 17:36:14 +02:00
Annika Hannig
d696d8cb37 use global state for search input 2022-07-18 16:40:21 +02:00
Annika Hannig
072fa3ac09 route server commons, initial page 2022-07-15 10:25:20 +02:00
Annika Hannig
3632debc03 fixed anchors 2022-07-13 16:47:38 +02:00
Annika Hannig
70fab38365 initial port of neighbors table 2022-07-13 16:31:58 +02:00
Annika Hannig
96454440ac use query parameters 2022-07-13 16:31:41 +02:00
Annika Hannig
5d495aea13 updated readme 2022-07-13 14:24:40 +02:00
Annika Hannig
ec09652dec moved out of pkg because grep became unusable due to node_modules/ pollution 2022-07-13 14:15:23 +02:00
Annika Hannig
bbd69e3d05 query handling and server down message 2022-07-12 13:03:13 +02:00
Annika Hannig
a67a04e5f3 improved query param handling 2022-07-12 12:15:07 +02:00
Annika Hannig
28dd2f8d13 Added debounce to input 2022-07-12 10:41:45 +02:00
Annika Hannig
41b890ac0e initial port of the neighbors view 2022-07-11 17:15:58 +02:00
Annika Hannig
c2e33a4715 imported utils 2022-07-11 16:10:02 +02:00
Annika Hannig
395b3e9e03 imported utils 2022-07-11 16:07:29 +02:00
Annika Hannig
35bf00ff49 initial neighbors page, added icons, date handling, status 2022-07-11 15:51:19 +02:00
Annika Hannig
1ea530fca3 restructured pages 2022-07-11 10:46:08 +02:00
Annika Hannig
9ae3d8232e initial neighbors page 2022-07-11 10:36:17 +02:00
Annika Hannig
e67aaa2ee8 access host 2022-06-24 16:53:23 +02:00
Annika Hannig
c0e64d5946 added 404 fallback 2022-06-24 16:53:17 +02:00
Annika Hannig
7169c61e6f Merge branch 'develop' into feature/client-ui 2022-06-24 16:02:46 +02:00
Annika Hannig
4c94909282 removed sad things 2022-06-21 10:41:58 +02:00
Annika Hannig
6588dd4765 versionbump 2022-06-21 10:41:20 +02:00
Annika Hannig
ad4041a7a1 updated changelog 2022-06-21 10:41:02 +02:00
Annika Hannig
2a81057963 allow shorter query 2022-06-20 18:08:02 +02:00
Annika Hannig
452dfbfb66 fixed error code, validate neighbors query 2022-06-20 18:00:41 +02:00
Annika Hannig
538cefe098 fixed crash in communities to String 2022-06-20 16:42:20 +02:00
Annika Hannig
0bc219065d prevent rendering loop when using handle error 2022-06-20 10:27:14 +02:00
Annika Hannig
da87e3e4ca improved route servers handling 2022-06-19 21:17:38 +02:00
Annika Hannig
348e670491 fixed linter errors 2022-06-19 15:43:16 +02:00
Annika Hannig
a1cc181d30
initial navigation components 2022-06-19 13:30:20 +00:00
Annika Hannig
e6a987b52c
load routeservers, show errors 2022-06-17 11:32:11 +00:00
Annika Hannig
a808cfb79d
initial layout, content and config provider 2022-06-16 14:41:50 +00:00
Annika Hannig
1202d2af90
added react router 2022-06-16 12:19:05 +00:00
Annika Hannig
61962f2eef
added bootstrap v3 2022-06-16 12:14:51 +00:00
Annika Hannig
d2c20c74d5
initial base app 2022-06-16 12:09:53 +00:00
Annika Hannig
d21033824f
updated container dev environment 2022-06-16 11:36:50 +00:00
Annika Hannig
e9e8751e26
updated dependencies 2022-06-16 11:36:12 +00:00
Annika Hannig
fc466cd816
Merge branch 'develop' into feature/client-ui 2022-06-16 10:57:51 +00:00
Annika Hannig
068b270c7f Merge branch 'feature/memory-store-concurrency' into develop 2022-06-15 19:06:50 +02:00
Annika Hannig
d74c7d7abb use sync map for neighbors 2022-06-15 19:06:40 +02:00
Annika Hannig
6d58715742 use sync.Map 2022-06-15 18:56:36 +02:00
Annika Hannig
3b825913ad moved testdata, refactored route to lookup route 2022-06-15 18:33:11 +02:00
Annika Hannig
2c25d9618d Merge branch 'feature/source-contexts' into develop 2022-06-15 16:51:30 +02:00
Annika Hannig
7420dbef40 updated tests 2022-06-15 16:10:41 +02:00
Annika Hannig
d6011fc200 remove warning 2022-06-15 16:09:18 +02:00
Annika Hannig
1cba3790ea use contexts 2022-06-15 15:49:45 +02:00
Annika Hannig
fe66f1a510 use context 2022-06-15 15:43:27 +02:00
Annika Hannig
1d16ecec15 update context 2022-06-15 15:43:12 +02:00
Annika Hannig
6ee20adecd updated bgplgd to use new interface 2022-06-15 15:36:13 +02:00
Annika Hannig
e61de03411 updated interface of state server source 2022-06-15 15:18:41 +02:00
Annika Hannig
fa0dbe3f26 updated to new source interface 2022-06-15 15:11:38 +02:00
Annika Hannig
a0c942b9a9 use context, updated to new source interface 2022-06-15 14:56:31 +02:00
Annika Hannig
9bf71a0fcf refactored client to use context only 2022-06-15 14:36:22 +02:00
Annika Hannig
55089fb6d3 added context to interface 2022-06-15 14:28:31 +02:00
Annika Hannig
d9e24fa18e prepared release 2022-06-03 15:01:35 +02:00
Annika Hannig
6632fc42d2
downgrade gulp 2022-05-16 15:06:38 +00:00
Annika Hannig
1cb7f70a0b set more aggressive GC 2022-04-01 11:29:52 +02:00
Annika Hannig
e638254610 improved debug messages 2022-04-01 11:29:23 +02:00
Annika Hannig
fa10dfb2d1 Merge branch 'feature/per-rs-table' into develop 2022-03-31 11:22:39 +02:00
Annika Hannig
776b7ca2e4 use per rs tables 2022-03-31 11:22:29 +02:00
Annika Hannig
b41ae0ae1a use per RS table 2022-03-18 11:04:52 +01:00
Annika Hannig
d54f78afa7
idea 2022-03-17 20:28:17 +00:00
Annika Hannig
dc9efca4b3
include neighbor id in pkey 2022-03-17 18:01:58 +00:00
Annika Hannig
46c7931344 Merge branch 'feature/stream-parsing' into develop 2022-03-17 17:34:18 +01:00
Annika Hannig
cf63016f2b initial streaming response parsing 2022-03-17 16:44:51 +01:00
Annika Hannig
93c41a7ca0 moved route parser body 2022-03-17 15:15:23 +01:00
Annika Hannig
5bd7ab511d good morning. 2022-03-17 09:56:52 +01:00
Annika Hannig
c3ed4445a7 Merge branch 'feature/optimize-all-routes-birdwatcher' into develop 2022-03-17 09:50:12 +01:00
Annika Hannig
4a681023fe removed debug code, free resources 2022-03-17 09:50:05 +01:00
Annika Hannig
e3044abbea async load filtered routes 2022-03-16 18:50:29 +01:00
Annika Hannig
2af2d18b85 log timings 2022-03-16 17:19:28 +01:00
Annika Hannig
3782c5dcbb public function for testing 2022-03-16 17:09:40 +01:00
Annika Hannig
00b267ce5d use default ports 2022-03-15 18:13:27 +01:00
Annika Hannig
10314c2997 added backend to compose 2022-03-15 18:11:27 +01:00
Annika Hannig
c0fa87e585 upgraded dependencies 2022-03-15 17:52:19 +01:00
Annika Hannig
a908ecffda build ui container 2022-03-15 17:49:26 +01:00
Annika Hannig
ab9ba3cdbc Merge branch 'develop' into feature/client-ui 2022-03-15 17:31:02 +01:00
Annika Hannig
6d3db74b54 use transactions in neighbors store 2022-03-14 15:45:07 +01:00
Annika Hannig
726c84b6a0 use transactions 2022-03-14 15:37:54 +01:00
Annika Hannig
f7ce30a5c3 formatting 2022-03-14 15:09:07 +01:00
Annika Hannig
98b8c07c11 updated test 2022-03-14 14:51:38 +01:00
Annika Hannig
3aa76fc45f keep replacement within a tx 2022-03-14 14:03:43 +01:00
Annika Hannig
bd79b4dafc benchmark inserts 2022-03-14 14:03:06 +01:00
Annika Hannig
bd2d0b61ee Merge branch 'develop' of ssh://github.com/alice-lg/alice-lg into develop 2022-03-14 09:02:46 +01:00
Annika Hannig
89fc7b443a
simplified expression 2022-02-28 14:39:13 +00:00
Annika Hannig
fe42b38eb6
added package documentation 2022-02-28 14:36:33 +00:00
Annika Hannig
2be8d9e23c
removed redundant return 2022-02-28 14:36:18 +00:00
Annika Hannig
6c58ed0d4c
handle error check 2022-02-28 14:27:14 +00:00
Annika Hannig
19582a6287
fixed test 2022-02-28 14:20:48 +00:00
Annika Hannig
aea2c18f59 initial benchmark 2022-02-12 16:47:43 +01:00
Annika Hannig
4115e37f60 optionally discard details and omit sorting 2022-02-10 18:38:32 +01:00
Annika Hannig
be439426c7 added change 2022-02-10 16:20:38 +01:00
Annika Hannig
2fc3faeee0 use serialized json for route details 2022-02-10 14:14:08 +01:00
Annika Hannig
377aaf63fe removed old comment 2022-02-10 11:29:13 +01:00
Annika Hannig
46225b7ab9 Merge branch 'feature/limit-refresh-paralellism' into develop 2022-02-09 13:16:41 +01:00
Annika Hannig
5890740754 updated changelog 2022-02-09 12:59:35 +01:00
Annika Hannig
deca20ad9f use limited refresh parallelism 2022-02-09 12:02:22 +01:00
Annika Hannig
a1f33d90f2 added tests 2022-02-09 11:56:03 +01:00
Annika Hannig
9d93f0c9c1 initial limit on parallelism 2022-02-08 21:55:26 +01:00
Annika Hannig
292c0e0547 added memory profiling 2022-02-08 17:29:31 +01:00
Annika Hannig
9a3938fb3f bump version 2022-02-08 16:28:44 +01:00
Annika Hannig
fb630f3057 Merge branch 'master' into develop 2022-02-08 14:02:41 +01:00
Annika Hannig
d7dba44915
Merge pull request #88 from nicko170/master
Fixes the spinny wheel when searching for a filtered prefix that does…
2022-02-08 14:00:23 +01:00
nicko170
bd674993f4
Fix to tag 2022-02-08 23:25:41 +11:00
nicko170
ac15d52c5e
Fix tag 2022-02-08 23:21:05 +11:00
nicko170
f05531979f
Adding tzdata into Dockerfile as it's required for runing alice-lg, and adding a tag for each build that's sortable 2022-02-08 23:14:17 +11:00
nicko170
02b828dfa6
Fixes the spinny wheel when searching for a filtered prefix that doesn't contain as-paths. 2022-02-08 23:03:31 +11:00
Annika Hannig
143a8027f7 changed config variable name 2022-02-01 19:24:20 +01:00
Annika Hannig
76e80e28d5 Merge branch 'feature/monitor-sessions-config' into develop 2022-02-01 18:38:48 +01:00
Annika Hannig
2f75f9f1db added note to changelog 2022-02-01 18:38:01 +01:00
Annika Hannig
673cd45561 test optional alt pipe config option 2022-02-01 18:36:57 +01:00
Annika Hannig
ae8c4306b6 use alternative pipe if suffix matches 2022-02-01 18:27:25 +01:00
Annika Hannig
ee9e2d93ec added optional AltPipe config 2022-02-01 18:20:07 +01:00
Annika Hannig
9941a41d35 added loader packages 2022-01-21 19:38:54 +01:00
Annika Hannig
a1ffa39dd0 moved to package 2022-01-21 19:38:11 +01:00
Annika Hannig
d715efa01a initial create react app, added container and container helpers 2022-01-21 19:35:46 +01:00
Annika Hannig
4e0803ab47 updated changelog 2022-01-21 18:45:23 +01:00
Annika Hannig
8e13197a4b Merge branch 'feature/postgres-stores' into develop 2022-01-21 18:38:59 +01:00
Annika Hannig
891acfbd65 show status over http 2022-01-14 11:04:14 +01:00
Annika Hannig
1c73eb4802 fixed tests, added db init flag 2022-01-13 18:28:24 +01:00
Annika Hannig
8d53d7e301 test delete stale 2022-01-13 17:08:43 +01:00
Annika Hannig
232e08f569 added find by prefix and added tests 2022-01-13 16:59:19 +01:00
Annika Hannig
96ee2a76aa added find by prefix 2022-01-13 16:16:11 +01:00
Annika Hannig
2cf3ad6b59 improved prefix query validation 2022-01-13 15:59:57 +01:00
Annika Hannig
c0546be4cc removed comment 2022-01-13 14:21:17 +01:00
Annika Hannig
1f5e5baaa6 psql shell helper 2022-01-13 14:20:47 +01:00
Annika Hannig
a768bf4896 added tests 2022-01-13 14:20:33 +01:00
Annika Hannig
fc533ce12f count neighbors 2022-01-13 13:58:56 +01:00
Annika Hannig
93135c16f6 bulk lookup GetNeighborsMap, routes and neighbors schema, updated keys 2022-01-12 18:26:42 +01:00
Annika Hannig
f9584e711a use config 2022-01-12 15:31:02 +01:00
Annika Hannig
5419f5cd58 updated 2022-01-12 13:58:45 +01:00
Annika Hannig
b609229245 updated config 2022-01-12 13:58:25 +01:00
Annika Hannig
85dfcdf8d9 added config options 2022-01-12 13:58:04 +01:00
Annika Hannig
2c7fab0a38 initial database connection and migration 2022-01-12 11:50:51 +01:00
Annika Hannig
f589322cfe added pgx dependency 2022-01-11 14:37:07 +01:00
Annika Hannig
3241a150aa initial schema and initial postgres connection 2022-01-11 14:36:49 +01:00
Annika Hannig
3af580e8c1 ignore trivial errors 2022-01-10 17:13:04 +01:00
Annika Hannig
cac7d44f87 fix load time when cache is not ready with openbgpd 2022-01-10 17:02:44 +01:00
Annika Hannig
4b180a7b34 breaking changes warning 2022-01-10 17:00:30 +01:00
Annika Hannig
ebd0d17a89 Merge branch 'feature/refactor-stores' into develop 2022-01-10 14:23:48 +01:00
Annika Hannig
b85a74924f fixed store tests 2021-12-08 16:16:39 +01:00
Annika Hannig
401128a543 updated test 2021-12-08 15:53:03 +01:00
Annika Hannig
6be8927089 import testdata 2021-12-08 15:49:33 +01:00
Annika Hannig
8e2cbe76e7 moved testdata 2021-12-08 14:19:30 +01:00
Annika Hannig
bcbc7961c1 updated tests, removed dead code 2021-12-08 14:15:55 +01:00
Annika Hannig
5fc5841ece finished refactoring 2021-12-07 19:11:11 +01:00
Annika Hannig
478f828e22 updated store usage 2021-12-06 10:03:17 +01:00
Annika Hannig
bf28a1af74 refatored store interface 2021-12-03 18:17:41 +01:00
Annika Hannig
3b25d472f5 commit 2021-12-02 16:11:15 +01:00
Annika Hannig
db0583d581 more rewrite 2021-11-24 17:59:32 +01:00
Annika Hannig
7f36756efc initial routes store refactoring 2021-11-24 11:48:49 +01:00
Annika Hannig
133f9505b7 initial routes store refactoring 2021-11-19 22:21:18 +01:00
Annika Hannig
7fb77bfb8f refactored neighbors store 2021-11-19 22:00:09 +01:00
Annika Hannig
15d8130727 initial refactored neighbors store 2021-11-19 19:08:59 +01:00
Annika Hannig
50f549ea6d fixed test and merged PR 2021-11-16 10:21:36 +01:00
Annika Hannig
454b8de596
Merge pull request #60 from CTassisF/fix-routes_pipe_filtered-endpoint
Remove trailing slash to avoid redirect on Birdwatcher's routes_pipe_filtered endpoint
2021-11-16 10:18:46 +01:00
Annika Hannig
4e7104ab81 fixed more linter errors 2021-11-15 21:43:22 +01:00
Annika Hannig
c5ea17695f fixed linter errors 2021-11-15 21:30:20 +01:00
Annika Hannig
720a3b764c added package doc 2021-11-15 21:29:54 +01:00
Annika Hannig
e2fa687b01 Merge branch 'master' into develop 2021-11-15 21:12:47 +01:00
Annika Hannig
42ab0ab3c0
Merge pull request #84 from Netnod/fix-issue-83-Erroneous-peer-description
Fix issue #83 and implement peer description comming from openbgpd
2021-11-15 11:18:43 +01:00
Emil Palm
a2b2024bb3 Fix issue #83 and implement peer description comming from openbgpd 2021-11-15 10:17:28 +01:00
Annika Hannig
1f5300a278 Merge branch 'master' into develop 2021-11-01 11:26:22 +01:00
Annika Hannig
03293389ad updated changelog 2021-11-01 11:19:06 +01:00
Annika Hannig
b05b8558a4 versionbump 2021-11-01 11:18:09 +01:00
Annika Hannig
17d6b7d9f0 fixed ext. community decoding error 2021-11-01 11:17:18 +01:00
Annika Hannig
48947100b0 Merge branch 'master' into develop 2021-10-29 19:53:35 +02:00
Annika Hannig
6b1627fc00 Merge branch 'feature/code-cleanup' into develop 2021-10-29 19:51:04 +02:00
Annika Hannig
5c6445264c fixed response meta 2021-10-29 19:39:41 +02:00
Annika Hannig
2c1e3903fc harmonized spelling of neighbor 2021-10-29 19:26:30 +02:00
Annika Hannig
b6835dfff1 fixed path to test config 2021-10-28 16:38:41 +02:00
Annika Hannig
4691279b3f merged configurable http timeout 2021-10-28 16:34:29 +02:00
Annika Hannig
3580b9adde Merge branch 'master' into develop 2021-10-28 16:17:20 +02:00
Annika Hannig
7cfcb9c6e4
Merge pull request #63 from daswafford/http-timeout
AliceLG - Configurable HTTP Timeout
2021-10-28 14:16:19 +00:00
Annika Hannig
d9aae37f7e
updated usage of Meta 2021-10-27 18:04:35 +00:00
Annika Hannig
b4aed3496c
fixed errors 2021-10-27 18:01:13 +00:00
Annika Hannig
330047fac1
refactored responses and http server 2021-10-27 17:54:51 +00:00
Annika Hannig
aa9292b74c
moved test 2021-10-27 14:49:58 +00:00
Annika Hannig
ed52257269
updated use of response meta 2021-10-27 14:48:52 +00:00
Annika Hannig
982107dbc2
resolved meta conundrum 2021-10-27 14:24:07 +00:00
Annika Hannig
783dd623e3 updated responses, might not be final 2021-10-26 23:11:42 +02:00
Annika Hannig
56126b933f fixed unexpected behaviour error 2021-10-26 18:05:11 +02:00
Annika Hannig
82595a3249 fixed linter errors 2021-10-26 18:00:53 +02:00
Annika Hannig
7b15bcad3c created server struct 2021-10-25 22:03:10 +02:00
Annika Hannig
f2f7ffc609 updated comments 2021-10-25 19:25:20 +02:00
Annika Hannig
5a333ecfc8 fixed test 2021-10-25 19:22:04 +02:00
Annika Hannig
1fc17d6b3e fixed linter errors and fixed tests 2021-10-22 22:51:11 +02:00
Annika Hannig
39e2605707 moved http api 2021-10-22 22:40:03 +02:00
Annika Hannig
977e4db816 fixed tests, fixed linter errors, restructred packages 2021-10-22 22:35:40 +02:00
Annika Hannig
58e3ea8aab fixed linting errors, improved documentation 2021-10-22 22:24:56 +02:00
Annika Hannig
c6741c87b0 fixed linting errors, moved version 2021-10-22 22:17:04 +02:00
Annika Hannig
2763614dc4
removed debug stuff 2021-10-20 20:34:32 +00:00
Annika Hannig
d52e06272d
moved stores to package 2021-10-20 20:26:37 +00:00
Annika Hannig
76236cb311
fixed naming 2021-10-20 18:36:51 +00:00
Annika Hannig
608f998d0a
updated initialization 2021-10-20 18:36:30 +00:00
Annika Hannig
5471f57dd4 fixed more linter errors 2021-10-15 22:14:59 +02:00
Annika Hannig
14d860d834 fixed linter errors 2021-10-15 22:04:59 +02:00
Annika Hannig
eb915be28f moved csv string list to decoders 2021-10-15 21:59:08 +02:00
Annika Hannig
3db5d9e989 moved config to separate package 2021-10-15 21:45:28 +02:00
Annika Hannig
dacda16d26 moved bgp communities map to api package 2021-10-15 21:44:50 +02:00
Annika Hannig
5fb298b65c fixed formatting 2021-10-15 21:35:43 +02:00
Annika Hannig
ddcd7ae21c neighbour to neighbor 2021-10-15 21:24:24 +02:00
Annika Hannig
c289155aaa explicitly ignore error 2021-10-15 19:48:15 +02:00
Annika Hannig
e9731b40ce moved testdata 2021-10-15 19:47:11 +02:00
Annika Hannig
18b9a6d205 cleaned up linter errors 2021-10-15 19:44:18 +02:00
Annika Hannig
7f46529352 fixed linter errors 2021-10-15 19:31:15 +02:00
Annika Hannig
a3cd1aec64 removed dead code 2021-10-15 19:25:39 +02:00
Annika Hannig
485fc94109 updated to api package changes 2021-10-15 19:24:22 +02:00
Annika Hannig
a81d68618d added package documentation 2021-10-15 19:15:06 +02:00
Annika Hannig
7861af5bb0 cleaned up gobgp source 2021-10-15 18:10:37 +02:00
Annika Hannig
6f76691bd0 updated test 2021-10-15 17:41:13 +02:00
Annika Hannig
1707cb3396 cleaned up code for openbgpd 2021-10-15 17:39:36 +02:00
Annika Hannig
669c54c64e simplified statement 2021-10-15 17:39:09 +02:00
Annika Hannig
e0391fec67 added package documentation 2021-10-15 17:36:43 +02:00
Annika Hannig
7d4b78c245 neighbour renaming 2021-10-15 17:24:37 +02:00
Annika Hannig
f4de929cc7 unified struct 2021-10-15 17:22:21 +02:00
Annika Hannig
0a529a55a5 added package level description 2021-10-15 17:18:58 +02:00
Annika Hannig
a1665a43de refactored for uniformity 2021-10-15 17:17:35 +02:00
Annika Hannig
164d3e9318 cleaned up caches package 2021-10-15 17:17:11 +02:00
Annika Hannig
bb28f4e8d3 fixed all linter errors and cleaned up neighbor / neighbour spelling 2021-10-15 17:09:52 +02:00
Annika Hannig
0d9962207c added linter configuration 2021-10-15 17:09:03 +02:00
Annika Hannig
45b0395f2e fixed linter error 2021-10-15 12:03:16 +02:00
Annika Hannig
c07ab88b96
Merge pull request #80 from CTassisF/master
Add lg.ix.br (IX.br) to the list of production examples of Alice-LG
2021-10-13 19:01:13 +00:00
César de Tassis Filho
875e1ef76e Add lg.ix.br (IX.br) to the list of production examples of Alice-LG 2021-10-13 15:36:13 -03:00
Annika Hannig
d27b0dca20 Merge branch 'master' into develop 2021-10-10 15:35:43 +02:00
Annika Hannig
a17ef713f9
Merge pull request #72 from bluikko/github-action-docker
Docker image build Action
2021-10-10 15:29:02 +02:00
bluikko
303a29248f
Add pushing the image to GitHub registry 2021-10-10 14:22:12 +07:00
bluikko
e36a1bc5e4
Merge branch 'alice-lg:master' into github-action-docker 2021-10-10 13:28:41 +07:00
Annika Hannig
efbe4427ff fixed name 2021-10-09 17:12:26 +02:00
Annika Hannig
7b1d876cd8 updated changelog 2021-10-09 17:03:43 +02:00
Annika Hannig
9d0069b59b Merge tag '5.0.0' into develop
5.0.0
2021-10-09 16:54:25 +02:00
Annika Hannig
05135eef0c Merge branch 'release/5.0.0' 2021-10-09 16:54:20 +02:00
Annika Hannig
928715d2ae removed version specifics 2021-10-09 16:54:13 +02:00
Annika Hannig
6328cc92d5 versionbump 2021-10-09 16:53:40 +02:00
Annika Hannig
6acdd0cbfa fixed irrexplorer links (fixes: #78) 2021-10-09 16:41:16 +02:00
Annika Hannig
3170ab95cc Merge branch 'master' into develop 2021-10-09 16:35:04 +02:00
Annika Hannig
520bbf195e
Merge pull request #64 from johannesmoos/bugfix/fix-readme-link
Fix example conf link; add some Alice prod examples
2021-10-09 16:34:37 +02:00
Annika Hannig
61b56a7d6c added support credits 2021-10-09 16:32:20 +02:00
Annika Hannig
5d77530214
Merge branch 'feature/configure-cache' into develop 2021-09-15 17:48:25 +02:00
Annika Hannig
8b5509f0c3
use cache size; default cache ttl fixed 2021-09-15 17:44:18 +02:00
Annika Hannig
9c193029a0
configure cache size for state server 2021-09-15 17:40:48 +02:00
Annika Hannig
629eca4f37
configure cache size for state server 2021-09-15 17:32:14 +02:00
Annika Hannig
2417c86718
added routes cache size config to bgplgd 2021-09-15 17:28:01 +02:00
Annika Hannig
09426f3b05
enable filtered routes 2021-09-15 17:08:53 +02:00
Annika Hannig
ab8698e112
fixed tests, added bgp community enumeration 2021-09-15 15:44:08 +02:00
Annika Hannig
a75c4dd14c
maintainer change 2021-07-09 17:49:57 +02:00
Annika Hannig
e0083b806f
updated license template, license is the same 2021-07-09 17:48:19 +02:00
Annika Hannig
10139bc527
fixes #57, do not render indicator when no reject candidates are configured 2021-07-09 15:42:22 +02:00
Annika Hannig
996465988f
added openbgpd config to docs 2021-07-05 19:43:19 +02:00
Annika Hannig
473054a919
Merge branch 'feature/sources/openbgpd' into develop 2021-07-05 19:34:22 +02:00
Annika Hannig
db9aaf8d5b
added bgplgd as separate source type for now 2021-07-05 19:34:05 +02:00
Annika Hannig
271175fcad
some fields are not available on openbgpd 2021-07-05 19:02:23 +02:00
Annika Hannig
20941b8bc0
added caching 2021-07-05 17:54:37 +02:00
Annika Hannig
8aa472c2e0
prepared bgplgd support; fixed cache info 2021-07-05 17:13:36 +02:00
Annika Hannig
a58b3128b3
added cache ttl to config 2021-07-05 16:21:46 +02:00
Annika Hannig
7cd4603232
improved documentation 2021-07-05 16:21:12 +02:00
Annika Hannig
d5500b8e0b
hide unsupported fields 2021-07-05 15:51:33 +02:00
Annika Hannig
9c21916135
improved handling of empty neighbors 2021-07-05 14:56:57 +02:00
Annika Hannig
ad544243ae
lazy loading not exported routes is not supported for openbgpd right now 2021-07-05 14:24:55 +02:00
Annika Hannig
36d78f0e74
added compat option 2021-07-05 11:55:59 +02:00
Annika Hannig
859be64879
added flags to example 2021-07-05 11:55:39 +02:00
Annika Hannig
14e9a87f1f
fixed route lookup 2021-07-02 15:32:51 +02:00
Annika Hannig
ea6b740f99
include communities 2021-07-02 15:14:38 +02:00
Annika Hannig
d75db85e96
just fixing some linter issues 2021-07-02 14:30:43 +02:00
Annika Hannig
c15f78d76f
expose backend type 2021-07-02 14:16:13 +02:00
Annika Hannig
aa85c3f13a
check if state is 'up' is now a function 2021-07-02 11:17:46 +02:00
Annika Hannig
d804c8a1ba
decode neighbors, add routeserverID 2021-07-02 11:16:35 +02:00
bluikko
776eebe276
Docker image build Action 2021-06-29 17:37:26 +07:00
Johannes Moos
49b60cd44b
Fix example conf link; add some Alice prod examples 2021-05-01 18:38:56 +02:00
Annika Hannig
2cc990693a
initial source with config 2021-04-26 22:01:26 +02:00
Annika Hannig
635c1e145e
add openbgpd source 2021-04-26 21:26:37 +02:00
Annika Hannig
38df30585d
Merge branch 'develop' into feature/sources/openbgpd 2021-04-26 21:07:06 +02:00
Annika Hannig
8533854a37
initial source 2021-04-26 21:02:06 +02:00
David Swafford
94b33b8da5 Update HTTP Timeout to 120s and add test verifying default value 2021-04-26 10:49:42 -07:00
Annika Hannig
1bc9f42271
decode routes response 2021-04-26 18:02:34 +02:00
Annika Hannig
ef8ee57726
decode list of string into list of int 2021-04-26 17:45:09 +02:00
Annika Hannig
953bb83eef
decode routes response 2021-04-26 17:44:25 +02:00
Annika Hannig
1ff744aa88
added rib example 2021-04-26 15:11:26 +02:00
Annika Hannig
fcca7795ad
implemented neighbours status 2021-04-26 11:40:12 +02:00
Annika Hannig
28db125987
added source stub 2021-04-26 11:21:37 +02:00
David Swafford
1aa728cfbc AliceLG - Make HTTP Timeout Configurable 2021-04-20 17:52:12 -07:00
César de Tassis Filho
5ee1232e98 Remove trailing slash to avoid redirect on Birdwatcher's routes_pipe_filtered endpoint 2021-04-17 01:33:56 -03:00
Annika Hannig
90927d832b
Merge tag '4.3.2' into develop
4.3.2
2021-04-16 12:14:25 +02:00
Annika Hannig
811e20147d
Merge branch 'release/4.3.2' 2021-04-16 12:14:17 +02:00
Annika Hannig
b0095524fe
make a frontend production build 2021-04-16 12:14:11 +02:00
Annika Hannig
14745acc77
fixed docker build 2021-04-16 11:57:23 +02:00
Annika Hannig
eef74b7e9b
build a local binary using docker 2021-04-16 11:10:39 +02:00
Annika Hannig
7711de1646
Merge tag '4.3.1' into develop
4.3.1
2021-04-16 10:27:01 +02:00
Annika Hannig
e7808438c9
Merge branch 'release/4.3.1' 2021-04-16 10:26:56 +02:00
Annika Hannig
554ceedaed
fixed makefile issues 2021-04-16 10:26:49 +02:00
Annika Hannig
1cfcd97593
adhere to semantic versioning 2021-04-16 10:25:44 +02:00
Annika Hannig
67715768b4
Merge branch 'master' into develop 2021-04-16 10:23:15 +02:00
Annika Hannig
d39d70b83e
fixed paths 2021-04-16 10:23:04 +02:00
Annika Hannig
a16870bfb5
allow for overriding docker with podman 2021-04-16 10:21:53 +02:00
Annika Hannig
fb7f51ad9e
fixed dockerfile 2021-04-15 20:05:30 +02:00
Annika Hannig
4b68b7d908
Merge tag '4.2.1' into develop
4.2.1
2021-04-15 19:41:32 +02:00
Annika Hannig
d0a48492c7
Merge branch 'release/4.2.1' 2021-04-15 19:41:27 +02:00
Annika Hannig
0c70a37fdb
fixed version info 2021-04-15 19:40:53 +02:00
Annika Hannig
9f85e19375
prepared release 2021-04-15 19:40:34 +02:00
Annika Hannig
c17b826703
Merge branch 'feature/config-master-table' into develop 2021-04-15 19:23:28 +02:00
Annika Hannig
02b3b77d62
this warrents a versionbump 2021-04-15 19:22:59 +02:00
Annika Hannig
8ffa646988
Merge branch 'develop' into feature/config-master-table 2021-04-15 19:22:04 +02:00
Annika Hannig
82be9f0308
added static build 2021-04-15 19:21:28 +02:00
Annika Hannig
3d77e502ca
fixed paths and imports 2021-04-15 19:06:34 +02:00
Annika Hannig
dfeff9d48f
configure main table 2021-04-15 18:59:22 +02:00
Annika Hannig
18adbb8f9f
Merge branch 'master' into develop 2021-04-15 18:33:57 +02:00
Annika Hannig
6642aa0a70
decode more neighbor summary 2021-04-15 18:31:12 +02:00
Annika Hannig
fb3bc1b745
added debugging helper 2021-04-15 17:46:52 +02:00
Annika Hannig
74c6213d32
decode neighbors 2021-04-15 16:47:27 +02:00
Annika Hannig
8a8b070ffe
Merge pull request #58 from daswafford/alice-lg-upstream-go-fmt
Alice-LG Upstream Go Formatting Changes
2021-04-15 10:10:03 +02:00
David Swafford
25be7971cf Alice-LG Upstream Go Formatting Changes 2021-04-14 13:09:40 -07:00
Annika Hannig
5271a4e0ba
added neighbors 2021-03-26 10:55:55 +01:00
Annika Hannig
ebaf40a180
initial status response 2021-03-26 09:58:26 +01:00
Annika Hannig
1ccf36db0c
added json body helper 2021-03-26 09:58:05 +01:00
Annika Hannig
553144f9fd
initial example data 2021-03-26 09:39:51 +01:00
Annika Hannig
1fa5b6a149
parse timeframe notation 2021-03-26 09:29:17 +01:00
Annika Hannig
8e2565b3f6
initial decode status 2021-03-25 21:48:18 +01:00
Annika Hannig
b6c5e6a9cb
fixed makefile 2021-03-25 21:48:05 +01:00
Annika Hannig
a7faf644b0
fixed documentation 2021-03-25 21:47:56 +01:00
Annika Hannig
18a4b67748
refactored decoders for reusability 2021-03-25 21:47:31 +01:00
Annika Hannig
9d7611916b
use package source 2021-03-24 15:43:00 +01:00
Annika Hannig
7533d1e198
initial openbgpd source 2021-03-24 14:26:46 +01:00
Annika Hannig
93501c6712
Merge branch 'feature/refactor/initial-package-refactoring' into develop 2021-03-22 17:37:15 +01:00
Annika Hannig
d725fc0b2b
refactored package structure 2021-03-22 17:35:20 +01:00
Annika Hannig
d3f22e7adc
moved testdata 2021-03-22 17:34:56 +01:00
Annika Hannig
c12bf7ac7c
ignore build artifacts 2021-03-22 17:34:36 +01:00
Annika Hannig
393625ee43
even more structural refactoring 2021-03-22 16:50:08 +01:00
Annika Hannig
0af0d9e183
moved backend to pkg 2021-03-22 16:25:47 +01:00
Annika Hannig
3432fc3af9
Merge branch 'feature/feature/maintenance-updates' into develop 2021-03-22 16:12:12 +01:00
Annika Hannig
81cf0a474a
removed broken docker image 2021-03-22 16:08:15 +01:00
Annika Hannig
39005b5f50
disabled gobgp example 2021-03-22 15:49:06 +01:00
Annika Hannig
0363c56c2f
removed rice.box dependency 2021-03-22 15:42:34 +01:00
Annika Hannig
c5f459329e
use go embed 2021-03-22 15:41:36 +01:00
Annika Hannig
2e48e42795
use embed in instead of rice.box 2021-03-22 15:11:31 +01:00
Annika Hannig
3f8540561e
this is a package now 2021-03-22 15:10:38 +01:00
Annika Hannig
733cc5b68f
updated coding style 2021-03-22 15:10:00 +01:00
Annika Hannig
492c7f9f93
dependencies update 2021-03-22 14:42:45 +01:00
Annika Hannig
2cbaacf907
Merge branch 'master' into develop 2021-03-22 12:04:49 +01:00
Annika Hannig
d8286c1fb0
Merge pull request #52 from mxsasha/patch-1
Update DE-CIX example URLs
2021-03-22 11:58:06 +01:00
Sasha Romijn
4302734a87
Update DE-CIX example URLs
The old example URLs seem broken in a few ways.
2021-02-12 19:25:23 +01:00
Annika Hannig
d196c88e29
version bump 2020-07-29 12:42:34 +02:00
Annika Hannig
028d13c99b
fixed test 2020-07-29 12:41:36 +02:00
Annika Hannig
6d61321346
Merge branch 'daswafford-gobgp-grpc-timeout' 2020-07-29 12:38:03 +02:00
Annika Hannig
607868f5de
removed birdwatcher config params, made processing_timeout optional 2020-07-29 12:34:55 +02:00
Annika Hannig
9d6baca1bb
added default value to config 2020-07-29 12:32:28 +02:00
Annika Hannig
d9a196f344
moved config options to the right source 2020-07-29 12:19:48 +02:00
Annika Hannig
6f81e79b2e
Merge branch 'gobgp-grpc-timeout' of https://github.com/daswafford/alice-lg into daswafford-gobgp-grpc-timeout 2020-07-29 12:09:03 +02:00
Annika Hannig
090b950748
added version 2020-07-29 12:08:51 +02:00
David Swafford
faae66f275 AliceLG - Configurable timeout for GoBGP gRPC calls 2020-07-22 12:09:14 -07:00
Annika Hannig
afc7baf79c
Merge pull request #32 from stv0g/fix-api-url
Fix api endpoints in README.md
2020-04-20 14:10:08 +02:00
Steffen Vogel
2024d59f57 fix api endpoints in readme 2020-03-12 00:32:39 +01:00
Matthias Hannig
c2eff61eb7
Update README.md
added hint to install yarn
2020-01-21 16:00:06 +01:00
Matthias Hannig
18d5f585f2
Merge tag '4.1.0' into develop
4.1.0
2019-12-23 19:46:02 +01:00
Matthias Hannig
40f182f20a
Merge branch 'release/4.1.0' 2019-12-23 19:45:57 +01:00
Matthias Hannig
176ad914de
versionbump 2019-12-23 19:45:45 +01:00
Matthias Hannig
3450db1587
updated changelog 2019-12-23 19:45:27 +01:00
Matthias Hannig
63c8b7478c
Merge branch 'feature/related-peers' into develop 2019-10-10 22:02:05 +02:00
Matthias Hannig
65d97aba98
stylesheet for related neighbors 2019-10-10 22:01:54 +02:00
Matthias Hannig
1c6df03d7d
use a link for down sessions 2019-10-10 22:00:40 +02:00
Matthias Hannig
f2dce93653
display route stats 2019-10-10 20:13:45 +02:00
Matthias Hannig
a72e17221e
display related neighbors 2019-10-08 17:30:24 +02:00
Matthias Hannig
5e4c88caff
load global related peers 2019-10-08 11:02:44 +02:00
Matthias Hannig
1d90ad4c09
sort neighbors 2019-10-07 23:22:01 +02:00
Matthias Hannig
b6272d884a
handle related peers 2019-10-07 22:06:20 +02:00
Matthias Hannig
ac2dcdfbc2
added related peers actions 2019-10-07 22:01:24 +02:00
Matthias Hannig
e0a8ef6239
created neighbor lookup endpoint 2019-10-07 18:29:25 +02:00
Matthias Hannig
901bd7b8ab
test neighbor filter in store 2019-10-07 18:21:28 +02:00
Matthias Hannig
9113290803
added neighbor filter 2019-10-07 18:13:08 +02:00
Matthias Hannig
ed2b4201a1
improved test 2019-10-07 18:07:20 +02:00
Matthias Hannig
2e94cd785b
filter neighbors 2019-10-07 17:32:56 +02:00
Matthias Hannig
f20cb744f5
use id compare 2019-10-05 12:16:57 +02:00
Matthias Hannig
0f2a76f4ac
use id suffix 2019-10-05 12:10:27 +02:00
Matthias Hannig
f3ef10c3e3
changed naming 2019-10-05 12:09:25 +02:00
Matthias Hannig
074fd9fc8b
include routeserver id 2019-10-05 11:56:38 +02:00
Matthias Hannig
f311315a27
added routeserver id to result 2019-10-05 11:53:38 +02:00
Matthias Hannig
1ed4744832
implement filterable interface 2019-09-27 11:17:48 +02:00
Matthias Hannig
0849ee23c1
prepared neighbors lookup 2019-09-25 01:53:21 +02:00
Matthias Hannig
93641863c5
added related neighbor to reponse 2019-09-24 21:40:49 +02:00
Matthias Hannig
ed3751ce3a
reduced image size 2019-09-24 16:58:24 +02:00
Matthias Hannig
2584cfcd80
Merge branch 'feature/containerization' into develop 2019-09-24 16:11:30 +02:00
Matthias Hannig
9d1d4358b1
build application 2019-09-24 16:11:21 +02:00
Matthias Hannig
c594fb3a67
ignore build artifacts 2019-09-24 15:49:44 +02:00
Matthias Hannig
f662e2f11c
Merge tag '4.0.3' into develop
Release 4.0.3
2019-09-09 16:02:12 +02:00
Matthias Hannig
2d717b91de
Merge branch 'release/4.0.3' 2019-09-09 16:02:06 +02:00
Matthias Hannig
cdcc52d0fd
versionbump 2019-09-09 16:01:58 +02:00
Matthias Hannig
13a733ecf2
Merge branch 'bugfix/multitable-birdwatcher-check-protocol-up' into develop 2019-09-09 16:00:21 +02:00
Matthias Hannig
e55c4a7d30
bird2 compatible check with protocol state 2019-09-09 15:59:54 +02:00
Matthias Hannig
d02ed4c049
added protocol state helper 2019-09-09 15:59:07 +02:00
Matthias Hannig
e750ed6ed2
Merge tag '4.0.2' into develop
Release 4.0.2
2019-09-09 14:58:03 +02:00
Matthias Hannig
c403436dcc
Merge branch 'release/4.0.2' 2019-09-09 14:57:49 +02:00
Matthias Hannig
45623133bb
updated version and changelog 2019-09-09 14:57:39 +02:00
Matthias Hannig
8da8a1684c
fixed bug in getMasterPipeName 2019-09-09 14:52:28 +02:00
Matthias Hannig
5e4e4154c6
added test for getMasterPipeName 2019-09-09 14:52:15 +02:00
Matthias Hannig
3be429e2ef
force lower case state 2019-08-14 16:04:16 +02:00
424 changed files with 32336 additions and 19646 deletions

2
.dockerignore Normal file

@@ -0,0 +1,2 @@
ui/node_modules/
ui/build/

49
.github/workflows/docker-image.yml vendored Normal file

@@ -0,0 +1,49 @@
name: Docker Image CI

on:
  release:
    types: [created]

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v2
      - name: Log in to the Container registry
        uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      - name: Generate branch tag
        id: set_tag
        run: |
          echo "::set-output name=tag::${{ github.ref_name }}-$(git rev-parse --short HEAD)-$(date +%s)"
      - name: Build and push Docker image
        uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}, ${{ env.REGISTRY }}/${{env.IMAGE_NAME}}:${{ steps.set_tag.outputs.tag }}
          labels: ${{ steps.meta.outputs.labels }}

37
.github/workflows/release.yml vendored Normal file

@ -0,0 +1,37 @@
name: Release
on:
release:
types: [created]
permissions:
contents: write
jobs:
goreleaser:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Unshallow
run: git fetch --prune --unshallow
- name: Setup Go
uses: actions/setup-go@v3
with:
go-version-file: 'go.mod'
cache: true
- name: Setup NodeJS
uses: actions/setup-node@v3
with:
node-version: 16
- name: Run GoReleaser
uses: goreleaser/goreleaser-action@v3.2.0
with:
version: latest
args: release --rm-dist
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

81
.github/workflows/test.yml vendored Normal file
View File

@ -0,0 +1,81 @@
name: Test
on: [ push, workflow_dispatch ]
jobs:
test:
runs-on: ubuntu-latest
services:
postgres:
image: postgres
env:
POSTGRES_USER: alice
POSTGRES_PASSWORD: alice
POSTGRES_DB: alice
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 5432:5432
steps:
- uses: actions/checkout@v3
# Install development dependencies
- name: Setup Go
uses: actions/setup-go@v3
with:
go-version: '1.21.x'
- name: Setup NodeJS
uses: actions/setup-node@v3
with:
node-version: 16
- name: Add UI build stub
working-directory: ./ui
run: |
mkdir -p ./build
touch ./build/UI_BUILD_STUB
# Formatting
- name: Check formatting
run: |
test -z $(gofmt -l ./pkg)
test -z $(gofmt -l ./cmd)
# Vet
- name: Vet
run: |
go vet ./pkg/...
go vet ./cmd/...
# Lint
- name: Lint
run: |
go install golang.org/x/lint/golint@latest
golint -set_exit_status ./pkg/...
golint -set_exit_status ./cmd/...
# Test environment
- name: Setup Test Database
env:
PGHOST: localhost
PGPORT: 5432
PGUSER: alice
PGPASSWORD: alice
working-directory: ./db
run: |
./init.sh -c -t
# Run Tests
- name: UI Tests
run: make ui_test
- name: Backend Tests
run: make backend_test

4
.gitignore vendored
View File

@ -18,6 +18,7 @@ venv/
django_backend/birdseye/static
node_modules/
ui/node_modules/
builds/
@ -28,8 +29,9 @@ DIST/
var/
etc/alice-lg/alice.conf
.DS_Store
*coverage*
dist/

34
.goreleaser.yaml Normal file
View File

@ -0,0 +1,34 @@
# This is an example .goreleaser.yml file with some sensible defaults.
# Make sure to check the documentation at https://goreleaser.com
before:
hooks:
- go mod tidy
- make -C ui/
builds:
- main: ./cmd/alice-lg
env:
- CGO_ENABLED=0
goos:
- linux
- freebsd
- netbsd
- openbsd
goarch:
- amd64
- arm64
ldflags:
- -X "github.com/alice-lg/alice-lg/pkg/config.Version={{ .Version }}"
archives:
- name_template: 'alice-lg_{{ .Version }}_{{ .Os }}_{{ .Arch }}'
checksum:
name_template: 'checksums.txt'
algorithm: sha256
snapshot:
name_template: "{{ incpatch .Version }}-next"
changelog:
skip: true
release:
extra_files:
- glob: ./CHANGELOG.md

View File

@ -1,6 +1,147 @@
# Changelog
## 6.1.0 (2024-02-12)
* Added memory pools for deduplicating route information.
This drastically reduces the memory consumption.
* Single table birdwatcher source is now using stream
response parsing. This also reduces the memory consumption.
However, as there are no waiting times, CPU load can get
high. You can introduce a delay while parsing with the
`stream_parser_throttle` parameter in the config.
* Improved search performance and timeout handling.
* The BGP info modal can now be dismissed by pressing `esc`.
* Global search now supports querying for BGP communities.
Please set the `routes_store_query_limit` config variable.
Some communities might match a large number of routes.
* Examples for the global search can be added using the
theme's `Alice.updateContent` API:
`{lookup: {examples: [["asn", "AS2342"], ...]}}`. Valid types
are: `asn`, `community`, `prefix` and `q`.
* Performance in search has been improved.
You can now set the `prefix_lookup_community_filter_cutoff`
config variable to prevent timeouts with large result sets.
* The configuration now supports defining variables like
`$ASN01 = 65535`, which can be used for expressively describing
communities. For now see `pkg/config/testdata/alice.conf` for
usage.
* Bugfixes:
- Fixed parsing and handling of ext community filters.
- Fixed stylesheet compatibility: For route flags, new SVG icons
are now wrapped in an `<i>` tag to ensure backward compatibility.
- Fixed trying to decode an 'undefined' value for a query filter.
- Spelling fixes
* Deprecations:
- The `/api/v1/routeservers/<rs>/neighbors/<id>/routes` endpoint
is removed.
## 6.0.0 (2022-11-10)
* Pure functional react UI!
Frontend is now using `create-react-app` for scripts and
contexts instead of redux.
**Theme compatibility**
- Stylesheets are compatible
- Content API is compatible
- API now provides `Alice.onLayoutReady((page) => ... )`
callback. This should be used to install additional
DOM event listeners for extensions.
So, if you want to inject additional DOM nodes into
the UI and previously used something like:
`document.addEventListener("DOMContentLoaded", function() { ... }`
you now need to use the `Alice.onLayoutReady(function(main) { ... })`
callback.
## 5.1.1 (2022-06-21)
* Improved search query validation.
* Fixed http status response when validation fails.
Was Internal Server Error (500), now: Bad Request (400).
* Memory-Store is now using sync.Map to avoid timeouts
due to aggressive locking.
## 5.1.0 (2022-06-02)
* **BREAKING CHANGE** The spelling of "neighbors" is now harmonized.
Please update your config and replace e.g. `neighbour.asn`
with `neighbor.asn` (in case of JavaScript errors).
This also applies to the API.
In the config `neighbors_store_refresh_interval` needs to be updated.
* Parallel route / neighbor store refreshes: Route servers are no
longer queried sequentially. A jitter is applied to avoid hitting
all servers at exactly the same time.
* Parallelism can be tuned through the config parameters:
[server]
routes_store_refresh_parallelism = 5
neighbors_store_refresh_parallelism = 10000
A value of 1 is a sequential refresh.
* Postgres store backend: Not keeping routes and neighbors in
memory might reduce the memory footprint.
* Support for alternative pipe in `multi_table` birdwatcher
configurations.
* Reduced memory footprint by precomputing route details
## 5.0.1 (2021-11-01)
* Fixed parsing extended communities in openbgpd source causing a crash.
## 5.0.0 (2021-10-09)
* OpenBGPD support! Thanks to the Route Server Support Foundation
for sponsoring this feature!
* Backend cleanup and restructured Go codebase.
This should make working with containers a bit easier.
* Fixed links to the IRR Explorer.
## 4.3.0 (2021-04-15)
* Added configurable main table
## 4.2.0 (2020-07-29)
* Added GoBGP processing_timeout source config option
## 4.1.0 (2019-12-23)
* Added related neighbors feature
## 4.0.2, 4.0.3 (2019-09-09)
* Fixed issue with multitable bird: `getMasterPipeName` returned incorrect
pipe.
* Fixed state check in multitable bird source with bird2.
## 4.0.1 (2019-03-07)

47
Dockerfile Normal file
View File

@ -0,0 +1,47 @@
#
# Alice - The friendly BGP looking glass
#
# Build frontend first
FROM node:latest AS ui
# Install dependencies
WORKDIR /src/alice-lg/ui
ADD ui/package.json .
ADD ui/yarn.lock .
RUN yarn install
# Add frontend
ADD ui/ .
# Build frontend
RUN yarn build
# Build the backend
FROM golang:1.21 AS backend
# Install dependencies
WORKDIR /src/alice-lg
ADD go.mod .
ADD go.sum .
RUN go mod download
ADD . .
# Add client
COPY --from=ui /src/alice-lg/ui/build ui/build
WORKDIR /src/alice-lg/cmd/alice-lg
RUN make alpine
FROM alpine:latest
RUN apk add -U tzdata
COPY --from=backend /src/alice-lg/cmd/alice-lg/alice-lg-linux-amd64 /usr/bin/alice-lg
RUN ls -lsha /usr/bin/alice-lg
EXPOSE 7340:7340
CMD ["/usr/bin/alice-lg"]

40
LICENSE
View File

@ -1,33 +1,31 @@
BSD License
BSD 3-Clause License
Copyright (c) 2016-2018, Peering GmbH / ECIX
Copyright (c) 2018-present, Matthias Hannig
Copyright (c) 2018-present, Annika Hannig
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
THE POSSIBILITY OF SUCH DAMAGE.
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

116
Makefile
View File

@ -1,119 +1,43 @@
#
# Build the Alice Looking Glass
# -----------------------------
#
# Build the Alice Looking Glass locally
#
PROG=alice-lg
ARCH=amd64
SYSTEM_INIT=systemd
# == END BUILD CONFIGURATION ==
VERSION=$(shell cat ./VERSION)
# Specify build server for remotely building the RPM
# you can do this when you invoke the makefile
# using:
# make remote_rpm BUILD_SERVER=build-rpm.example.com
BUILD_SERVER=''
DIST=DIST/
REMOTE_DIST=$(PROG)-$(DIST)
RPM=$(PROG)-$(VERSION)-1.x86_64.rpm
LOCAL_RPMS=RPMS
all: alice
client_dev:
$(MAKE) -C client/
test: ui_test backend_test
client_prod:
$(MAKE) -C client/ client_prod
alice: ui backend
cp cmd/alice-lg/alice-lg-* bin/
backend_dev: client_dev
$(MAKE) -C backend/
ui:
$(MAKE) -C ui/
dev:
$(MAKE) -C backend/ osx-dev
ui_test:
$(MAKE) -C ui/ test
backend:
$(MAKE) -C cmd/alice-lg/ static
backend_prod: client_prod
$(MAKE) -C backend/ bundle
$(MAKE) -C backend/ linux
alice: client_prod backend_prod
mv backend/alice-lg-* bin/
dist: clean alice
mkdir -p $(DIST)opt/alice-lg/alice-lg/bin
mkdir -p $(DIST)etc/alice-lg
# Adding post install script
cp install/scripts/after_install $(DIST)/.
ifeq ($(SYSTEM_INIT), systemd)
# Installing systemd services
mkdir -p $(DIST)usr/lib/systemd/system/
cp install/systemd/* $(DIST)usr/lib/systemd/system/.
else
# Installing upstart configuration
mkdir -p $(DIST)/etc/init/
cp install/upstart/* $(DIST)etc/init/.
endif
# Copy example configuration
cp etc/alice-lg/alice.example.conf $(DIST)/etc/alice-lg/alice.example.conf
# Copy application
cp bin/$(PROG)-linux-$(ARCH) DIST/opt/alice-lg/alice-lg/bin/.
rpm: dist
# Clear tmp failed build (if any)
mkdir -p $(LOCAL_RPMS)
# Create RPM from dist
fpm -s dir -t rpm -n $(PROG) -v $(VERSION) -C $(DIST) \
--architecture $(ARCH) \
--config-files /etc/alice-lg/alice.example.conf \
--after-install $(DIST)/after_install \
opt/ etc/
mv $(RPM) $(LOCAL_RPMS)
build_server:
ifeq ($(BUILD_SERVER), '')
$(error BUILD_SERVER not configured)
endif
remote_rpm: build_server dist
mkdir -p $(LOCAL_RPMS)
# Copy distribution to build server
ssh $(BUILD_SERVER) -- rm -rf $(REMOTE_DIST)
scp -r $(DIST) $(BUILD_SERVER):$(REMOTE_DIST)
ssh $(BUILD_SERVER) -- fpm -s dir -t rpm -n $(PROG) -v $(VERSION) -C $(REMOTE_DIST) \
--architecture $(ARCH) \
--config-files /etc/alice-lg/alice.example.conf \
--after-install $(REMOTE_DIST)/after_install \
opt/ etc/
# Get rpm from server
scp $(BUILD_SERVER):$(RPM) $(LOCAL_RPMS)/.
backend_test:
mkdir -p ./ui/build
touch ./ui/build/UI_BUILD_STUB
go test ./pkg/...
rm ./ui/build/UI_BUILD_STUB
clean:
rm -f bin/alice-lg-linux-amd64
rm -f bin/alice-lg-osx-amd64
rm -rf $(DIST)
rm ./ui/build/UI_BUILD_STUB
.PHONY: backend ui clean

11
Makefile.docker Normal file
View File

@ -0,0 +1,11 @@
# Build Docker Image
APP_VERSION=$(shell cat ./VERSION)
all: image
image:
docker build . -t alice-lg:latest -t alice-lg:$(APP_VERSION)

178
README.md
View File

@ -2,24 +2,48 @@
__"No, no! The adventures first, explanations take such a dreadful time."__
_Lewis Carroll, Alice's Adventures in Wonderland & Through the Looking-Glass_
Take a look at an Alice-LG production examples at:
Take a look at Alice-LG production examples at:
- https://lg.de-cix.net/
- https://lg.ecix.net/
- https://lg.ams-ix.net
- https://lg.bcix.de/
- https://lg.megaport.com/
- https://lg.netnod.se/
- https://alice-rs.linx.net/
- https://lg.ix.br/
- https://lg.ix.asn.au/
- https://lg.ix.nz/
And check out the API at:
- https://lg.ecix.net/api/config
- https://lg.ecix.net/api/routeservers
- https://lg.ecix.net/api/routeservers/0/status
- https://lg.ecix.net/api/routeservers/0/neighbours
- https://lg.ecix.net/api/routeservers/0/neighbours/ID109_AS31078/routes
- https://lg.ecix.net/api/lookup/prefix?q=217.115.0.0
- https://lg.de-cix.net/api/v1/config
- https://lg.de-cix.net/api/v1/routeservers
- https://lg.de-cix.net/api/v1/routeservers/rs1_fra_ipv4/status
- https://lg.de-cix.net/api/v1/routeservers/rs1_fra_ipv4/neighbors
- https://lg.de-cix.net/api/v1/routeservers/rs1_fra_ipv4/neighbors/R194_106/routes
- https://lg.de-cix.net/api/v1/lookup/prefix?q=217.115.0.0
## Breaking Changes
### 6.0.0
With the new functional react UI, the `DOMContentLoaded` event can no
longer be used for injecting additional content. Please use
`Alice.onLayoutReady(function(main) { ... });` instead.
### 5.1.0
The spelling of "neighbors" is now harmonized. Please update your config and
replace e.g. neighbour.asn with neighbor.asn (in case of JavaScript errors).
## Explanations
Alice-LG is a BGP looking glass which gets its data from external APIs.
Currently Alice-LG supports the following APIs:
- [birdwatcher API](https://github.com/alice-lg/birdwatcher) for [BIRD](http://bird.network.cz/)
- [GoBGP](https://osrg.github.io/gobgp/)
- [bgplgd](https://man.openbsd.org/bgplgd) or [`openbgpd-state-server`](https://github.com/alice-lg/openbgpd-state-server) for [OpenBGPD](https://www.openbgpd.org/)
### Birdwatcher
Normally you would first install the [birdwatcher API](https://github.com/alice-lg/birdwatcher) directly on the machine(s) where you run [BIRD](http://bird.network.cz/) on
and then install Alice-LG on a separate public-facing server and point her to the aforementioned [birdwatcher API](https://github.com/alice-lg/birdwatcher).
@ -28,12 +52,19 @@ just prior to [RIPE73](https://ripe73.ripe.net/) in Madrid, Spain.
Major thanks to Barry O'Donovan, who built the original [INEX Bird's Eye](https://github.com/inex/birdseye) BIRD API, of which Alice-LG is a spin-off.
### GoBGP
Alice-LG supports direct integration with GoBGP instances using gRPC.
See the configuration section for more detail.
### OpenBGPD
Alice-LG supports OpenBGPD via [`bgplgd`](https://man.openbsd.org/bgplgd)
and [`openbgpd-state-server`](https://github.com/alice-lg/openbgpd-state-server).
## Building Alice-LG from scratch
__These examples include setting up your Go environment; if you have already set that up, you can skip that part.__
In case you have trouble with `npm` and `gulp`, you can try using `yarn`.
### CentOS 7:
### CentOS:
First add the following lines at the end of your `~/.bash_profile`:
```bash
GOPATH=$HOME/go
@ -47,10 +78,8 @@ source ~/.bash_profile
# Install frontend build dependencies
sudo yum install golang npm
sudo npm install --global gulp-cli
sudo npm install --global yarn
go get github.com/GeertJohan/go.rice
go get github.com/GeertJohan/go.rice/rice
mkdir -p ~/go/bin ~/go/pkg ~/go/src/github.com/alice-lg/
cd ~/go/src/github.com/alice-lg
@ -64,7 +93,7 @@ Your Alice-LG source will now be located at `~/go/src/github.com/alice-lg/alice-
## Configuration
An example configuration can be found at
[etc/alice-lg/alice.example.conf](https://github.com/alice-lg/alice-lg/blob/readme_update/etc/alice-lg/alice.example.conf).
[etc/alice-lg/alice.example.conf](https://github.com/alice-lg/alice-lg/blob/master/etc/alice-lg/alice.example.conf).
You can copy it to any of the following locations:
@ -73,13 +102,15 @@ You can copy it to any of the following locations:
/etc/alice-lg/alice.conf # global
You will have to edit the configuration file as you need to point Alice-LG to the correct [APIs](https://github.com/alice-lg/birdwatcher):
You will have to edit the configuration file as you need to point Alice-LG to the correct backend source. Multiple sources can be configured.
[Birdwatcher](https://github.com/alice-lg/birdwatcher):
```ini
[source.rs1-example-v4]
name = rs1.example.com (IPv4)
[source.rs1-example-v4.birdwatcher]
api = http://rs1.example.com:29184/
neighbors_refresh_timeout = 2
# show_last_reboot = true
# timezone = UTC
# type = single_table / multi_table
@ -94,6 +125,52 @@ name = rs1.example.com (IPv6)
api = http://rs1.example.com:29186/
```
[GoBGP](https://osrg.github.io/gobgp/):
```ini
[source.rs2-example]
name = rs2.example.com
group = AMS
[source.rs2-example.gobgp]
# Host is the IP (or DNS name) and port for the remote GoBGP daemon
host = rs2.example.com:50051
# ProcessingTimeout is a timeout in seconds configured per gRPC call to a given GoBGP daemon
processing_timeout = 300
```
Configure TLS with:
```ini
tls_crt = /path/to/cert
tls_common_name = "common name"
```
You can disable TLS with `insecure = true`.
[OpenBGPD](https://www.openbgpd.org/) via `openbgpd-state-server`:
```ini
[source.rs-example]
name = rs-example.openbgpd-state-server
[source.rs-example.openbgpd-state-server]
api = http://rs23.example.net:29111/api
# Optional response cache time in seconds
# Default: disabled (0)
cache_ttl = 100
```
[OpenBGPD](https://www.openbgpd.org/) via `bgplgd`:
```ini
[source.rs-example]
name = rs-example.openbgpd-bgplgd
[source.rs-example.openbgpd-bgplgd]
api = http://rs23.example.net/bgplgd
# Optional response cache time in seconds
# Default: disabled (0)
cache_ttl = 100
```
## Running
Launch the server by running
@ -101,27 +178,6 @@ Launch the server by running
./bin/alice-lg-linux-amd64
## Deployment
We added a `Makefile` for packaging Alice as an RPM using [fpm](https://github.com/jordansissel/fpm).
If you have all tools available locally, you can just type:
make rpm
If you want to build the package on a remote machine, just use
make remote_rpm BUILD_SERVER=my-rpm-building-server.example.com
which will copy the dist to the remote server and executes fpm via ssh.
You can specify which system integration to use:
Set the `SYSTEM_INIT` variable to `upstart` or `systemd` (default)
prior to building the RPM.
make remote_rpm BUILD_SERVER=rpmbuild.example.com SYSTEM_INIT=upstart
## Customization
Alice now supports custom themes!
@ -154,46 +210,43 @@ Alice.updateContent({
```
A callback for running custom JavaScript after the base application
was initialized can be installed using:
```javascript
Alice.onLayoutReady(function(page) {
// page is the layout HTML root element
});
```
For an example check out: https://github.com/alice-lg/alice-theme-example
## Hacking
The client is a Single Page React Application.
All sources are available in `client/`.
Install build tools as needed:
npm install -g gulp-cli
All sources are available in `ui/`.
`Yarn` is required for building the UI.
Create a fresh UI build with
```bash
cd client/
make client
cd ui/
make
```
This will install all dependencies and run `gulp`.
This will install all dependencies with `yarn install` and run `yarn build`.
While working on the UI you might want to use `make watch`,
which will keep the `gulp watch` task up and running.
As this is a `create-react-app` application, react-scripts are present
and you can just run a development server using `yarn start`.
### Docker
For convenience we added a `Dockerfile` for building the frontend / client.
All this is available as a containerized environment:
Create a fresh UI build using docker with
```bash
cd client/
Running `docker-compose up` in the `./dev` directory will build and start the
backend and run a webpack dev server for the UI.
# Dev build:
make -f Makefile.docker client
The UI is then available on http://localhost:3000/, and on http://localhost:7340/
the backend will serve the API.
# Production build:
make -f Makefile.docker client_prod
```
You can use gulp with docker for watching the files while developing as well:
```bash
make -f Makefile.docker watch
```
## Sponsors
@ -205,3 +258,6 @@ The development of Alice is now sponsored by
</p>
Many thanks go out to [ECIX](https://www.ecix.net), where this project originated and was backed over the last two years.
Support for **OpenBGPD** was sponsored by the [Route Server Support Foundation](https://www.rssf.nl/).

View File

@ -1 +1 @@
4.0.0
6.1.0

7
backend/.gitignore vendored
View File

@ -1,7 +0,0 @@
# Ignore static build in repo
rice-box.go
# Ignore builds
alice-lg-*

View File

@ -1,14 +0,0 @@
FROM golang:1.10
# Add project (for prefetching dependencies)
ADD . /go/src/github.com/alice-lg/alice-lg/backend
RUN cd /go/src/github.com/alice-lg/alice-lg/backend && go get -v .
RUN go get github.com/GeertJohan/go.rice/rice
RUN go install github.com/GeertJohan/go.rice/rice
WORKDIR /go/src/github.com/alice-lg/alice-lg
VOLUME ["/go/src/github.com/alice-lg/alice-lg"]

View File

@ -1,64 +0,0 @@
#
# Alice LG Backend
# ----------------
#
PROG=alice-lg
ARCH=amd64
APP_VERSION=$(shell cat ../VERSION)
VERSION=$(APP_VERSION)_$(shell git rev-parse --short HEAD)
LOCAL_RPMS=RPMS
# OS Detection
UNAME=$(shell uname)
ifeq ($(UNAME), Darwin)
TARGET=osx
else
TARGET=linux
endif
LDFLAGS=-ldflags="-X main.version=$(APP_VERSION)"
FILES=$(shell find . -depth 1 ! -name "*_test.go" -name "*.go")
all: $(TARGET)
@echo "Built $(VERSION) @ $(TARGET)"
deps:
GO111MODULE=on go get -v .
osx-dev: deps
GO111MODULE=on go run $(FILES)
osx: deps
GO111MODULE=on GOARCH=$(ARCH) GOOS=darwin go build $(LDFLAGS) -o $(PROG)-osx-$(ARCH)
linux: deps
GO111MODULE=on GOARCH=$(ARCH) GOOS=linux go build $(LDFLAGS) -o $(PROG)-linux-$(ARCH)
bundle:
rice embed-go
test:
GO111MODULE=on go test -v
cd api/ && GO111MODULE=on go test -v
cd caches/ && GO111MODULE=on go test -v
cd sources/birdwatcher && GO111MODULE=on go test -v
dev: clean all
prod: clean bundle $(TARGET)
@echo "Build $(VERSION) [production] @ $(TARGET)"
clean:
rm -f rice-box.go
rm -f $(PROG)-osx-$(ARCH)
rm -f $(PROG)-linux-$(ARCH)
coverage:
go test -coverprofile=coverage.out
go tool cover -func=coverage.out

View File

@ -1,28 +0,0 @@
DOCKER_IMAGE := alice-lg-golang:latest
DOCKER_EXEC := docker run --rm -t -i \
--user 1000 \
-v `pwd`/../:/go/src/github.com/alice-lg/alice-lg \
alice-lg-golang:latest /bin/sh -c
image:
docker build . -t alice-lg-golang:latest
# Somehow rice.box does not work when the binary was built
# using docker. However it does work when used for embedding.
# So, making a production build is doable, a dev build however
# is not.
#
# dev:
# $(DOCKER_EXEC) "cd backend && make dev"
prod: image
$(DOCKER_EXEC) "cd backend && make prod"
all: prod

View File

@ -1,256 +0,0 @@
package api
import (
"fmt"
"time"
)
// General api response
type Response interface{}
// Details, usually the original backend response
type Details map[string]interface{}
// Error Handling
type ErrorResponse struct {
Message string `json:"message"`
Code int `json:"code"`
Tag string `json:"tag"`
RouteserverId string `json:"routeserver_id"`
}
// Cache aware api response
type CacheableResponse interface {
CacheTtl() time.Duration
}
// Config
type ConfigResponse struct {
Asn int `json:"asn"`
RejectReasons map[string]interface{} `json:"reject_reasons"`
Noexport Noexport `json:"noexport"`
NoexportReasons map[string]interface{} `json:"noexport_reasons"`
RejectCandidates RejectCandidates `json:"reject_candidates"`
Rpki Rpki `json:"rpki"`
BgpCommunities map[string]interface{} `json:"bgp_communities"`
NeighboursColumns map[string]string `json:"neighbours_columns"`
NeighboursColumnsOrder []string `json:"neighbours_columns_order"`
RoutesColumns map[string]string `json:"routes_columns"`
RoutesColumnsOrder []string `json:"routes_columns_order"`
LookupColumns map[string]string `json:"lookup_columns"`
LookupColumnsOrder []string `json:"lookup_columns_order"`
PrefixLookupEnabled bool `json:"prefix_lookup_enabled"`
}
type Noexport struct {
LoadOnDemand bool `json:"load_on_demand"`
}
type RejectCandidates struct {
Communities map[string]interface{} `json:"communities"`
}
type Rpki struct {
Enabled bool `json:"enabled"`
Valid []string `json:"valid"`
Unknown []string `json:"unknown"`
NotChecked []string `json:"not_checked"`
Invalid []string `json:"invalid"`
}
// Status
type ApiStatus struct {
Version string `json:"version"`
CacheStatus CacheStatus `json:"cache_status"`
ResultFromCache bool `json:"result_from_cache"`
Ttl time.Time `json:"ttl"`
}
type CacheStatus struct {
CachedAt time.Time `json:"cached_at"`
OrigTtl int `json:"orig_ttl"`
}
type Status struct {
ServerTime time.Time `json:"server_time"`
LastReboot time.Time `json:"last_reboot"`
LastReconfig time.Time `json:"last_reconfig"`
Message string `json:"message"`
RouterId string `json:"router_id"`
Version string `json:"version"`
Backend string `json:"backend"`
}
type StatusResponse struct {
Api ApiStatus `json:"api"`
Status Status `json:"status"`
}
// Routeservers
type Routeserver struct {
Id string `json:"id"`
Name string `json:"name"`
Group string `json:"group"`
Blackholes []string `json:"blackholes"`
Order int `json:"-"`
}
type Routeservers []Routeserver
// Implement sorting interface for routeservers
func (rs Routeservers) Len() int {
return len(rs)
}
func (rs Routeservers) Less(i, j int) bool {
return rs[i].Order < rs[j].Order
}
func (rs Routeservers) Swap(i, j int) {
rs[i], rs[j] = rs[j], rs[i]
}
type RouteserversResponse struct {
Routeservers []Routeserver `json:"routeservers"`
}
// BGP
type Community []int
func (com Community) String() string {
res := ""
for _, v := range com {
res += fmt.Sprintf(":%d", v)
}
return res[1:]
}
type Communities []Community
/*
Deduplicate communities
*/
func (communities Communities) Unique() Communities {
seen := map[string]bool{}
result := make(Communities, 0, len(communities))
for _, com := range communities {
key := com.String()
if _, ok := seen[key]; !ok {
// We have not seen this community yet
result = append(result, com)
seen[key] = true
}
}
return result
}
type ExtCommunity []interface{}
func (com ExtCommunity) String() string {
res := ""
for _, v := range com {
res += fmt.Sprintf(":%v", v)
}
return res[1:]
}
type ExtCommunities []ExtCommunity
func (communities ExtCommunities) Unique() ExtCommunities {
seen := map[string]bool{}
result := make(ExtCommunities, 0, len(communities))
for _, com := range communities {
key := com.String()
if _, ok := seen[key]; !ok {
// We have not seen this community yet
result = append(result, com)
seen[key] = true
}
}
return result
}
type BgpInfo struct {
Origin string `json:"origin"`
AsPath []int `json:"as_path"`
NextHop string `json:"next_hop"`
Communities Communities `json:"communities"`
LargeCommunities Communities `json:"large_communities"`
ExtCommunities ExtCommunities `json:"ext_communities"`
LocalPref int `json:"local_pref"`
Med int `json:"med"`
}
func (bgp BgpInfo) HasCommunity(community Community) bool {
if len(community) != 2 {
return false // This can never match.
}
for _, com := range bgp.Communities {
if len(com) != len(community) {
continue // This can't match.
}
if com[0] == community[0] &&
com[1] == community[1] {
return true
}
}
return false
}
func (bgp BgpInfo) HasExtCommunity(community ExtCommunity) bool {
if len(community) != 3 {
return false // This can never match.
}
for _, com := range bgp.ExtCommunities {
if len(com) != len(community) {
continue // This can't match.
}
if com[0] == community[0] &&
com[1] == community[1] &&
com[2] == community[2] {
return true
}
}
return false
}
func (bgp BgpInfo) HasLargeCommunity(community Community) bool {
// TODO: This is an almost 1:1 match to the function above.
if len(community) != 3 {
return false // This can never match.
}
for _, com := range bgp.LargeCommunities {
if len(com) != len(community) {
continue // This can't match.
}
if com[0] == community[0] &&
com[1] == community[1] &&
com[2] == community[2] {
return true
}
}
return false
}
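The community helpers in the listing above (removed in this diff as part of the backend restructuring) can be exercised roughly as follows. This is a minimal sketch, assuming the old `github.com/alice-lg/alice-lg/backend/api` import path shown elsewhere in this diff; it is not code from the repository itself:
```go
package main

import (
	"fmt"

	"github.com/alice-lg/alice-lg/backend/api"
)

func main() {
	// Duplicate communities collapse into a single entry.
	communities := api.Communities{
		api.Community{65000, 1},
		api.Community{65000, 1},
		api.Community{65000, 2},
	}
	fmt.Println(communities.Unique()) // [[65000 1] [65000 2]]

	// HasCommunity only matches standard two-element communities.
	bgp := api.BgpInfo{Communities: communities}
	fmt.Println(bgp.HasCommunity(api.Community{65000, 2}))    // true
	fmt.Println(bgp.HasCommunity(api.Community{65000, 0, 2})) // false: not a 2-tuple
}
```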

View File

@ -1,81 +0,0 @@
package api
import (
"time"
)
// Neighbours
type Neighbours []*Neighbour
type Neighbour struct {
Id string `json:"id"`
// Mandatory fields
Address string `json:"address"`
Asn int `json:"asn"`
State string `json:"state"`
Description string `json:"description"`
RoutesReceived int `json:"routes_received"`
RoutesFiltered int `json:"routes_filtered"`
RoutesExported int `json:"routes_exported"`
RoutesPreferred int `json:"routes_preferred"`
RoutesAccepted int `json:"routes_accepted"`
Uptime time.Duration `json:"uptime"`
LastError string `json:"last_error"`
// Original response
Details map[string]interface{} `json:"details"`
}
// Implement sorting interface for routes
func (neighbours Neighbours) Len() int {
return len(neighbours)
}
func (neighbours Neighbours) Less(i, j int) bool {
return neighbours[i].Asn < neighbours[j].Asn
}
func (neighbours Neighbours) Swap(i, j int) {
neighbours[i], neighbours[j] = neighbours[j], neighbours[i]
}
type NeighboursResponse struct {
Api ApiStatus `json:"api"`
Neighbours Neighbours `json:"neighbours"`
}
// Neighbours response is cacheable
func (self *NeighboursResponse) CacheTtl() time.Duration {
now := time.Now().UTC()
return self.Api.Ttl.Sub(now)
}
type NeighboursLookupResults map[string]Neighbours
type NeighboursStatus []*NeighbourStatus
type NeighbourStatus struct {
Id string `json:"id"`
State string `json:"state"`
Since time.Duration `json:"uptime"`
}
// Implement sorting interface for status
func (neighbours NeighboursStatus) Len() int {
return len(neighbours)
}
func (neighbours NeighboursStatus) Less(i, j int) bool {
return neighbours[i].Id < neighbours[j].Id
}
func (neighbours NeighboursStatus) Swap(i, j int) {
neighbours[i], neighbours[j] = neighbours[j], neighbours[i]
}
type NeighboursStatusResponse struct {
Api ApiStatus `json:"api"`
Neighbours NeighboursStatus `json:"neighbours"`
}
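As a quick sketch of how the (removed) neighbour types above behave — sorting is ascending by ASN, and a response is cacheable until the TTL reported in its `ApiStatus` — again assuming the old `backend/api` import path:
```go
package main

import (
	"fmt"
	"sort"
	"time"

	"github.com/alice-lg/alice-lg/backend/api"
)

func main() {
	// Neighbours sort ascending by ASN.
	neighbours := api.Neighbours{
		{Id: "ID2", Asn: 65020},
		{Id: "ID1", Asn: 65010},
	}
	sort.Sort(neighbours)
	fmt.Println(neighbours[0].Asn) // 65010

	// CacheTtl is the time remaining until the TTL reported by the API.
	response := &api.NeighboursResponse{
		Api:        api.ApiStatus{Ttl: time.Now().UTC().Add(5 * time.Minute)},
		Neighbours: neighbours,
	}
	fmt.Println(response.CacheTtl() > 0) // true for roughly the next five minutes
}
```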

View File

@ -1,191 +0,0 @@
package api
import (
"time"
)
// Prefixes
type Route struct {
Id string `json:"id"`
NeighbourId string `json:"neighbour_id"`
Network string `json:"network"`
Interface string `json:"interface"`
Gateway string `json:"gateway"`
Metric int `json:"metric"`
Bgp BgpInfo `json:"bgp"`
Age time.Duration `json:"age"`
Type []string `json:"type"` // [BGP, unicast, univ]
Primary bool `json:"primary"`
Details Details `json:"details"`
}
// Implement Filterable interface for routes
func (self *Route) MatchSourceId(id string) bool {
return true // A route has no source info so we exclude this filter
}
func (self *Route) MatchAsn(asn int) bool {
return true // Same here
}
// Only community filters are interesting at this point:
func (self *Route) MatchCommunity(community Community) bool {
return self.Bgp.HasCommunity(community)
}
func (self *Route) MatchExtCommunity(community ExtCommunity) bool {
return self.Bgp.HasExtCommunity(community)
}
func (self *Route) MatchLargeCommunity(community Community) bool {
return self.Bgp.HasLargeCommunity(community)
}
type Routes []*Route
// Implement sorting interface for routes
func (routes Routes) Len() int {
return len(routes)
}
func (routes Routes) Less(i, j int) bool {
return routes[i].Network < routes[j].Network
}
func (routes Routes) Swap(i, j int) {
routes[i], routes[j] = routes[j], routes[i]
}
type RoutesResponse struct {
Api ApiStatus `json:"api"`
Imported Routes `json:"imported"`
Filtered Routes `json:"filtered"`
NotExported Routes `json:"not_exported"`
}
func (self *RoutesResponse) CacheTtl() time.Duration {
now := time.Now().UTC()
return self.Api.Ttl.Sub(now)
}
type TimedResponse struct {
RequestDuration float64 `json:"request_duration_ms"`
}
type Pagination struct {
Page int `json:"page"`
PageSize int `json:"page_size"`
TotalPages int `json:"total_pages"`
TotalResults int `json:"total_results"`
}
type PaginatedResponse struct {
Pagination Pagination `json:"pagination"`
}
type FilterableResponse struct {
FiltersAvailable *SearchFilters `json:"filters_available"`
FiltersApplied *SearchFilters `json:"filters_applied"`
}
type PaginatedRoutesResponse struct {
*RoutesResponse
TimedResponse
FilterableResponse
Pagination Pagination `json:"pagination"`
}
// Lookup Prefixes
type LookupRoute struct {
Id string `json:"id"`
NeighbourId string `json:"neighbour_id"`
Neighbour *Neighbour `json:"neighbour"`
State string `json:"state"` // Filtered, Imported, ...
Routeserver Routeserver `json:"routeserver"`
Network string `json:"network"`
Interface string `json:"interface"`
Gateway string `json:"gateway"`
Metric int `json:"metric"`
Bgp BgpInfo `json:"bgp"`
Age time.Duration `json:"age"`
Type []string `json:"type"` // [BGP, unicast, univ]
Primary bool `json:"primary"`
Details Details `json:"details"`
}
// Implement Filterable interface for lookup routes
func (self *LookupRoute) MatchSourceId(id string) bool {
return self.Routeserver.Id == id
}
func (self *LookupRoute) MatchAsn(asn int) bool {
return self.Neighbour.Asn == asn
}
// Only community filters are interesting at this point:
func (self *LookupRoute) MatchCommunity(community Community) bool {
return self.Bgp.HasCommunity(community)
}
func (self *LookupRoute) MatchExtCommunity(community ExtCommunity) bool {
return self.Bgp.HasExtCommunity(community)
}
func (self *LookupRoute) MatchLargeCommunity(community Community) bool {
return self.Bgp.HasLargeCommunity(community)
}
// Implement sorting interface for lookup routes
func (routes LookupRoutes) Len() int {
return len(routes)
}
func (routes LookupRoutes) Less(i, j int) bool {
return routes[i].Network < routes[j].Network
}
func (routes LookupRoutes) Swap(i, j int) {
routes[i], routes[j] = routes[j], routes[i]
}
type LookupRoutes []*LookupRoute
// TODO: Naming is a bit yuck
type LookupRoutesResponse struct {
*PaginatedResponse
Routes LookupRoutes `json:"routes"`
}
// TODO: Refactor this (might be legacy)
type RoutesLookupResponse struct {
Api ApiStatus `json:"api"`
Routes LookupRoutes `json:"routes"`
}
type RoutesLookupResponseGlobal struct {
Routes LookupRoutes `json:"routes"`
// Pagination
TotalRoutes int `json:"total_routes"`
Limit int `json:"limit"`
Offset int `json:"offset"`
// Meta
Time float64 `json:"query_duration_ms"`
}
type PaginatedRoutesLookupResponse struct {
TimedResponse
FilterableResponse
Api ApiStatus `json:"api"` // Add to provide cache status information
Imported *LookupRoutesResponse `json:"imported"`
Filtered *LookupRoutesResponse `json:"filtered"`
}

View File

@ -1,543 +0,0 @@
package api
import (
"fmt"
"log"
"net/url"
)
const (
SEARCH_KEY_SOURCES = "sources"
SEARCH_KEY_ASNS = "asns"
SEARCH_KEY_COMMUNITIES = "communities"
SEARCH_KEY_EXT_COMMUNITIES = "ext_communities"
SEARCH_KEY_LARGE_COMMUNITIES = "large_communities"
)
/*
API Search
* Helper methods for searching
* Handle filter criteria
*/
type Filterable interface {
MatchSourceId(sourceId string) bool
MatchAsn(asn int) bool
MatchCommunity(community Community) bool
MatchExtCommunity(community ExtCommunity) bool
MatchLargeCommunity(community Community) bool
}
type FilterValue interface{}
type SearchFilter struct {
Cardinality int `json:"cardinality"`
Name string `json:"name"`
Value FilterValue `json:"value"`
}
type SearchFilterCmpFunc func(a FilterValue, b FilterValue) bool
func searchFilterCmpInt(a FilterValue, b FilterValue) bool {
return a.(int) == b.(int)
}
func searchFilterCmpString(a FilterValue, b FilterValue) bool {
return a.(string) == b.(string)
}
func searchFilterCmpCommunity(a FilterValue, b FilterValue) bool {
ca := a.(Community)
cb := b.(Community)
if len(ca) != len(cb) {
return false
}
// Compare components
for i, _ := range ca {
if ca[i] != cb[i] {
return false
}
}
return true
}
func searchFilterCmpExtCommunity(a FilterValue, b FilterValue) bool {
ca := a.(ExtCommunity)
cb := b.(ExtCommunity)
if len(ca) != len(cb) || len(ca) != 3 || len(cb) != 3 {
return false
}
return ca[0] == cb[0] && ca[1] == cb[1] && ca[2] == cb[2]
}
func (self *SearchFilter) Equal(other *SearchFilter) bool {
var cmp SearchFilterCmpFunc
switch other.Value.(type) {
case Community:
cmp = searchFilterCmpCommunity
break
case ExtCommunity:
cmp = searchFilterCmpExtCommunity
break
case int:
cmp = searchFilterCmpInt
break
case string:
cmp = searchFilterCmpString
break
}
if cmp == nil {
log.Println("Unknown search filter value type")
return false
}
return cmp(self.Value, other.Value)
}
/*
Search Filter Groups
*/
type SearchFilterGroup struct {
Key string `json:"key"`
Filters []*SearchFilter `json:"filters"`
filtersIdx map[string]int
}
func (self *SearchFilterGroup) FindFilter(filter *SearchFilter) *SearchFilter {
for _, f := range self.Filters {
if f.Equal(filter) == true {
return f
}
}
return nil
}
func (self *SearchFilterGroup) Contains(filter *SearchFilter) bool {
return self.FindFilter(filter) != nil
}
func (self *SearchFilterGroup) GetFilterByValue(value interface{}) *SearchFilter {
// I've tried it with .(fmt.Stringer), but int does not implement this...
// So whatever. I'm using the trick of letting Sprintf choose the right
// conversion. If this is too expensive, we need to refactor this.
// TODO: profile this.
idx, ok := self.filtersIdx[fmt.Sprintf("%v", value)]
if !ok {
return nil // We don't have this particular filter
}
return self.Filters[idx]
}
func (self *SearchFilterGroup) AddFilter(filter *SearchFilter) {
// Check if a filter with this value is present, if not:
// append and update index; otherwise increment cardinality
if presentFilter := self.GetFilterByValue(filter.Value); presentFilter != nil {
presentFilter.Cardinality++
return
}
// Insert filter
idx := len(self.Filters)
filter.Cardinality = 1
self.Filters = append(self.Filters, filter)
self.filtersIdx[fmt.Sprintf("%v", filter.Value)] = idx
}
func (self *SearchFilterGroup) AddFilters(filters []*SearchFilter) {
for _, filter := range filters {
self.AddFilter(filter)
}
}
func (self *SearchFilterGroup) rebuildIndex() {
self.filtersIdx = map[string]int{}
for i, filter := range self.Filters {
self.filtersIdx[fmt.Sprintf("%v", filter.Value)] = i
}
}
/*
Search comparators
*/
type SearchFilterComparator func(route Filterable, value interface{}) bool
func searchFilterMatchSource(route Filterable, value interface{}) bool {
sourceId, ok := value.(string)
if !ok {
return false
}
return route.MatchSourceId(sourceId)
}
func searchFilterMatchAsn(route Filterable, value interface{}) bool {
asn, ok := value.(int)
if !ok {
return false
}
return route.MatchAsn(asn)
}
func searchFilterMatchCommunity(route Filterable, value interface{}) bool {
community, ok := value.(Community)
if !ok {
return false
}
return route.MatchCommunity(community)
}
func searchFilterMatchExtCommunity(route Filterable, value interface{}) bool {
community, ok := value.(ExtCommunity)
if !ok {
return false
}
return route.MatchExtCommunity(community)
}
func searchFilterMatchLargeCommunity(route Filterable, value interface{}) bool {
community, ok := value.(Community)
if !ok {
return false
}
return route.MatchLargeCommunity(community)
}
func selectCmpFuncByKey(key string) SearchFilterComparator {
var cmp SearchFilterComparator
switch key {
case SEARCH_KEY_SOURCES:
cmp = searchFilterMatchSource
break
case SEARCH_KEY_ASNS:
cmp = searchFilterMatchAsn
break
case SEARCH_KEY_COMMUNITIES:
cmp = searchFilterMatchCommunity
break
case SEARCH_KEY_EXT_COMMUNITIES:
cmp = searchFilterMatchExtCommunity
break
case SEARCH_KEY_LARGE_COMMUNITIES:
cmp = searchFilterMatchLargeCommunity
break
default:
cmp = nil
}
return cmp
}
func (self *SearchFilterGroup) MatchAny(route Filterable) bool {
// Check if we have any filter to match
if len(self.Filters) == 0 {
return true // no filter, everything matches
}
// Get comparator
cmp := selectCmpFuncByKey(self.Key)
if cmp == nil {
return false // This should not have happened!
}
// Check if any of the given filters matches
for _, filter := range self.Filters {
if cmp(route, filter.Value) {
return true
}
}
return false
}
func (self *SearchFilterGroup) MatchAll(route Filterable) bool {
// Check if we have any filter to match
if len(self.Filters) == 0 {
return true // no filter, everything matches. Like above.
}
// Get comparator
cmp := selectCmpFuncByKey(self.Key)
if cmp == nil {
return false // This again should not have happened!
}
// Assert that all filters match.
for _, filter := range self.Filters {
if !cmp(route, filter.Value) {
return false
}
}
// Everything's fine.
return true
}
type SearchFilters []*SearchFilterGroup
func NewSearchFilters() *SearchFilters {
// Define groups: CAVEAT! the order is relevant
groups := &SearchFilters{
&SearchFilterGroup{
Key: SEARCH_KEY_SOURCES,
Filters: []*SearchFilter{},
filtersIdx: make(map[string]int),
},
&SearchFilterGroup{
Key: SEARCH_KEY_ASNS,
Filters: []*SearchFilter{},
filtersIdx: make(map[string]int),
},
&SearchFilterGroup{
Key: SEARCH_KEY_COMMUNITIES,
Filters: []*SearchFilter{},
filtersIdx: make(map[string]int),
},
&SearchFilterGroup{
Key: SEARCH_KEY_EXT_COMMUNITIES,
Filters: []*SearchFilter{},
filtersIdx: make(map[string]int),
},
&SearchFilterGroup{
Key: SEARCH_KEY_LARGE_COMMUNITIES,
Filters: []*SearchFilter{},
filtersIdx: make(map[string]int),
},
}
return groups
}
func (self *SearchFilters) GetGroupByKey(key string) *SearchFilterGroup {
// This is an optimization (this is basically a fixed hash map,
// with hash(key) = position(key))
switch key {
case SEARCH_KEY_SOURCES:
return (*self)[0]
case SEARCH_KEY_ASNS:
return (*self)[1]
case SEARCH_KEY_COMMUNITIES:
return (*self)[2]
case SEARCH_KEY_EXT_COMMUNITIES:
return (*self)[3]
case SEARCH_KEY_LARGE_COMMUNITIES:
return (*self)[4]
}
return nil
}
/*
Update filter struct to include route:
- Extract ASN, source, BGP communities,
- Find Filter in group, increment result count if required.
*/
func (self *SearchFilters) UpdateFromLookupRoute(route *LookupRoute) {
// Add source
self.GetGroupByKey(SEARCH_KEY_SOURCES).AddFilter(&SearchFilter{
Name: route.Routeserver.Name,
Value: route.Routeserver.Id,
})
// Add ASN from neighbor
self.GetGroupByKey(SEARCH_KEY_ASNS).AddFilter(&SearchFilter{
Name: route.Neighbour.Description,
Value: route.Neighbour.Asn,
})
// Add communities
communities := self.GetGroupByKey(SEARCH_KEY_COMMUNITIES)
for _, c := range route.Bgp.Communities.Unique() {
communities.AddFilter(&SearchFilter{
Name: c.String(),
Value: c,
})
}
extCommunities := self.GetGroupByKey(SEARCH_KEY_EXT_COMMUNITIES)
for _, c := range route.Bgp.ExtCommunities.Unique() {
extCommunities.AddFilter(&SearchFilter{
Name: c.String(),
Value: c,
})
}
largeCommunities := self.GetGroupByKey(SEARCH_KEY_LARGE_COMMUNITIES)
for _, c := range route.Bgp.LargeCommunities.Unique() {
largeCommunities.AddFilter(&SearchFilter{
Name: c.String(),
Value: c,
})
}
}
// This is the same as above, but only the communities
// are considered.
func (self *SearchFilters) UpdateFromRoute(route *Route) {
// Add communities
communities := self.GetGroupByKey(SEARCH_KEY_COMMUNITIES)
for _, c := range route.Bgp.Communities.Unique() {
communities.AddFilter(&SearchFilter{
Name: c.String(),
Value: c,
})
}
extCommunities := self.GetGroupByKey(SEARCH_KEY_EXT_COMMUNITIES)
for _, c := range route.Bgp.ExtCommunities.Unique() {
extCommunities.AddFilter(&SearchFilter{
Name: c.String(),
Value: c,
})
}
largeCommunities := self.GetGroupByKey(SEARCH_KEY_LARGE_COMMUNITIES)
for _, c := range route.Bgp.LargeCommunities.Unique() {
largeCommunities.AddFilter(&SearchFilter{
Name: c.String(),
Value: c,
})
}
}
/*
Build filter struct from query params:
For example a query string of:
asns=2342,23123&communities=23:42&large_communities=23:42:42
yields a filtering struct of
Groups[
Group{"sources", []},
Group{"asns", [Filter{Value: 2342},
Filter{Value: 23123}]},
Group{"communities", ...
}
*/
func FiltersFromQuery(query url.Values) (*SearchFilters, error) {
queryFilters := NewSearchFilters()
for key, _ := range query {
value := query.Get(key)
switch key {
case SEARCH_KEY_SOURCES:
filters, err := parseQueryValueList(parseStringValue, value)
if err != nil {
return nil, err
}
queryFilters.GetGroupByKey(SEARCH_KEY_SOURCES).AddFilters(filters)
break
case SEARCH_KEY_ASNS:
filters, err := parseQueryValueList(parseIntValue, value)
if err != nil {
return nil, err
}
queryFilters.GetGroupByKey(SEARCH_KEY_ASNS).AddFilters(filters)
break
case SEARCH_KEY_COMMUNITIES:
filters, err := parseQueryValueList(parseCommunityValue, value)
if err != nil {
return nil, err
}
queryFilters.GetGroupByKey(SEARCH_KEY_COMMUNITIES).AddFilters(filters)
break
case SEARCH_KEY_EXT_COMMUNITIES:
filters, err := parseQueryValueList(parseExtCommunityValue, value)
if err != nil {
return nil, err
}
queryFilters.GetGroupByKey(SEARCH_KEY_EXT_COMMUNITIES).AddFilters(filters)
break
case SEARCH_KEY_LARGE_COMMUNITIES:
filters, err := parseQueryValueList(parseCommunityValue, value)
if err != nil {
return nil, err
}
queryFilters.GetGroupByKey(SEARCH_KEY_LARGE_COMMUNITIES).AddFilters(filters)
break
}
}
return queryFilters, nil
}
/*
Match a route. Check if route matches all filters.
Unless all filters are blank.
*/
func (self *SearchFilters) MatchRoute(route Filterable) bool {
sources := self.GetGroupByKey(SEARCH_KEY_SOURCES)
if !sources.MatchAny(route) {
return false
}
asns := self.GetGroupByKey(SEARCH_KEY_ASNS)
if !asns.MatchAny(route) {
return false
}
communities := self.GetGroupByKey(SEARCH_KEY_COMMUNITIES)
if !communities.MatchAll(route) {
return false
}
extCommunities := self.GetGroupByKey(SEARCH_KEY_EXT_COMMUNITIES)
if !extCommunities.MatchAll(route) {
return false
}
largeCommunities := self.GetGroupByKey(SEARCH_KEY_LARGE_COMMUNITIES)
if !largeCommunities.MatchAll(route) {
return false
}
return true
}
func (self *SearchFilters) Sub(other *SearchFilters) *SearchFilters {
result := make(SearchFilters, len(*self))
for id, group := range *self {
otherGroup := (*other)[id]
diff := &SearchFilterGroup{
Key: group.Key,
Filters: []*SearchFilter{},
}
// Combine filters
for _, f := range group.Filters {
if otherGroup.Contains(f) {
continue // Let's skip this
}
diff.Filters = append(diff.Filters, f)
}
diff.rebuildIndex()
result[id] = diff
}
return &result
}
func (self *SearchFilters) MergeProperties(other *SearchFilters) {
for id, group := range *self {
otherGroup := (*other)[id]
for _, filter := range group.Filters {
otherFilter := otherGroup.FindFilter(filter)
if otherFilter == nil {
// Filter not present on other side, ignore this.
continue
}
filter.Name = otherFilter.Name
filter.Cardinality = otherFilter.Cardinality
}
}
}
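Tying the pieces of this (removed) file together, here is a hedged sketch of how a query string becomes a `SearchFilters` struct and is matched against a route. It assumes the old `backend/api` import path and that the parse helpers (`parseIntValue`, `parseCommunityValue`), which are not part of this listing, yield `int` and `Community` values as the comment above suggests:
```go
package main

import (
	"fmt"
	"net/url"

	"github.com/alice-lg/alice-lg/backend/api"
)

func main() {
	// Query parameters as they would arrive at a lookup endpoint.
	query, err := url.ParseQuery("asns=2342&communities=23:42")
	if err != nil {
		panic(err)
	}

	filters, err := api.FiltersFromQuery(query)
	if err != nil {
		panic(err)
	}

	// Sources and ASNs are OR-ed (MatchAny), communities are AND-ed (MatchAll).
	route := &api.LookupRoute{
		Routeserver: api.Routeserver{Id: "rs1", Name: "rs1.example.com"},
		Neighbour:   &api.Neighbour{Asn: 2342},
		Bgp: api.BgpInfo{
			Communities: api.Communities{{23, 42}},
		},
	}
	fmt.Println(filters.MatchRoute(route)) // expected: true
}
```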

View File

@ -1,60 +0,0 @@
package main
import (
"github.com/alice-lg/alice-lg/backend/api"
"github.com/julienschmidt/httprouter"
"net/http"
)
// Handle Status Endpoint, this is intended for
// monitoring and service health checks
func apiStatusShow(_req *http.Request, _params httprouter.Params) (api.Response, error) {
status, err := NewAppStatus()
return status, err
}
// Handle status
func apiStatus(_req *http.Request, params httprouter.Params) (api.Response, error) {
rsId, err := validateSourceId(params.ByName("id"))
if err != nil {
return nil, err
}
source := AliceConfig.SourceInstanceById(rsId)
if source == nil {
return nil, SOURCE_NOT_FOUND_ERROR
}
result, err := source.Status()
if err != nil {
apiLogSourceError("status", rsId, err)
}
return result, err
}
// Handle Config Endpoint
func apiConfigShow(_req *http.Request, _params httprouter.Params) (api.Response, error) {
result := api.ConfigResponse{
Asn: AliceConfig.Server.Asn,
BgpCommunities: AliceConfig.Ui.BgpCommunities,
RejectReasons: AliceConfig.Ui.RoutesRejections.Reasons,
Noexport: api.Noexport{
LoadOnDemand: AliceConfig.Ui.RoutesNoexports.LoadOnDemand,
},
NoexportReasons: AliceConfig.Ui.RoutesNoexports.Reasons,
RejectCandidates: api.RejectCandidates{
Communities: AliceConfig.Ui.RoutesRejectCandidates.Communities,
},
Rpki: api.Rpki(AliceConfig.Ui.Rpki),
RoutesColumns: AliceConfig.Ui.RoutesColumns,
RoutesColumnsOrder: AliceConfig.Ui.RoutesColumnsOrder,
NeighboursColumns: AliceConfig.Ui.NeighboursColumns,
NeighboursColumnsOrder: AliceConfig.Ui.NeighboursColumnsOrder,
LookupColumns: AliceConfig.Ui.LookupColumns,
LookupColumnsOrder: AliceConfig.Ui.LookupColumnsOrder,
PrefixLookupEnabled: AliceConfig.Server.EnablePrefixLookup,
}
return result, nil
}

View File

@ -1,56 +0,0 @@
package main
import (
"github.com/alice-lg/alice-lg/backend/api"
"github.com/julienschmidt/httprouter"
"net/http"
"sort"
)
// Handle get neighbors on routeserver
func apiNeighborsList(_req *http.Request, params httprouter.Params) (api.Response, error) {
rsId, err := validateSourceId(params.ByName("id"))
if err != nil {
return nil, err
}
var neighborsResponse *api.NeighboursResponse
// Try to fetch neighbors from store, only fall back
// to RS query if store is not ready yet
sourceStatus := AliceNeighboursStore.SourceStatus(rsId)
if sourceStatus.State == STATE_READY {
neighbors := AliceNeighboursStore.GetNeighborsAt(rsId)
// Make response
neighborsResponse = &api.NeighboursResponse{
Api: api.ApiStatus{
Version: version,
CacheStatus: api.CacheStatus{
OrigTtl: 0,
CachedAt: sourceStatus.LastRefresh,
},
ResultFromCache: true, // you bet!
Ttl: sourceStatus.LastRefresh.Add(
AliceNeighboursStore.refreshInterval),
},
Neighbours: neighbors,
}
} else {
source := AliceConfig.SourceInstanceById(rsId)
if source == nil {
return nil, SOURCE_NOT_FOUND_ERROR
}
neighborsResponse, err = source.Neighbours()
if err != nil {
apiLogSourceError("neighbors", rsId, err)
return nil, err
}
}
// Sort result
sort.Sort(&neighborsResponse.Neighbours)
return neighborsResponse, nil
}

View File

@ -1,36 +0,0 @@
package main
import (
"github.com/alice-lg/alice-lg/backend/api"
"github.com/julienschmidt/httprouter"
"net/http"
"sort"
)
// Handle Routeservers List
func apiRouteserversList(_req *http.Request, _params httprouter.Params) (api.Response, error) {
// Get list of sources from config,
routeservers := api.Routeservers{}
sources := AliceConfig.Sources
for _, source := range sources {
routeservers = append(routeservers, api.Routeserver{
Id: source.Id,
Name: source.Name,
Group: source.Group,
Blackholes: source.Blackholes,
Order: source.Order,
})
}
// Assert routeserver ordering
sort.Sort(routeservers)
// Make routeservers response
response := api.RouteserversResponse{
Routeservers: routeservers,
}
return response, nil
}

View File

@ -1,136 +0,0 @@
package main
import (
"github.com/alice-lg/alice-lg/backend/api"
"github.com/julienschmidt/httprouter"
"net/http"
"sort"
"time"
)
// Handle global lookup
func apiLookupPrefixGlobal(
req *http.Request,
params httprouter.Params,
) (api.Response, error) {
// TODO: This function is too long
// Get prefix to query
q, err := validateQueryString(req, "q")
if err != nil {
return nil, err
}
q, err = validatePrefixQuery(q)
if err != nil {
return nil, err
}
// Check what we want to query
// Prefix -> fetch prefix
// _ -> fetch neighbours and routes
lookupPrefix := MaybePrefix(q)
// Measure response time
t0 := time.Now()
// Get additional filter criteria
filtersApplied, err := api.FiltersFromQuery(req.URL.Query())
if err != nil {
return nil, err
}
// Perform query
var routes api.LookupRoutes
if lookupPrefix {
routes = AliceRoutesStore.LookupPrefix(q)
} else {
neighbours := AliceNeighboursStore.LookupNeighbours(q)
routes = AliceRoutesStore.LookupPrefixForNeighbours(neighbours)
}
// Split routes
// TODO: Refactor at neighbors store
totalResults := len(routes)
imported := make(api.LookupRoutes, 0, totalResults)
filtered := make(api.LookupRoutes, 0, totalResults)
// Now, as we have allocated even more space, process routes by splitting,
// filtering and updating the available filters...
filtersAvailable := api.NewSearchFilters()
for _, r := range routes {
if !filtersApplied.MatchRoute(r) {
continue // Exclude route from results set
}
switch r.State {
case "filtered":
filtered = append(filtered, r)
break
case "imported":
imported = append(imported, r)
break
}
filtersAvailable.UpdateFromLookupRoute(r)
}
// Remove applied filters from available
filtersApplied.MergeProperties(filtersAvailable)
filtersAvailable = filtersAvailable.Sub(filtersApplied)
// Homogenize results
sort.Sort(imported)
sort.Sort(filtered)
// Paginate results
pageImported := apiQueryMustInt(req, "page_imported", 0)
pageSizeImported := AliceConfig.Ui.Pagination.RoutesAcceptedPageSize
routesImported, paginationImported := apiPaginateLookupRoutes(
imported, pageImported, pageSizeImported,
)
pageFiltered := apiQueryMustInt(req, "page_filtered", 0)
pageSizeFiltered := AliceConfig.Ui.Pagination.RoutesFilteredPageSize
routesFiltered, paginationFiltered := apiPaginateLookupRoutes(
filtered, pageFiltered, pageSizeFiltered,
)
// Calculate query duration
queryDuration := time.Since(t0)
// Make response
response := api.PaginatedRoutesLookupResponse{
Api: api.ApiStatus{
CacheStatus: api.CacheStatus{
CachedAt: AliceRoutesStore.CachedAt(),
},
ResultFromCache: true, // Well.
Ttl: AliceRoutesStore.CacheTtl(),
},
TimedResponse: api.TimedResponse{
RequestDuration: DurationMs(queryDuration),
},
Imported: &api.LookupRoutesResponse{
Routes: routesImported,
PaginatedResponse: &api.PaginatedResponse{
Pagination: paginationImported,
},
},
Filtered: &api.LookupRoutesResponse{
Routes: routesFiltered,
PaginatedResponse: &api.PaginatedResponse{
Pagination: paginationFiltered,
},
},
FilterableResponse: api.FilterableResponse{
FiltersAvailable: filtersAvailable,
FiltersApplied: filtersApplied,
},
}
return response, nil
}

View File

@ -1,72 +0,0 @@
package main
// Improve error handling
// Create api.ErrorResponses based on errors returned from server.
// Strip out potentially sensitive information, eg. connection errors
// to internal IP addresses.
import (
"net/http"
"net/url"
"strings"
"github.com/alice-lg/alice-lg/backend/api"
)
type ResourceNotFoundError struct{}
func (self *ResourceNotFoundError) Error() string {
return "resource not found"
}
var SOURCE_NOT_FOUND_ERROR = &ResourceNotFoundError{}
const (
GENERIC_ERROR_TAG = "GENERIC_ERROR"
CONNECTION_REFUSED_TAG = "CONNECTION_REFUSED"
CONNECTION_TIMEOUT_TAG = "CONNECTION_TIMEOUT"
RESOURCE_NOT_FOUND_TAG = "NOT_FOUND"
)
const (
GENERIC_ERROR_CODE = 42
CONNECTION_REFUSED_CODE = 100
CONNECTION_TIMEOUT_CODE = 101
RESOURCE_NOT_FOUND_CODE = 404
)
const (
ERROR_STATUS = http.StatusInternalServerError
RESOURCE_NOT_FOUND_STATUS = http.StatusNotFound
)
func apiErrorResponse(routeserverId string, err error) (api.ErrorResponse, int) {
code := GENERIC_ERROR_CODE
message := err.Error()
tag := GENERIC_ERROR_TAG
status := ERROR_STATUS
switch e := err.(type) {
case *ResourceNotFoundError:
tag = RESOURCE_NOT_FOUND_TAG
code = RESOURCE_NOT_FOUND_CODE
status = RESOURCE_NOT_FOUND_STATUS
case *url.Error:
if strings.Contains(message, "connection refused") {
tag = CONNECTION_REFUSED_TAG
code = CONNECTION_REFUSED_CODE
message = "Connection refused while dialing the API"
} else if e.Timeout() {
tag = CONNECTION_TIMEOUT_TAG
code = CONNECTION_TIMEOUT_CODE
message = "Connection timed out when connecting to the backend API"
}
}
return api.ErrorResponse{
Code: code,
Tag: tag,
Message: message,
RouteserverId: routeserverId,
}, status
}

View File

@ -1,25 +0,0 @@
package main
import (
"fmt"
"testing"
)
func TestApiLogSourceError(t *testing.T) {
err := fmt.Errorf("an unexpected error occurred")
conf := &Config{
Sources: []*SourceConfig{
&SourceConfig{
Id: "rs1v4",
Name: "rs1.example.net (IPv4)",
},
},
}
AliceConfig = conf
apiLogSourceError("foo.bar", "rs1v4", 23, "Test")
apiLogSourceError("foo.bam", "rs1v4", err)
apiLogSourceError("foo.baz", "rs1v4", 23, 42, "foo", err)
}

View File

@ -1,81 +0,0 @@
package main
import (
"fmt"
"strconv"
"net/http"
)
// Helper: Validate source Id
func validateSourceId(id string) (string, error) {
if len(id) > 42 {
return "unknown", fmt.Errorf("Source ID too long with length: %d", len(id))
}
return id, nil
}
// Helper: Validate query string
func validateQueryString(req *http.Request, key string) (string, error) {
query := req.URL.Query()
values, ok := query[key]
if !ok {
return "", fmt.Errorf("Query param %s is missing.", key)
}
if len(values) != 1 {
return "", fmt.Errorf("Query param %s is ambigous.", key)
}
value := values[0]
if value == "" {
return "", fmt.Errorf("Query param %s may not be empty.", key)
}
return value, nil
}
// Helper: Validate prefix query
func validatePrefixQuery(value string) (string, error) {
// We should at least provide 2 chars
if len(value) < 2 {
return "", fmt.Errorf("Query too short")
}
// Query constraints: Should at least include a dot or colon
/* let's try without this :)
if strings.Index(value, ".") == -1 &&
strings.Index(value, ":") == -1 {
return "", fmt.Errorf("Query needs at least a ':' or '.'")
}
*/
return value, nil
}
// Get pagination parameters: limit and offset
// Refer to defaults if none are given.
func validatePaginationParams(req *http.Request, limit, offset int) (int, int, error) {
query := req.URL.Query()
queryLimit, ok := query["limit"]
if ok {
limit, _ = strconv.Atoi(queryLimit[0])
}
queryOffset, ok := query["offset"]
if ok {
offset, _ = strconv.Atoi(queryOffset[0])
}
// Cap limit to [1, 500]
if limit < 1 {
limit = 1
}
if limit > 500 {
limit = 500
}
return limit, offset, nil
}
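// Illustrative sketch (not part of the original file): chaining the
// helpers above to read and validate a prefix query. The parameter
// name "q" is an assumption for this example.
func exampleReadPrefixQuery(req *http.Request) (string, error) {
    // Reject missing, ambiguous or empty parameters first ...
    value, err := validateQueryString(req, "q")
    if err != nil {
        return "", err
    }
    // ... then apply the (currently very lenient) prefix constraints.
    return validatePrefixQuery(value)
}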


@ -1,53 +0,0 @@
package caches
import (
"github.com/alice-lg/alice-lg/backend/api"
)
/*
The birdwatcher already caches the responses from
bird and provides the API consumers with information
on how long the information is valid.
However, to avoid unnecessary network requests to the
birdwatcher, we keep a local cache. (This comes in handy
when we are paginating the results for better client performance.)
*/
type NeighborsCache struct {
response *api.NeighboursResponse
disabled bool
}
func NewNeighborsCache(disabled bool) *NeighborsCache {
cache := &NeighborsCache{
response: nil,
disabled: disabled,
}
return cache
}
func (self *NeighborsCache) Get() *api.NeighboursResponse {
if self.disabled {
return nil
}
if self.response == nil {
return nil
}
if self.response.CacheTtl() < 0 {
return nil
}
return self.response
}
func (self *NeighborsCache) Set(response *api.NeighboursResponse) {
if self.disabled {
return
}
self.response = response
}
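// Illustrative sketch (not part of the original file): read-through usage
// of the cache around a backend request. The fetch callback is a
// hypothetical stand-in for the actual birdwatcher call.
func exampleCachedNeighbours(
    cache *NeighborsCache,
    fetch func() (*api.NeighboursResponse, error),
) (*api.NeighboursResponse, error) {
    // Serve from the cache while the response TTL has not expired.
    if response := cache.Get(); response != nil {
        return response, nil
    }
    response, err := fetch()
    if err != nil {
        return nil, err
    }
    cache.Set(response)
    return response, nil
}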


@ -1,94 +0,0 @@
package caches
import (
"github.com/alice-lg/alice-lg/backend/api"
"sync"
"time"
)
/*
Routes Cache:
Keep a kv map with neighborId <-> api.RoutesResponse
TTL is derived from the api.RoutesResponse.
To avoid memory issues, we only keep N responses (MRU) (per RS).
*/
type RoutesCache struct {
responses map[string]*api.RoutesResponse
accessedAt LRUMap
disabled bool
size int
sync.Mutex
}
func NewRoutesCache(disabled bool, size int) *RoutesCache {
cache := &RoutesCache{
responses: make(map[string]*api.RoutesResponse),
accessedAt: make(map[string]time.Time),
disabled: disabled,
size: size,
}
return cache
}
func (self *RoutesCache) Get(neighborId string) *api.RoutesResponse {
if self.disabled {
return nil
}
self.Lock()
defer self.Unlock()
response, ok := self.responses[neighborId]
if !ok {
return nil
}
if response.CacheTtl() < 0 {
return nil
}
self.accessedAt[neighborId] = time.Now()
return response
}
func (self *RoutesCache) Set(neighborId string, response *api.RoutesResponse) {
if self.disabled {
return
}
self.Lock()
defer self.Unlock()
if len(self.responses) > self.size {
// delete LRU
lru := self.accessedAt.LRU()
delete(self.accessedAt, lru)
delete(self.responses, lru)
}
self.accessedAt[neighborId] = time.Now()
self.responses[neighborId] = response
}
func (self *RoutesCache) Expire() int {
self.Lock()
defer self.Unlock()
expiredKeys := []string{}
for key, response := range self.responses {
if response.CacheTtl() < 0 {
expiredKeys = append(expiredKeys, key)
}
}
for _, key := range expiredKeys {
delete(self.responses, key)
}
return len(expiredKeys)
}
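// Illustrative sketch (not part of the original file): the same
// read-through pattern, keyed by neighbor id. The fetch callback is a
// hypothetical stand-in for the routes request to the route server.
func exampleCachedRoutes(
    cache *RoutesCache,
    neighborId string,
    fetch func(string) (*api.RoutesResponse, error),
) (*api.RoutesResponse, error) {
    if response := cache.Get(neighborId); response != nil {
        return response, nil
    }
    response, err := fetch(neighborId)
    if err != nil {
        return nil, err
    }
    cache.Set(neighborId, response)
    return response, nil
}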


@ -1,753 +0,0 @@
package main
import (
"fmt"
"log"
"os"
"strings"
"github.com/alice-lg/alice-lg/backend/sources"
"github.com/alice-lg/alice-lg/backend/sources/birdwatcher"
"github.com/alice-lg/alice-lg/backend/sources/gobgp"
"github.com/go-ini/ini"
)
const SOURCE_UNKNOWN = 0
const SOURCE_BIRDWATCHER = 1
const SOURCE_GOBGP = 2
type ServerConfig struct {
Listen string `ini:"listen_http"`
EnablePrefixLookup bool `ini:"enable_prefix_lookup"`
NeighboursStoreRefreshInterval int `ini:"neighbours_store_refresh_interval"`
RoutesStoreRefreshInterval int `ini:"routes_store_refresh_interval"`
Asn int `ini:"asn"`
EnableNeighborsStatusRefresh bool `ini:"enable_neighbors_status_refresh"`
}
type HousekeepingConfig struct {
Interval int `ini:"interval"`
ForceReleaseMemory bool `ini:"force_release_memory"`
}
type RejectionsConfig struct {
Reasons BgpCommunities
}
type NoexportsConfig struct {
Reasons BgpCommunities
LoadOnDemand bool `ini:"load_on_demand"`
}
type RejectCandidatesConfig struct {
Communities BgpCommunities
}
type RpkiConfig struct {
// Define communities
Enabled bool `ini:"enabled"`
Valid []string `ini:"valid"`
Unknown []string `ini:"unknown"`
NotChecked []string `ini:"not_checked"`
Invalid []string `ini:"invalid"`
}
type UiConfig struct {
RoutesColumns map[string]string
RoutesColumnsOrder []string
NeighboursColumns map[string]string
NeighboursColumnsOrder []string
LookupColumns map[string]string
LookupColumnsOrder []string
RoutesRejections RejectionsConfig
RoutesNoexports NoexportsConfig
RoutesRejectCandidates RejectCandidatesConfig
BgpCommunities BgpCommunities
Rpki RpkiConfig
Theme ThemeConfig
Pagination PaginationConfig
}
type ThemeConfig struct {
Path string `ini:"path"`
BasePath string `ini:"url_base"` // Optional, default: /theme
}
type PaginationConfig struct {
RoutesFilteredPageSize int `ini:"routes_filtered_page_size"`
RoutesAcceptedPageSize int `ini:"routes_accepted_page_size"`
RoutesNotExportedPageSize int `ini:"routes_not_exported_page_size"`
}
type SourceConfig struct {
Id string
Order int
Name string
Group string
// Blackhole IPs
Blackholes []string
// Source configurations
Type int
Birdwatcher birdwatcher.Config
GoBGP gobgp.Config
// Source instance
instance sources.Source
}
type Config struct {
Server ServerConfig
Housekeeping HousekeepingConfig
Ui UiConfig
Sources []*SourceConfig
File string
}
// Get source by id
func (self *Config) SourceById(sourceId string) *SourceConfig {
for _, sourceConfig := range self.Sources {
if sourceConfig.Id == sourceId {
return sourceConfig
}
}
return nil
}
// Get instance by id
func (self *Config) SourceInstanceById(sourceId string) sources.Source {
sourceConfig := self.SourceById(sourceId)
if sourceConfig == nil {
return nil // Nothing to do here.
}
// Get instance from config
return sourceConfig.getInstance()
}
// Get sources keys form ini
func getSourcesKeys(config *ini.File) []string {
sources := []string{}
sections := config.SectionStrings()
for _, section := range sections {
if strings.HasPrefix(section, "source") {
sources = append(sources, section)
}
}
return sources
}
func isSourceBase(section *ini.Section) bool {
return len(strings.Split(section.Name(), ".")) == 2
}
// Get backend configuration type
func getBackendType(section *ini.Section) int {
name := section.Name()
if strings.HasSuffix(name, "birdwatcher") {
return SOURCE_BIRDWATCHER
} else if strings.HasSuffix(name, "gobgp") {
return SOURCE_GOBGP
}
return SOURCE_UNKNOWN
}
// Get UI config: Routes Columns Default
func getRoutesColumnsDefaults() (map[string]string, []string, error) {
columns := map[string]string{
"network": "Network",
"bgp.as_path": "AS Path",
"gateway": "Gateway",
"interface": "Interface",
}
order := []string{"network", "bgp.as_path", "gateway", "interface"}
return columns, order, nil
}
// Get UI config: Routes Columns
// The columns displayed in the frontend.
// The columns are ordered as in the config file.
//
// In case the configuration is empty, fall back to
// the defaults as defined in getRoutesColumnsDefaults()
//
func getRoutesColumns(config *ini.File) (map[string]string, []string, error) {
columns := make(map[string]string)
order := []string{}
section := config.Section("routes_columns")
keys := section.Keys()
if len(keys) == 0 {
return getRoutesColumnsDefaults()
}
for _, key := range keys {
columns[key.Name()] = section.Key(key.Name()).MustString("")
order = append(order, key.Name())
}
return columns, order, nil
}
// Get UI config: Get Neighbours Columns Defaults
func getNeighboursColumnsDefaults() (map[string]string, []string, error) {
columns := map[string]string{
"address": "Neighbour",
"asn": "ASN",
"state": "State",
"Uptime": "Uptime",
"Description": "Description",
"routes_received": "Routes Recv.",
"routes_filtered": "Routes Filtered",
}
order := []string{
"address", "asn", "state",
"Uptime", "Description", "routes_received", "routes_filtered",
}
return columns, order, nil
}
// Get UI config: Get Neighbours Columns
// basically the same as with the routes columns.
func getNeighboursColumns(config *ini.File) (
map[string]string,
[]string,
error,
) {
columns := make(map[string]string)
order := []string{}
section := config.Section("neighbours_columns")
keys := section.Keys()
if len(keys) == 0 {
return getNeighboursColumnsDefaults()
}
for _, key := range keys {
columns[key.Name()] = section.Key(key.Name()).MustString("")
order = append(order, key.Name())
}
return columns, order, nil
}
// Get UI config: Get Prefix search / Routes lookup columns
// As these differ slightly from our routes in the response
// (e.g. the neighbor and the source route server are referenced as nested objects)
// we provide an additional configuration for this
func getLookupColumnsDefaults() (map[string]string, []string, error) {
columns := map[string]string{
"network": "Network",
"gateway": "Gateway",
"neighbour.asn": "ASN",
"neighbour.description": "Neighbor",
"bgp.as_path": "AS Path",
"routeserver.name": "RS",
}
order := []string{
"network",
"gateway",
"bgp.as_path",
"neighbour.asn",
"neighbour.description",
"routeserver.name",
}
return columns, order, nil
}
func getLookupColumns(config *ini.File) (
map[string]string,
[]string,
error,
) {
columns := make(map[string]string)
order := []string{}
section := config.Section("lookup_columns")
keys := section.Keys()
if len(keys) == 0 {
return getLookupColumnsDefaults()
}
for _, key := range keys {
columns[key.Name()] = section.Key(key.Name()).MustString("")
order = append(order, key.Name())
}
return columns, order, nil
}
// Helper: parse communities from a section body
func parseAndMergeCommunities(
communities BgpCommunities, body string,
) BgpCommunities {
// Parse and merge communities
lines := strings.Split(body, "\n")
for _, line := range lines {
kv := strings.SplitN(line, "=", 2)
if len(kv) != 2 {
log.Println("Skipping malformed BGP community:", line)
continue
}
community := strings.TrimSpace(kv[0])
label := strings.TrimSpace(kv[1])
communities.Set(community, label)
}
return communities
}
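// Illustrative sketch (not part of the original file): the section body
// format this helper expects. The communities and labels below are
// examples only; the resulting map contains "65535:666" -> "blackhole"
// and "9033:100:1" -> "learned from customer".
func exampleParseCommunities() BgpCommunities {
    body := "65535:666 = blackhole\n9033:100:1 = learned from customer"
    return parseAndMergeCommunities(make(BgpCommunities), body)
}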
// Get UI config: Bgp Communities
func getBgpCommunities(config *ini.File) BgpCommunities {
// Load defaults
communities := MakeWellKnownBgpCommunities()
communitiesConfig := config.Section("bgp_communities")
if communitiesConfig == nil {
return communities // nothing else to do here, go with the default
}
return parseAndMergeCommunities(communities, communitiesConfig.Body())
}
// Get UI config: Get rejections
func getRoutesRejections(config *ini.File) (RejectionsConfig, error) {
reasonsConfig := config.Section("rejection_reasons")
if reasonsConfig == nil {
return RejectionsConfig{}, nil
}
reasons := parseAndMergeCommunities(
make(BgpCommunities),
reasonsConfig.Body())
rejectionsConfig := RejectionsConfig{
Reasons: reasons,
}
return rejectionsConfig, nil
}
// Get UI config: Get no export config
func getRoutesNoexports(config *ini.File) (NoexportsConfig, error) {
baseConfig := config.Section("noexport")
reasonsConfig := config.Section("noexport_reasons")
// Map base configuration
noexportsConfig := NoexportsConfig{}
baseConfig.MapTo(&noexportsConfig)
reasons := parseAndMergeCommunities(
make(BgpCommunities),
reasonsConfig.Body())
noexportsConfig.Reasons = reasons
return noexportsConfig, nil
}
// Get UI config: Reject candidates
func getRejectCandidatesConfig(config *ini.File) (RejectCandidatesConfig, error) {
candidateCommunities := config.Section(
"rejection_candidates").Key("communities").String()
if candidateCommunities == "" {
return RejectCandidatesConfig{}, nil
}
communities := BgpCommunities{}
for i, c := range strings.Split(candidateCommunities, ",") {
communities.Set(c, fmt.Sprintf("reject-candidate-%d", i+1))
}
conf := RejectCandidatesConfig{
Communities: communities,
}
return conf, nil
}
// Get UI config: RPKI configuration
func getRpkiConfig(config *ini.File) (RpkiConfig, error) {
var rpki RpkiConfig
// Defaults taken from:
// https://www.euro-ix.net/en/forixps/large-bgp-communities/
section := config.Section("rpki")
section.MapTo(&rpki)
fallbackAsn, err := getOwnASN(config)
if err != nil {
log.Println(
"Own ASN is not configured.",
"This might lead to unexpected behaviour with BGP large communities",
)
}
ownAsn := fmt.Sprintf("%d", fallbackAsn)
// Fill in defaults or postprocess config value
if len(rpki.Valid) == 0 {
rpki.Valid = []string{ownAsn, "1000", "1"}
} else {
rpki.Valid = strings.SplitN(rpki.Valid[0], ":", 3)
}
if len(rpki.Unknown) == 0 {
rpki.Unknown = []string{ownAsn, "1000", "2"}
} else {
rpki.Unknown = strings.SplitN(rpki.Unknown[0], ":", 3)
}
if len(rpki.NotChecked) == 0 {
rpki.NotChecked = []string{ownAsn, "1000", "3"}
} else {
rpki.NotChecked = strings.SplitN(rpki.NotChecked[0], ":", 3)
}
// As the euro-ix document states, this can be a range.
if len(rpki.Invalid) == 0 {
rpki.Invalid = []string{ownAsn, "1000", "4", "*"}
} else {
// Preprocess
rpki.Invalid = strings.SplitN(rpki.Invalid[0], ":", 3)
tokens := []string{}
if len(rpki.Invalid) != 3 {
// This is wrong, we should have three parts (RS):1000:[range]
return rpki, fmt.Errorf("Unexpected rpki.Invalid configuration: %v", rpki.Invalid)
} else {
tokens = strings.Split(rpki.Invalid[2], "-")
}
rpki.Invalid = append([]string{rpki.Invalid[0], rpki.Invalid[1]}, tokens...)
}
return rpki, nil
}
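// Illustrative note (not part of the original file): with an own ASN of
// 9033 and no explicit settings, the defaults above become the token
// lists ["9033", "1000", "1"] (valid), ["9033", "1000", "2"] (unknown),
// ["9033", "1000", "3"] (not checked) and ["9033", "1000", "4", "*"]
// (invalid). A configured invalid value such as "9033:1000:4-6" is split
// into ["9033", "1000", "4", "6"] by the range postprocessing. Values
// are examples only.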
// Helper: Get own ASN from ini
// This is now easy, since we enforce an ASN in
// the [server] section.
func getOwnASN(config *ini.File) (int, error) {
server := config.Section("server")
asn := server.Key("asn").MustInt(-1)
if asn == -1 {
return 0, fmt.Errorf("Could not get own ASN from config")
}
return asn, nil
}
// Get UI config: Theme settings
func getThemeConfig(config *ini.File) ThemeConfig {
baseConfig := config.Section("theme")
themeConfig := ThemeConfig{}
baseConfig.MapTo(&themeConfig)
if themeConfig.BasePath == "" {
themeConfig.BasePath = "/theme"
}
return themeConfig
}
// Get UI config: Pagination settings
func getPaginationConfig(config *ini.File) PaginationConfig {
baseConfig := config.Section("pagination")
paginationConfig := PaginationConfig{}
baseConfig.MapTo(&paginationConfig)
return paginationConfig
}
// Get the UI configuration from the config file
func getUiConfig(config *ini.File) (UiConfig, error) {
uiConfig := UiConfig{}
// Get route columns
routesColumns, routesColumnsOrder, err := getRoutesColumns(config)
if err != nil {
return uiConfig, err
}
// Get neighbours table columns
neighboursColumns,
neighboursColumnsOrder,
err := getNeighboursColumns(config)
if err != nil {
return uiConfig, err
}
// Lookup table columns
lookupColumns, lookupColumnsOrder, err := getLookupColumns(config)
if err != nil {
return uiConfig, err
}
// Get rejections and reasons
rejections, err := getRoutesRejections(config)
if err != nil {
return uiConfig, err
}
noexports, err := getRoutesNoexports(config)
if err != nil {
return uiConfig, err
}
// Get reject candidates
rejectCandidates, _ := getRejectCandidatesConfig(config)
// RPKI filter config
rpki, err := getRpkiConfig(config)
if err != nil {
return uiConfig, err
}
// Theme configuration: Theming is optional, if no settings
// are found, it will be ignored
themeConfig := getThemeConfig(config)
// Pagination
paginationConfig := getPaginationConfig(config)
// Make config
uiConfig = UiConfig{
RoutesColumns: routesColumns,
RoutesColumnsOrder: routesColumnsOrder,
NeighboursColumns: neighboursColumns,
NeighboursColumnsOrder: neighboursColumnsOrder,
LookupColumns: lookupColumns,
LookupColumnsOrder: lookupColumnsOrder,
RoutesRejections: rejections,
RoutesNoexports: noexports,
RoutesRejectCandidates: rejectCandidates,
BgpCommunities: getBgpCommunities(config),
Rpki: rpki,
Theme: themeConfig,
Pagination: paginationConfig,
}
return uiConfig, nil
}
func getSources(config *ini.File) ([]*SourceConfig, error) {
sources := []*SourceConfig{}
order := 0
sourceSections := config.ChildSections("source")
for _, section := range sourceSections {
if !isSourceBase(section) {
continue
}
// Derive source-id from name
sourceId := section.Name()[len("source:"):]
// Try to get child configs and determine
// Source type
sourceConfigSections := section.ChildSections()
if len(sourceConfigSections) == 0 {
// This source has no configured backend
return sources, fmt.Errorf("%s has no backend configuration", section.Name())
}
if len(sourceConfigSections) > 1 {
// The source is ambiguous
return sources, fmt.Errorf("%s has ambigous backends", section.Name())
}
// Configure backend
backendConfig := sourceConfigSections[0]
backendType := getBackendType(backendConfig)
if backendType == SOURCE_UNKNOWN {
return sources, fmt.Errorf("%s has an unsupported backend", section.Name())
}
// Make config
sourceName := section.Key("name").MustString("Unknown Source")
sourceGroup := section.Key("group").MustString("")
sourceBlackholes := TrimmedStringList(
section.Key("blackholes").MustString(""))
config := &SourceConfig{
Id: sourceId,
Order: order,
Name: sourceName,
Group: sourceGroup,
Blackholes: sourceBlackholes,
Type: backendType,
}
// Set backend
switch backendType {
case SOURCE_BIRDWATCHER:
sourceType := backendConfig.Key("type").MustString("")
peerTablePrefix := backendConfig.Key("peer_table_prefix").MustString("T")
pipeProtocolPrefix := backendConfig.Key("pipe_protocol_prefix").MustString("M")
if sourceType != "single_table" &&
sourceType != "multi_table" {
log.Fatal("Configuration error (birdwatcher source) unknown birdwatcher type:", sourceType)
}
log.Println("Adding birdwatcher source of type", sourceType,
"with peer_table_prefix", peerTablePrefix,
"and pipe_protocol_prefix", pipeProtocolPrefix)
c := birdwatcher.Config{
Id: config.Id,
Name: config.Name,
Timezone: "UTC",
ServerTime: "2006-01-02T15:04:05.999999999Z07:00",
ServerTimeShort: "2006-01-02",
ServerTimeExt: "Mon, 02 Jan 2006 15:04:05 -0700",
Type: sourceType,
PeerTablePrefix: peerTablePrefix,
PipeProtocolPrefix: pipeProtocolPrefix,
}
backendConfig.MapTo(&c)
config.Birdwatcher = c
case SOURCE_GOBGP:
c := gobgp.Config{
Id: config.Id,
Name: config.Name,
}
backendConfig.MapTo(&c)
config.GoBGP = c
}
// Add to list of sources
sources = append(sources, config)
order++
}
return sources, nil
}
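// Illustrative sketch (not part of the original file): the source section
// layout getSources expects, shown with example values only.
//
//    [source.rs1-example]
//    name = rs1.example.com
//    group = IPv4
//    blackholes = 192.0.2.1
//
//    [source.rs1-example.birdwatcher]
//    api = http://rs1.example.com:29184/
//    type = multi_table
//
// Exactly one child section per source selects the backend; its suffix
// ("birdwatcher" or "gobgp") is matched by getBackendType.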
// Try to load config files as specified in the files
// list. For example:
//
// ./etc/alice-lg/alice.conf
// /etc/alice-lg/alice.conf
// ./etc/alice-lg/alice.local.conf
//
func loadConfig(file string) (*Config, error) {
// Try to get config file, fallback to alternatives
file, err := getConfigFile(file)
if err != nil {
return nil, err
}
// Load configuration, but handle bgp communities section
// with our own parser
parsedConfig, err := ini.LoadSources(ini.LoadOptions{
UnparseableSections: []string{
"bgp_communities",
"rejection_reasons",
"noexport_reasons",
},
}, file)
if err != nil {
return nil, err
}
// Map sections
server := ServerConfig{}
parsedConfig.Section("server").MapTo(&server)
housekeeping := HousekeepingConfig{}
parsedConfig.Section("housekeeping").MapTo(&housekeeping)
// Get all sources
sources, err := getSources(parsedConfig)
if err != nil {
return nil, err
}
// Get UI configurations
ui, err := getUiConfig(parsedConfig)
if err != nil {
return nil, err
}
config := &Config{
Server: server,
Housekeeping: housekeeping,
Ui: ui,
Sources: sources,
File: file,
}
return config, nil
}
// Get source instance from config
func (self *SourceConfig) getInstance() sources.Source {
if self.instance != nil {
return self.instance
}
var instance sources.Source
switch self.Type {
case SOURCE_BIRDWATCHER:
instance = birdwatcher.NewBirdwatcher(self.Birdwatcher)
case SOURCE_GOBGP:
instance = gobgp.NewGoBGP(self.GoBGP)
}
self.instance = instance
return instance
}
// Get configuration file with fallbacks
func getConfigFile(filename string) (string, error) {
// Check if requested file is present
if _, err := os.Stat(filename); os.IsNotExist(err) {
// Fall back to local filename
filename = ".." + filename
}
if _, err := os.Stat(filename); os.IsNotExist(err) {
filename = strings.Replace(filename, ".conf", ".local.conf", 1)
}
if _, err := os.Stat(filename); os.IsNotExist(err) {
return "not_found", fmt.Errorf("could not find any configuration file")
}
return filename, nil
}


@ -1,176 +0,0 @@
package main
import (
"testing"
)
// Test configuration loading and parsing
// using the default config
func TestLoadConfigs(t *testing.T) {
config, err := loadConfig("../etc/alice-lg/alice.example.conf")
if err != nil {
t.Error("Could not load test config:", err)
}
if config.Server.Listen == "" {
t.Error("Listen string not present.")
}
if len(config.Ui.RoutesColumns) == 0 {
t.Error("Route columns settings missing")
}
if len(config.Ui.RoutesRejections.Reasons) == 0 {
t.Error("Rejection reasons missing")
}
// Check communities
label, err := config.Ui.BgpCommunities.Lookup("1:23")
if err != nil {
t.Error(err)
}
if label != "some tag" {
t.Error("expcted to find example community 1:23 with 'some tag'",
"but got:", label)
}
}
func TestSourceConfigDefaultsOverride(t *testing.T) {
config, err := loadConfig("../etc/alice-lg/alice.example.conf")
if err != nil {
t.Error("Could not load test config:", err)
}
// Get sources
rs1 := config.Sources[0]
rs2 := config.Sources[1]
// Source 1 should be on default time
// Source 2 should have an override
// For now it should be sufficient to test if
// the serverTime(rs1) != serverTime(rs2)
if rs1.Birdwatcher.ServerTime == rs2.Birdwatcher.ServerTime {
t.Error("Server times should be different between",
"source 1 and 2 in example configuration",
"(alice.example.conf)")
}
// Check presence of timezone, default: UTC (rs1)
// override: Europe/Brussels (rs2)
if rs1.Birdwatcher.Timezone != "UTC" {
t.Error("Expected RS1 Timezone to be default: UTC")
}
if rs2.Birdwatcher.Timezone != "Europe/Brussels" {
t.Error("Expected 'Europe/Brussels', got", rs2.Birdwatcher.Timezone)
}
}
func TestRejectAndNoexportReasons(t *testing.T) {
config, err := loadConfig("../etc/alice-lg/alice.example.conf")
if err != nil {
t.Error("Could not load test config:", err)
}
// Rejection reasons
description, err := config.Ui.RoutesRejections.Reasons.Lookup("23:42:1")
if err != nil {
t.Error(err)
}
if description != "Some made up reason" {
t.Error("Unexpected reason for 23:42:1 -", description)
}
// Noexport reasons
description, err = config.Ui.RoutesNoexports.Reasons.Lookup("23:46:1")
if err != nil {
t.Error(err)
}
if description != "Some other made up reason" {
t.Error("Unexpected reason for 23:46:1 -", description)
}
}
func TestBlackholeParsing(t *testing.T) {
config, err := loadConfig("../etc/alice-lg/alice.example.conf")
if err != nil {
t.Error("Could not load test config:", err)
}
// Get first source
rs1 := config.Sources[0]
if len(rs1.Blackholes) != 2 {
t.Error("Rs1 should have configured 2 blackholes. Got:", rs1.Blackholes)
return
}
if rs1.Blackholes[0] != "10.23.6.666" {
t.Error("Unexpected blackhole, got:", rs1.Blackholes[0])
}
}
func TestOwnASN(t *testing.T) {
config, err := loadConfig("../etc/alice-lg/alice.example.conf")
if err != nil {
t.Error("Could not load test config:", err)
}
if config.Server.Asn != 9033 {
t.Error("Expected a set server asn")
}
}
func TestRpkiConfig(t *testing.T) {
config, err := loadConfig("../etc/alice-lg/alice.example.conf")
if err != nil {
t.Error("Could not load test config:", err)
}
if len(config.Ui.Rpki.Valid) != 3 {
t.Error("Unexpected RPKI:VALID,", config.Ui.Rpki.Valid)
}
if len(config.Ui.Rpki.Invalid) != 4 {
t.Error("Unexpected RPKI:INVALID,", config.Ui.Rpki.Invalid)
return // We would fail hard later
}
// Check fallback
if config.Ui.Rpki.NotChecked[0] != "9033" {
t.Error(
"Expected NotChecked to fall back to defaults, got:",
config.Ui.Rpki.NotChecked,
)
}
// Check range postprocessing
if config.Ui.Rpki.Invalid[3] != "*" {
t.Error("Missing wildcard from config")
}
t.Log(config.Ui.Rpki)
}
func TestRejectCandidatesConfig(t *testing.T) {
config, err := loadConfig("../etc/alice-lg/alice.example.conf")
if err != nil {
t.Error("Could not load test config:", err)
return
}
t.Log(config.Ui.RoutesRejectCandidates.Communities)
description, err := config.Ui.RoutesRejectCandidates.Communities.Lookup("23:42:46")
if err != nil {
t.Error(err)
}
if description != "reject-candidate-3" {
t.Error("expected 23:42:46 to be a 'reject-candidate'")
}
}


@ -1,34 +0,0 @@
package main
import (
"log"
"time"
"runtime/debug"
)
func Housekeeping(config *Config) {
for {
if config.Housekeeping.Interval > 0 {
time.Sleep(time.Duration(config.Housekeeping.Interval) * time.Minute)
} else {
time.Sleep(5 * time.Minute)
}
log.Println("Housekeeping started")
// Expire the caches
log.Println("Expiring caches")
for _, source := range config.Sources {
count := source.getInstance().ExpireCaches()
log.Println("Expired", count, "entries for source", source.Name)
}
if config.Housekeeping.ForceReleaseMemory {
// Trigger a GC and SCVG run
log.Println("Freeing memory")
debug.FreeOSMemory()
}
}
}


@ -1,69 +0,0 @@
package main
import (
"flag"
"log"
"net/http"
"github.com/julienschmidt/httprouter"
)
var AliceConfig *Config
var AliceRoutesStore *RoutesStore
var AliceNeighboursStore *NeighboursStore
func main() {
var err error
// Handle commandline parameters
configFilenameFlag := flag.String(
"config", "/etc/alice-lg/alice.conf",
"Alice looking glass configuration file",
)
flag.Parse()
// Load configuration
AliceConfig, err = loadConfig(*configFilenameFlag)
if err != nil {
log.Fatal(err)
}
// Say hi
printBanner()
log.Println("Using configuration:", AliceConfig.File)
// Setup local routes store
AliceRoutesStore = NewRoutesStore(AliceConfig)
if AliceConfig.Server.EnablePrefixLookup == true {
AliceRoutesStore.Start()
}
// Setup local neighbours store
AliceNeighboursStore = NewNeighboursStore(AliceConfig)
if AliceConfig.Server.EnablePrefixLookup == true {
AliceNeighboursStore.Start()
}
// Start the Housekeeping
go Housekeeping(AliceConfig)
// Setup request routing
router := httprouter.New()
// Serve static content
err = webRegisterAssets(AliceConfig.Ui, router)
if err != nil {
log.Fatal(err)
}
err = apiRegisterEndpoints(router)
if err != nil {
log.Fatal(err)
}
// Start http server
log.Fatal(http.ListenAndServe(AliceConfig.Server.Listen, router))
}


@ -1,284 +0,0 @@
package main
import (
"log"
"regexp"
"strconv"
"sync"
"time"
"github.com/alice-lg/alice-lg/backend/api"
)
var REGEX_MATCH_ASLOOKUP = regexp.MustCompile(`(?i)^AS(\d+)`)
type NeighboursIndex map[string]*api.Neighbour
type NeighboursStore struct {
neighboursMap map[string]NeighboursIndex
configMap map[string]*SourceConfig
statusMap map[string]StoreStatus
refreshInterval time.Duration
refreshNeighborStatus bool
sync.RWMutex
}
func NewNeighboursStore(config *Config) *NeighboursStore {
// Build source mapping
neighboursMap := make(map[string]NeighboursIndex)
configMap := make(map[string]*SourceConfig)
statusMap := make(map[string]StoreStatus)
for _, source := range config.Sources {
sourceId := source.Id
configMap[sourceId] = source
statusMap[sourceId] = StoreStatus{
State: STATE_INIT,
}
neighboursMap[sourceId] = make(NeighboursIndex)
}
// Set refresh interval, default to 5 minutes when
// interval is set to 0
refreshInterval := time.Duration(
config.Server.NeighboursStoreRefreshInterval) * time.Minute
if refreshInterval == 0 {
refreshInterval = time.Duration(5) * time.Minute
}
refreshNeighborStatus := config.Server.EnableNeighborsStatusRefresh
store := &NeighboursStore{
neighboursMap: neighboursMap,
statusMap: statusMap,
configMap: configMap,
refreshInterval: refreshInterval,
refreshNeighborStatus: refreshNeighborStatus,
}
return store
}
func (self *NeighboursStore) Start() {
log.Println("Starting local neighbours store")
log.Println("Neighbours Store refresh interval set to:", self.refreshInterval)
go self.init()
}
func (self *NeighboursStore) init() {
// Perform initial update
self.update()
// Initial logging
self.Stats().Log()
// Periodically update store
for {
time.Sleep(self.refreshInterval)
self.update()
}
}
func (self *NeighboursStore) SourceStatus(sourceId string) StoreStatus {
self.RLock()
status := self.statusMap[sourceId]
self.RUnlock()
return status
}
// Get state by source Id
func (self *NeighboursStore) SourceState(sourceId string) int {
status := self.SourceStatus(sourceId)
return status.State
}
// Update all neighbors
func (self *NeighboursStore) update() {
successCount := 0
errorCount := 0
t0 := time.Now()
for sourceId, _ := range self.neighboursMap {
// Get current state
if self.statusMap[sourceId].State == STATE_UPDATING {
continue // nothing to do here. really.
}
// Start updating
self.Lock()
self.statusMap[sourceId] = StoreStatus{
State: STATE_UPDATING,
}
self.Unlock()
sourceConfig := self.configMap[sourceId]
source := sourceConfig.getInstance()
neighboursRes, err := source.Neighbours()
if err != nil {
log.Println(
"Refreshing the neighbors store failed for:",
sourceConfig.Name, "(", sourceConfig.Id, ")",
"with:", err,
"- NEXT STATE: ERROR",
)
// That's sad.
self.Lock()
self.statusMap[sourceId] = StoreStatus{
State: STATE_ERROR,
LastError: err,
LastRefresh: time.Now(),
}
self.Unlock()
errorCount++
continue
}
neighbours := neighboursRes.Neighbours
// Update data
// Make neighbours index
index := make(NeighboursIndex)
for _, neighbour := range neighbours {
index[neighbour.Id] = neighbour
}
self.Lock()
self.neighboursMap[sourceId] = index
// Update state
self.statusMap[sourceId] = StoreStatus{
LastRefresh: time.Now(),
State: STATE_READY,
}
self.Unlock()
successCount++
}
refreshDuration := time.Since(t0)
log.Println(
"Refreshed neighbors store for", successCount, "of", successCount+errorCount,
"sources with", errorCount, "error(s) in", refreshDuration,
)
}
func (self *NeighboursStore) GetNeighborsAt(sourceId string) api.Neighbours {
self.RLock()
neighborsIdx := self.neighboursMap[sourceId]
self.RUnlock()
var neighborsStatus map[string]api.NeighbourStatus
if self.refreshNeighborStatus {
sourceConfig := self.configMap[sourceId]
source := sourceConfig.getInstance()
neighborsStatusData, err := source.NeighboursStatus()
if err == nil {
neighborsStatus = make(map[string]api.NeighbourStatus, len(neighborsStatusData.Neighbours))
for _, neighbor := range neighborsStatusData.Neighbours {
neighborsStatus[neighbor.Id] = *neighbor
}
}
}
neighbors := make(api.Neighbours, 0, len(neighborsIdx))
for _, neighbor := range neighborsIdx {
if self.refreshNeighborStatus {
if _, ok := neighborsStatus[neighbor.Id]; ok {
self.Lock()
neighbor.State = neighborsStatus[neighbor.Id].State
self.Unlock()
}
}
neighbors = append(neighbors, neighbor)
}
return neighbors
}
func (self *NeighboursStore) GetNeighbourAt(
sourceId string,
id string,
) *api.Neighbour {
// Lookup neighbour on RS
self.RLock()
neighborsIdx := self.neighboursMap[sourceId]
self.RUnlock()
return neighborsIdx[id]
}
func (self *NeighboursStore) LookupNeighboursAt(
sourceId string,
query string,
) api.Neighbours {
results := api.Neighbours{}
self.RLock()
neighbours := self.neighboursMap[sourceId]
self.RUnlock()
asn := -1
if REGEX_MATCH_ASLOOKUP.MatchString(query) {
groups := REGEX_MATCH_ASLOOKUP.FindStringSubmatch(query)
if a, err := strconv.Atoi(groups[1]); err == nil {
asn = a
}
}
for _, neighbour := range neighbours {
if asn >= 0 && neighbour.Asn == asn { // only executed if valid AS query is detected
results = append(results, neighbour)
} else if ContainsCi(neighbour.Description, query) {
results = append(results, neighbour)
} else {
continue
}
}
return results
}
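// Illustrative sketch (not part of the original file): queries matching
// REGEX_MATCH_ASLOOKUP (e.g. "AS2342") are compared against the
// neighbour ASN, anything else falls back to a case-insensitive match on
// the description. The source id and query are example values.
func exampleLookupByAsn(store *NeighboursStore) api.Neighbours {
    return store.LookupNeighboursAt("rs1", "AS2342")
}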
func (self *NeighboursStore) LookupNeighbours(
query string,
) api.NeighboursLookupResults {
// Create empty result set
results := make(api.NeighboursLookupResults)
for sourceId, _ := range self.neighboursMap {
results[sourceId] = self.LookupNeighboursAt(sourceId, query)
}
return results
}
// Build some stats for monitoring
func (self *NeighboursStore) Stats() NeighboursStoreStats {
totalNeighbours := 0
rsStats := []RouteServerNeighboursStats{}
self.RLock()
for sourceId, neighbours := range self.neighboursMap {
status := self.statusMap[sourceId]
totalNeighbours += len(neighbours)
serverStats := RouteServerNeighboursStats{
Name: self.configMap[sourceId].Name,
State: stateToString(status.State),
Neighbours: len(neighbours),
UpdatedAt: status.LastRefresh,
}
rsStats = append(rsStats, serverStats)
}
self.RUnlock()
storeStats := NeighboursStoreStats{
TotalNeighbours: totalNeighbours,
RouteServers: rsStats,
}
return storeStats
}


@ -1,160 +0,0 @@
package main
import (
"github.com/alice-lg/alice-lg/backend/api"
"sort"
"testing"
)
/*
Start the global neighbours store,
because the route store in the tests have
this as a dependency.
*/
func startTestNeighboursStore() {
store := makeTestNeighboursStore()
AliceNeighboursStore = store
}
/*
Make a store and populate it with data
*/
func makeTestNeighboursStore() *NeighboursStore {
// Populate neighbours
rs1 := NeighboursIndex{
"ID2233_AS2342": &api.Neighbour{
Id: "ID2233_AS2342",
Description: "PEER AS2342 192.9.23.42 Customer Peer 1",
},
"ID2233_AS2343": &api.Neighbour{
Id: "ID2233_AS2343",
Description: "PEER AS2343 192.9.23.43 Different Peer 1",
},
"ID2233_AS2344": &api.Neighbour{
Id: "ID2233_AS2344",
Description: "PEER AS2344 192.9.23.44 3rd Peer from the sun",
},
}
rs2 := NeighboursIndex{
"ID2233_AS2342": &api.Neighbour{
Id: "ID2233_AS2342",
Description: "PEER AS2342 192.9.23.42 Customer Peer 1",
},
"ID2233_AS4223": &api.Neighbour{
Id: "ID2233_AS4223",
Description: "PEER AS4223 192.9.42.23 Cloudfoo Inc.",
},
}
// Create store
store := &NeighboursStore{
neighboursMap: map[string]NeighboursIndex{
"rs1": rs1,
"rs2": rs2,
},
statusMap: map[string]StoreStatus{
"rs1": StoreStatus{
State: STATE_READY,
},
"rs2": StoreStatus{
State: STATE_INIT,
},
},
}
return store
}
func TestGetSourceState(t *testing.T) {
store := makeTestNeighboursStore()
if store.SourceState("rs1") != STATE_READY {
t.Error("Expected Source(1) to be STATE_READY")
}
if store.SourceState("rs2") == STATE_READY {
t.Error("Expected Source(2) to be NOT STATE_READY")
}
}
func TestGetNeighbourAt(t *testing.T) {
store := makeTestNeighboursStore()
neighbour := store.GetNeighbourAt("rs1", "ID2233_AS2343")
if neighbour.Id != "ID2233_AS2343" {
t.Error("Expected another peer in GetNeighbourAt")
}
}
func TestGetNeighbors(t *testing.T) {
store := makeTestNeighboursStore()
neighbors := store.GetNeighborsAt("rs2")
if len(neighbors) != 2 {
t.Error("Expected 2 neighbors, got:", len(neighbors))
}
sort.Sort(neighbors)
if neighbors[0].Id != "ID2233_AS2342" {
t.Error("Expected neighbor: ID2233_AS2342, got:",
neighbors[0])
}
neighbors = store.GetNeighborsAt("rs3")
if len(neighbors) != 0 {
t.Error("Unknown source should have yielded zero results")
}
}
func TestNeighbourLookupAt(t *testing.T) {
store := makeTestNeighboursStore()
expected := []string{
"ID2233_AS2342",
"ID2233_AS2343",
}
neighbours := store.LookupNeighboursAt("rs1", "peer 1")
// Make index
index := NeighboursIndex{}
for _, n := range neighbours {
index[n.Id] = n
}
for _, id := range expected {
_, ok := index[id]
if !ok {
t.Error("Expected", id, "to be in result set")
}
}
}
func TestNeighbourLookup(t *testing.T) {
store := makeTestNeighboursStore()
// First result set: "Peer 1"
_ = store
results := store.LookupNeighbours("Cloudfoo")
// Peer should be present at RS2
neighbours, ok := results["rs2"]
if !ok {
t.Error("Lookup on rs2 unsuccessful.")
}
if len(neighbours) > 1 {
t.Error("Lookup should match exact 1 peer.")
}
n := neighbours[0]
if n.Id != "ID2233_AS4223" {
t.Error("Wrong peer in lookup response")
}
}


@ -1,384 +0,0 @@
package main
import (
"log"
"strings"
"sync"
"time"
"github.com/alice-lg/alice-lg/backend/api"
)
type RoutesStore struct {
routesMap map[string]*api.RoutesResponse
statusMap map[string]StoreStatus
configMap map[string]*SourceConfig
refreshInterval time.Duration
lastRefresh time.Time
sync.RWMutex
}
func NewRoutesStore(config *Config) *RoutesStore {
// Build mapping based on source instances
routesMap := make(map[string]*api.RoutesResponse)
statusMap := make(map[string]StoreStatus)
configMap := make(map[string]*SourceConfig)
for _, source := range config.Sources {
id := source.Id
configMap[id] = source
routesMap[id] = &api.RoutesResponse{}
statusMap[id] = StoreStatus{
State: STATE_INIT,
}
}
// Set refresh interval as duration, fall back to
// five minutes if no interval is set.
refreshInterval := time.Duration(
config.Server.RoutesStoreRefreshInterval) * time.Minute
if refreshInterval == 0 {
refreshInterval = time.Duration(5) * time.Minute
}
store := &RoutesStore{
routesMap: routesMap,
statusMap: statusMap,
configMap: configMap,
refreshInterval: refreshInterval,
}
return store
}
func (self *RoutesStore) Start() {
log.Println("Starting local routes store")
log.Println("Routes Store refresh interval set to:", self.refreshInterval)
go self.init()
}
// Service initialization
func (self *RoutesStore) init() {
// Initial refresh
self.update()
// Initial stats
self.Stats().Log()
// Periodically update store
for {
time.Sleep(self.refreshInterval)
self.update()
}
}
// Update all routes
func (self *RoutesStore) update() {
successCount := 0
errorCount := 0
t0 := time.Now()
for sourceId, _ := range self.routesMap {
sourceConfig := self.configMap[sourceId]
source := sourceConfig.getInstance()
// Get current update state
if self.statusMap[sourceId].State == STATE_UPDATING {
continue // nothing to do here
}
// Set update state
self.Lock()
self.statusMap[sourceId] = StoreStatus{
State: STATE_UPDATING,
}
self.Unlock()
routes, err := source.AllRoutes()
if err != nil {
log.Println(
"Refreshing the routes store failed for:", sourceConfig.Name,
"(", sourceConfig.Id, ")",
"with:", err,
"- NEXT STATE: ERROR",
)
self.Lock()
self.statusMap[sourceId] = StoreStatus{
State: STATE_ERROR,
LastError: err,
LastRefresh: time.Now(),
}
self.Unlock()
errorCount++
continue
}
self.Lock()
// Update data
self.routesMap[sourceId] = routes
// Update state
self.statusMap[sourceId] = StoreStatus{
LastRefresh: time.Now(),
State: STATE_READY,
}
self.lastRefresh = time.Now().UTC()
self.Unlock()
successCount++
}
refreshDuration := time.Since(t0)
log.Println(
"Refreshed routes store for", successCount, "of", successCount+errorCount,
"sources with", errorCount, "error(s) in", refreshDuration,
)
}
// Calculate store insights
func (self *RoutesStore) Stats() RoutesStoreStats {
totalImported := 0
totalFiltered := 0
rsStats := []RouteServerRoutesStats{}
self.RLock()
for sourceId, routes := range self.routesMap {
status := self.statusMap[sourceId]
totalImported += len(routes.Imported)
totalFiltered += len(routes.Filtered)
serverStats := RouteServerRoutesStats{
Name: self.configMap[sourceId].Name,
Routes: RoutesStats{
Filtered: len(routes.Filtered),
Imported: len(routes.Imported),
},
State: stateToString(status.State),
UpdatedAt: status.LastRefresh,
}
rsStats = append(rsStats, serverStats)
}
self.RUnlock()
// Make stats
storeStats := RoutesStoreStats{
TotalRoutes: RoutesStats{
Imported: totalImported,
Filtered: totalFiltered,
},
RouteServers: rsStats,
}
return storeStats
}
// Provide cache status
func (self *RoutesStore) CachedAt() time.Time {
return self.lastRefresh
}
func (self *RoutesStore) CacheTtl() time.Time {
return self.lastRefresh.Add(self.refreshInterval)
}
// Lookup routes transform
func routeToLookupRoute(
source *SourceConfig,
state string,
route *api.Route,
) *api.LookupRoute {
// Get neighbour
neighbour := AliceNeighboursStore.GetNeighbourAt(source.Id, route.NeighbourId)
// Make route
lookup := &api.LookupRoute{
Id: route.Id,
NeighbourId: route.NeighbourId,
Neighbour: neighbour,
Routeserver: api.Routeserver{
Id: source.Id,
Name: source.Name,
},
State: state,
Network: route.Network,
Interface: route.Interface,
Gateway: route.Gateway,
Metric: route.Metric,
Bgp: route.Bgp,
Age: route.Age,
Type: route.Type,
Primary: route.Primary,
}
return lookup
}
// Routes filter
func filterRoutesByPrefix(
source *SourceConfig,
routes api.Routes,
prefix string,
state string,
) api.LookupRoutes {
results := api.LookupRoutes{}
for _, route := range routes {
// Naive filtering:
if strings.HasPrefix(strings.ToLower(route.Network), prefix) {
lookup := routeToLookupRoute(source, state, route)
results = append(results, lookup)
}
}
return results
}
func filterRoutesByNeighbourIds(
source *SourceConfig,
routes api.Routes,
neighbourIds []string,
state string,
) api.LookupRoutes {
results := api.LookupRoutes{}
for _, route := range routes {
// Filtering:
if MemberOf(neighbourIds, route.NeighbourId) == true {
lookup := routeToLookupRoute(source, state, route)
results = append(results, lookup)
}
}
return results
}
// Single RS lookup by neighbour id
func (self *RoutesStore) LookupNeighboursPrefixesAt(
sourceId string,
neighbourIds []string,
) chan api.LookupRoutes {
response := make(chan api.LookupRoutes)
go func() {
self.RLock()
source := self.configMap[sourceId]
routes := self.routesMap[sourceId]
self.RUnlock()
filtered := filterRoutesByNeighbourIds(
source,
routes.Filtered,
neighbourIds,
"filtered")
imported := filterRoutesByNeighbourIds(
source,
routes.Imported,
neighbourIds,
"imported")
var result api.LookupRoutes
result = append(filtered, imported...)
response <- result
}()
return response
}
// Single RS lookup
func (self *RoutesStore) LookupPrefixAt(
sourceId string,
prefix string,
) chan api.LookupRoutes {
response := make(chan api.LookupRoutes)
go func() {
self.RLock()
config := self.configMap[sourceId]
routes := self.routesMap[sourceId]
self.RUnlock()
filtered := filterRoutesByPrefix(
config,
routes.Filtered,
prefix,
"filtered")
imported := filterRoutesByPrefix(
config,
routes.Imported,
prefix,
"imported")
var result api.LookupRoutes
result = append(filtered, imported...)
response <- result
}()
return response
}
func (self *RoutesStore) LookupPrefix(prefix string) api.LookupRoutes {
result := api.LookupRoutes{}
responses := []chan api.LookupRoutes{}
// Normalize prefix to lower case
prefix = strings.ToLower(prefix)
// Dispatch
self.RLock()
for sourceId, _ := range self.routesMap {
res := self.LookupPrefixAt(sourceId, prefix)
responses = append(responses, res)
}
self.RUnlock()
// Collect
for _, response := range responses {
routes := <-response
result = append(result, routes...)
close(response)
}
return result
}
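// Illustrative note (not part of the original file): LookupPrefix fans a
// query out to one goroutine per source via LookupPrefixAt and then
// collects every per-source result channel, so a call such as
// AliceRoutesStore.LookupPrefix("193.200.") (example value) returns the
// merged routes from all configured route servers.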
func (self *RoutesStore) LookupPrefixForNeighbours(
neighbours api.NeighboursLookupResults,
) api.LookupRoutes {
result := api.LookupRoutes{}
responses := []chan api.LookupRoutes{}
// Dispatch
for sourceId, locals := range neighbours {
lookupNeighbourIds := []string{}
for _, n := range locals {
lookupNeighbourIds = append(lookupNeighbourIds, n.Id)
}
res := self.LookupNeighboursPrefixesAt(sourceId, lookupNeighbourIds)
responses = append(responses, res)
}
// Collect
for _, response := range responses {
routes := <-response
result = append(result, routes...)
close(response)
}
return result
}


@ -1,224 +0,0 @@
package main
import (
"log"
"os"
"strings"
"testing"
"encoding/json"
"io/ioutil"
"github.com/alice-lg/alice-lg/backend/api"
"github.com/alice-lg/alice-lg/backend/sources/birdwatcher"
)
//
// API Test Helpers
//
func loadTestRoutesResponse() *api.RoutesResponse {
file, err := os.Open("testdata/api/routes_response.json")
if err != nil {
log.Panic("could not load test data:", err)
}
defer file.Close()
data, err := ioutil.ReadAll(file)
if err != nil {
log.Panic("could not read test data:", err)
}
response := &api.RoutesResponse{}
err = json.Unmarshal(data, &response)
if err != nil {
log.Panic("could not unmarshal response test data:", err)
}
return response
}
/*
Check for presence of network in result set
*/
func testCheckPrefixesPresence(prefixes, resultset []string, t *testing.T) {
// Check prefixes
presence := map[string]bool{}
for _, prefix := range prefixes {
presence[prefix] = false
}
for _, prefix := range resultset {
// Check if prefixes are all accounted for
for net, _ := range presence {
if prefix == net {
presence[net] = true
}
}
}
for net, present := range presence {
if present == false {
t.Error(net, "not found in result set")
}
}
}
//
// Route Store Tests
//
func makeTestRoutesStore() *RoutesStore {
rs1RoutesResponse := loadTestRoutesResponse()
// Build mapping based on source instances:
// rs : <response>
statusMap := make(map[string]StoreStatus)
routesMap := map[string]*api.RoutesResponse{
"rs1": rs1RoutesResponse,
}
configMap := map[string]*SourceConfig{
"rs1": &SourceConfig{
Id: "rs1",
Name: "rs1.test",
Type: SOURCE_BIRDWATCHER,
Birdwatcher: birdwatcher.Config{
Api: "http://localhost:2342",
Timezone: "UTC",
ServerTime: "2006-01-02T15:04:05",
ServerTimeShort: "2006-01-02",
ServerTimeExt: "Mon, 02 Jan 2006 15:04: 05 -0700",
},
},
}
store := &RoutesStore{
routesMap: routesMap,
statusMap: statusMap,
configMap: configMap,
}
return store
}
func TestRoutesStoreStats(t *testing.T) {
store := makeTestRoutesStore()
stats := store.Stats()
// Check total routes
// There should be 8 imported, and 1 filtered route
if stats.TotalRoutes.Imported != 8 {
t.Error(
"expected 8 imported routes, got:",
stats.TotalRoutes.Imported,
)
}
if stats.TotalRoutes.Filtered != 1 {
t.Error(
"expected 1 filtered route, got:",
stats.TotalRoutes.Filtered,
)
}
}
func TestLookupPrefixAt(t *testing.T) {
startTestNeighboursStore()
store := makeTestRoutesStore()
query := "193.200."
results := store.LookupPrefixAt("rs1", query)
prefixes := <-results
// Check results
for _, prefix := range prefixes {
if strings.HasPrefix(prefix.Network, query) == false {
t.Error(
"All network addresses should start with the",
"queried prefix",
)
}
}
}
func TestLookupPrefix(t *testing.T) {
startTestNeighboursStore()
store := makeTestRoutesStore()
query := "193.200."
results := store.LookupPrefix(query)
if len(results) == 0 {
t.Error("Expected lookup results. None present.")
return
}
// Check results
for _, prefix := range results {
if strings.HasPrefix(prefix.Network, query) == false {
t.Error(
"All network addresses should start with the",
"queried prefix",
)
}
}
}
func TestLookupNeighboursPrefixesAt(t *testing.T) {
startTestNeighboursStore()
store := makeTestRoutesStore()
// Query
results := store.LookupNeighboursPrefixesAt("rs1", []string{
"ID163_AS31078",
})
// Check prefixes
presence := []string{
"193.200.230.0/24", "193.34.24.0/22", "31.220.136.0/21",
}
resultset := []string{}
for _, prefix := range <-results {
resultset = append(resultset, prefix.Network)
}
testCheckPrefixesPresence(presence, resultset, t)
}
func TestLookupPrefixForNeighbours(t *testing.T) {
// Construct a neighbours lookup result
neighbours := api.NeighboursLookupResults{
"rs1": api.Neighbours{
&api.Neighbour{
Id: "ID163_AS31078",
},
},
}
startTestNeighboursStore()
store := makeTestRoutesStore()
// Query
results := store.LookupPrefixForNeighbours(neighbours)
// We should have retrieved 8 prefixes.
if len(results) != 8 {
t.Error("Expected result lenght: 8, got:", len(results))
}
presence := []string{
"193.200.230.0/24", "193.34.24.0/22", "31.220.136.0/21",
}
resultset := []string{}
for _, prefix := range results {
resultset = append(resultset, prefix.Network)
}
testCheckPrefixesPresence(presence, resultset, t)
}


@ -1,63 +0,0 @@
package birdwatcher
// Http Birdwatcher Client
import (
"encoding/json"
"io/ioutil"
"net/http"
"time"
)
type ClientResponse map[string]interface{}
type Client struct {
Api string
}
func NewClient(api string) *Client {
client := &Client{
Api: api,
}
return client
}
// Make API request, parse response and return map or error
func (self *Client) Get(client *http.Client, url string) (ClientResponse, error) {
res, err := client.Get(url)
if err != nil {
return ClientResponse{}, err
}
// Read body
defer res.Body.Close()
payload, err := ioutil.ReadAll(res.Body)
if err != nil {
return ClientResponse{}, err
}
// Decode json payload
result := make(ClientResponse)
err = json.Unmarshal(payload, &result)
if err != nil {
return ClientResponse{}, err
}
return result, nil
}
// Make API request, parse response and return map or error
func (self *Client) GetJson(endpoint string) (ClientResponse, error) {
client := &http.Client{}
return self.Get(client, self.Api + endpoint)
}
// Make API request, parse response and return map or error
func (self *Client) GetJsonTimeout(timeout time.Duration, endpoint string) (ClientResponse, error) {
client := &http.Client{
Timeout: timeout,
}
return self.Get(client, self.Api + endpoint)
}
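// Illustrative sketch (not part of the original file): fetching an
// endpoint with a request timeout. The endpoint path and the timeout are
// example values.
func exampleFetchProtocols(client *Client) (ClientResponse, error) {
    return client.GetJsonTimeout(30*time.Second, "/protocols/bgp")
}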


@ -1,378 +0,0 @@
package birdwatcher
// Parsers and helpers
import (
"fmt"
"log"
"sort"
"strconv"
"time"
"github.com/alice-lg/alice-lg/backend/api"
)
// Convert server time string to time
func parseServerTime(value interface{}, layout, timezone string) (time.Time, error) {
svalue, ok := value.(string)
if !ok {
return time.Time{}, nil
}
loc, err := time.LoadLocation(timezone)
if err != nil {
return time.Time{}, err
}
t, err := time.ParseInLocation(layout, svalue, loc)
if err != nil {
return time.Time{}, err
}
return t.UTC(), nil
}
// Make api status from response:
// The api status is always included in a birdwatcher response
func parseApiStatus(bird ClientResponse, config Config) (api.ApiStatus, error) {
birdApi, ok := bird["api"].(map[string]interface{})
if !ok {
// Define error status
status := api.ApiStatus{
Version: "unknown / error",
ResultFromCache: false,
Ttl: time.Now(),
}
// Try to retrieve the real error from server
birdErr, ok := bird["error"].(string)
if !ok {
// Unknown error
return status, fmt.Errorf("Invalid API response received from server")
}
return status, fmt.Errorf("%s", birdErr)
}
// Parse TTL
ttl, err := parseServerTime(
bird["ttl"],
config.ServerTime,
config.Timezone,
)
if err != nil {
return api.ApiStatus{}, err
}
// Parse Cache Status
cacheStatus, _ := parseCacheStatus(birdApi, config)
status := api.ApiStatus{
Version: birdApi["Version"].(string),
ResultFromCache: birdApi["result_from_cache"].(bool),
Ttl: ttl,
CacheStatus: cacheStatus,
}
return status, nil
}
// Parse cache status from api response
func parseCacheStatus(cacheStatus map[string]interface{}, config Config) (api.CacheStatus, error) {
cache, ok := cacheStatus["cache_status"].(map[string]interface{})
if !ok {
return api.CacheStatus{}, fmt.Errorf("Invalid Cache Status")
}
cachedAt, ok := cache["cached_at"].(map[string]interface{})
if !ok {
return api.CacheStatus{}, fmt.Errorf("Invalid Cache Status")
}
cachedAtTime, err := parseServerTime(cachedAt["date"], config.ServerTime, config.Timezone)
if err != nil {
return api.CacheStatus{}, err
}
status := api.CacheStatus{
CachedAt: cachedAtTime,
// We omit OrigTTL for now...
}
return status, nil
}
// Parse birdwatcher status
func parseBirdwatcherStatus(bird ClientResponse, config Config) (api.Status, error) {
birdStatus := bird["status"].(map[string]interface{})
// Get special fields
serverTime, _ := parseServerTime(
birdStatus["current_server"],
config.ServerTimeShort,
config.Timezone,
)
lastReboot, _ := parseServerTime(
birdStatus["last_reboot"],
config.ServerTimeShort,
config.Timezone,
)
if config.ShowLastReboot == false {
lastReboot = time.Time{}
}
lastReconfig, _ := parseServerTime(
birdStatus["last_reconfig"],
config.ServerTimeExt,
config.Timezone,
)
// Make status response
status := api.Status{
ServerTime: serverTime,
LastReboot: lastReboot,
LastReconfig: lastReconfig,
Backend: "bird",
Version: mustString(birdStatus["version"], "unknown"),
Message: mustString(birdStatus["message"], "unknown"),
RouterId: mustString(birdStatus["router_id"], "unknown"),
}
return status, nil
}
// Parse neighbour uptime
func parseRelativeServerTime(uptime interface{}, config Config) time.Duration {
serverTime, _ := parseServerTime(uptime, config.ServerTimeShort, config.Timezone)
return time.Since(serverTime)
}
// Parse neighbours response
func parseNeighbours(bird ClientResponse, config Config) (api.Neighbours, error) {
neighbours := api.Neighbours{}
protocols := bird["protocols"].(map[string]interface{})
// Iterate over protocols map:
for protocolId, proto := range protocols {
protocol := proto.(map[string]interface{})
routes := protocol["routes"].(map[string]interface{})
uptime := parseRelativeServerTime(protocol["state_changed"], config)
lastError := mustString(protocol["last_error"], "")
routesReceived := float64(0)
if routes != nil {
if _, ok := routes["imported"]; ok {
routesReceived = routesReceived + routes["imported"].(float64)
}
if _, ok := routes["filtered"]; ok {
routesReceived = routesReceived + routes["filtered"].(float64)
}
}
neighbour := &api.Neighbour{
Id: protocolId,
Address: mustString(protocol["neighbor_address"], "error"),
Asn: mustInt(protocol["neighbor_as"], 0),
State: mustString(protocol["state"], "unknown"),
Description: mustString(protocol["description"], "no description"),
//TODO make these changes configurable
RoutesReceived: mustInt(routesReceived, 0),
RoutesAccepted: mustInt(routes["imported"], 0),
RoutesFiltered: mustInt(routes["filtered"], 0),
RoutesExported: mustInt(routes["exported"], 0), //TODO protocol_exported?
RoutesPreferred: mustInt(routes["preferred"], 0),
Uptime: uptime,
LastError: lastError,
Details: protocol,
}
neighbours = append(neighbours, neighbour)
}
sort.Sort(neighbours)
return neighbours, nil
}
// Parse neighbours response
func parseNeighboursShort(bird ClientResponse, config Config) (api.NeighboursStatus, error) {
neighbours := api.NeighboursStatus{}
protocols := bird["protocols"].(map[string]interface{})
// Iterate over protocols map:
for protocolId, proto := range protocols {
protocol := proto.(map[string]interface{})
uptime := parseRelativeServerTime(protocol["since"], config)
neighbour := &api.NeighbourStatus{
Id: protocolId,
State: mustString(protocol["state"], "unknown"),
Since: uptime,
}
neighbours = append(neighbours, neighbour)
}
sort.Sort(neighbours)
return neighbours, nil
}
// Parse route bgp info
func parseRouteBgpInfo(data interface{}) api.BgpInfo {
bgpData, ok := data.(map[string]interface{})
if !ok {
// Info is missing
return api.BgpInfo{}
}
asPath := mustIntList(bgpData["as_path"])
communities := parseBgpCommunities(bgpData["communities"])
largeCommunities := parseBgpCommunities(bgpData["large_communities"])
extCommunities := parseExtBgpCommunities(bgpData["ext_communities"])
localPref, _ := strconv.Atoi(mustString(bgpData["local_pref"], "0"))
med, _ := strconv.Atoi(mustString(bgpData["med"], "0"))
bgp := api.BgpInfo{
Origin: mustString(bgpData["origin"], "unknown"),
AsPath: asPath,
NextHop: mustString(bgpData["next_hop"], "unknown"),
LocalPref: localPref,
Med: med,
Communities: communities,
ExtCommunities: extCommunities,
LargeCommunities: largeCommunities,
}
return bgp
}
// Extract bgp communities from response
func parseBgpCommunities(data interface{}) []api.Community {
communities := []api.Community{}
ldata, ok := data.([]interface{})
if !ok { // We don't have any
return []api.Community{}
}
for _, c := range ldata {
cdata := c.([]interface{})
community := api.Community{}
for _, cinfo := range cdata {
community = append(community, int(cinfo.(float64)))
}
communities = append(communities, community)
}
return communities
}
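// Illustrative sketch (not part of the original file): the decoded JSON
// shape this parser expects. The community values are examples only.
func exampleParseRawCommunities() []api.Community {
    // Two communities as they appear after JSON decoding into interface{}:
    raw := []interface{}{
        []interface{}{float64(9033), float64(666)},
        []interface{}{float64(65535), float64(0)},
    }
    // Yields api.Community{9033, 666} and api.Community{65535, 0}.
    return parseBgpCommunities(raw)
}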
// Extract extended communities
func parseExtBgpCommunities(data interface{}) []api.ExtCommunity {
communities := []api.ExtCommunity{}
ldata, ok := data.([]interface{})
if !ok { // We don't have any
return communities
}
for _, c := range ldata {
cdata := c.([]interface{})
if len(cdata) != 3 {
log.Println("Ignoring malformed ext community:", cdata)
continue
}
communities = append(communities, api.ExtCommunity{
cdata[0],
cdata[1],
cdata[2],
})
}
return communities
}
// Parse partial routes response
func parseRoutesData(birdRoutes []interface{}, config Config) api.Routes {
routes := api.Routes{}
for _, data := range birdRoutes {
rdata := data.(map[string]interface{})
age := parseRelativeServerTime(rdata["age"], config)
rtype := mustStringList(rdata["type"])
bgpInfo := parseRouteBgpInfo(rdata["bgp"])
route := &api.Route{
Id: mustString(rdata["network"], "unknown"),
NeighbourId: mustString(rdata["from_protocol"], "unknown neighbour"),
Network: mustString(rdata["network"], "unknown net"),
Interface: mustString(rdata["interface"], "unknown interface"),
Gateway: mustString(rdata["gateway"], "unknown gateway"),
Metric: mustInt(rdata["metric"], -1),
Primary: mustBool(rdata["primary"], false),
Age: age,
Type: rtype,
Bgp: bgpInfo,
Details: rdata,
}
routes = append(routes, route)
}
return routes
}
// Parse routes response
func parseRoutes(bird ClientResponse, config Config) (api.Routes, error) {
birdRoutes, ok := bird["routes"].([]interface{})
if !ok {
return api.Routes{}, fmt.Errorf("Routes response missing")
}
routes := parseRoutesData(birdRoutes, config)
// Sort routes
sort.Sort(routes)
return routes, nil
}
func parseRoutesDump(bird ClientResponse, config Config) (*api.RoutesResponse, error) {
result := &api.RoutesResponse{}
apiStatus, err := parseApiStatus(bird, config)
if err != nil {
return result, err
}
result.Api = apiStatus
// Fetch imported routes
importedRoutes, ok := bird["imported"].([]interface{})
if !ok {
return result, fmt.Errorf("Imported routes missing")
}
// Sort routes by network for faster querying
imported := parseRoutesData(importedRoutes, config)
sort.Sort(imported)
result.Imported = imported
// Fetch filtered routes
filteredRoutes, ok := bird["filtered"].([]interface{})
if !ok {
return result, fmt.Errorf("Filtered routes missing")
}
filtered := parseRoutesData(filteredRoutes, config)
sort.Sort(filtered)
result.Filtered = filtered
return result, nil
}

View File

@ -1,301 +0,0 @@
package birdwatcher
import (
"github.com/alice-lg/alice-lg/backend/api"
"github.com/alice-lg/alice-lg/backend/caches"
"github.com/alice-lg/alice-lg/backend/sources"
"fmt"
"sort"
"time"
)
type Birdwatcher interface {
sources.Source
}
type GenericBirdwatcher struct {
config Config
client *Client
// Caches: Neighbors
neighborsCache *caches.NeighborsCache
// Caches: Routes
routesRequiredCache *caches.RoutesCache
routesNotExportedCache *caches.RoutesCache
// Mutices:
routesFetchMutex *LockMap
}
func NewBirdwatcher(config Config) Birdwatcher {
client := NewClient(config.Api)
// Cache settings:
// TODO: Maybe read from config file
neighborsCacheDisable := false
routesCacheDisabled := false
routesCacheMaxSize := 128
// Initialize caches
neighborsCache := caches.NewNeighborsCache(neighborsCacheDisable)
routesRequiredCache := caches.NewRoutesCache(
routesCacheDisabled, routesCacheMaxSize)
routesNotExportedCache := caches.NewRoutesCache(
routesCacheDisabled, routesCacheMaxSize)
var birdwatcher Birdwatcher
if config.Type == "single_table" {
singleTableBirdwatcher := new(SingleTableBirdwatcher)
singleTableBirdwatcher.config = config
singleTableBirdwatcher.client = client
singleTableBirdwatcher.neighborsCache = neighborsCache
singleTableBirdwatcher.routesRequiredCache = routesRequiredCache
singleTableBirdwatcher.routesNotExportedCache = routesNotExportedCache
singleTableBirdwatcher.routesFetchMutex = NewLockMap()
birdwatcher = singleTableBirdwatcher
} else if config.Type == "multi_table" {
multiTableBirdwatcher := new(MultiTableBirdwatcher)
multiTableBirdwatcher.config = config
multiTableBirdwatcher.client = client
multiTableBirdwatcher.neighborsCache = neighborsCache
multiTableBirdwatcher.routesRequiredCache = routesRequiredCache
multiTableBirdwatcher.routesNotExportedCache = routesNotExportedCache
multiTableBirdwatcher.routesFetchMutex = NewLockMap()
birdwatcher = multiTableBirdwatcher
}
return birdwatcher
}
func (self *GenericBirdwatcher) filterProtocols(protocols map[string]interface{}, protocol string) map[string]interface{} {
response := make(map[string]interface{})
response["protocols"] = make(map[string]interface{})
for protocolId, protocolData := range protocols {
if protocolData.(map[string]interface{})["bird_protocol"] == protocol {
response["protocols"].(map[string]interface{})[protocolId] = protocolData
}
}
return response
}
func (self *GenericBirdwatcher) filterProtocolsBgp(bird ClientResponse) map[string]interface{} {
return self.filterProtocols(bird["protocols"].(map[string]interface{}), "BGP")
}
func (self *GenericBirdwatcher) filterProtocolsPipe(bird ClientResponse) map[string]interface{} {
return self.filterProtocols(bird["protocols"].(map[string]interface{}), "Pipe")
}
func (self *GenericBirdwatcher) filterRoutesByPeerOrLearntFrom(routes api.Routes, peer string, learntFrom string) api.Routes {
result_routes := make(api.Routes, 0, len(routes))
// Choose routes with next_hop == gateway of this neighbour
for _, route := range routes {
if (route.Gateway == peer) ||
(route.Gateway == learntFrom) ||
(route.Details["learnt_from"] == peer) {
result_routes = append(result_routes, route)
}
}
// Sort routes for deterministic ordering
sort.Sort(result_routes)
routes = result_routes
return routes
}
func (self *GenericBirdwatcher) filterRoutesByDuplicates(routes api.Routes, filterRoutes api.Routes) api.Routes {
result_routes := make(api.Routes, 0, len(routes))
routesMap := make(map[string]*api.Route) // for O(1) access
for _, route := range routes {
routesMap[route.Id] = route
}
// Remove routes from "routes" that are contained within filterRoutes
for _, filterRoute := range filterRoutes {
if _, ok := routesMap[filterRoute.Id]; ok {
delete(routesMap, filterRoute.Id)
}
}
for _, route := range routesMap {
result_routes = append(result_routes, route)
}
// Sort routes for deterministic ordering
sort.Sort(result_routes)
routes = result_routes
return routes
}
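A compact worked sketch (not in the original source; the route ids are made up) of the set arithmetic this helper performs:
received := api.Routes{&api.Route{Id: "10.0.0.0/24"}, &api.Route{Id: "10.0.1.0/24"}}
filtered := api.Routes{&api.Route{Id: "10.0.1.0/24"}}
imported := self.filterRoutesByDuplicates(received, filtered)
// imported now holds only 10.0.0.0/24: routes also present in the
// filtered set are dropped from the received set.
_ = imported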
func (self *GenericBirdwatcher) filterRoutesByNeighborId(routes api.Routes, neighborId string) api.Routes {
result_routes := make(api.Routes, 0, len(routes))
// Choose routes received from the protocol of this neighbour
for _, route := range routes {
if route.Details["from_protocol"] == neighborId {
result_routes = append(result_routes, route)
}
}
// Sort routes for deterministic ordering
sort.Sort(result_routes)
routes = result_routes
return routes
}
func (self *GenericBirdwatcher) fetchProtocolsShort() (*api.ApiStatus, map[string]interface{}, error) {
// Query birdwatcher
timeout := 2 * time.Second
if self.config.NeighborsRefreshTimeout > 0 {
timeout = time.Duration(self.config.NeighborsRefreshTimeout) * time.Second
}
bird, err := self.client.GetJsonTimeout(timeout, "/protocols/short?uncached=true")
if err != nil {
return nil, nil, err
}
// Use api status from first request
apiStatus, err := parseApiStatus(bird, self.config)
if err != nil {
return nil, nil, err
}
if _, ok := bird["protocols"]; !ok {
return nil, nil, fmt.Errorf("Failed to fetch protocols")
}
return &apiStatus, bird, nil
}
func (self *GenericBirdwatcher) ExpireCaches() int {
count := self.routesRequiredCache.Expire()
count += self.routesNotExportedCache.Expire()
return count
}
func (self *GenericBirdwatcher) Status() (*api.StatusResponse, error) {
// Query birdwatcher
bird, err := self.client.GetJson("/status")
if err != nil {
return nil, err
}
// Use api status from first request
apiStatus, err := parseApiStatus(bird, self.config)
if err != nil {
return nil, err
}
// Parse the status
birdStatus, err := parseBirdwatcherStatus(bird, self.config)
if err != nil {
return nil, err
}
response := &api.StatusResponse{
Api: apiStatus,
Status: birdStatus,
}
return response, nil
}
// Get live neighbor status
func (self *GenericBirdwatcher) NeighboursStatus() (*api.NeighboursStatusResponse, error) {
// Query birdwatcher
apiStatus, birdProtocols, err := self.fetchProtocolsShort()
if err != nil {
return nil, err
}
// Parse the neighbors short
neighbours, err := parseNeighboursShort(birdProtocols, self.config)
if err != nil {
return nil, err
}
response := &api.NeighboursStatusResponse{
Api: *apiStatus,
Neighbours: neighbours,
}
return response, nil // dereference for now
}
// Make routes lookup
func (self *GenericBirdwatcher) LookupPrefix(prefix string) (*api.RoutesLookupResponse, error) {
// Get RS info
rs := api.Routeserver{
Id: self.config.Id,
Name: self.config.Name,
}
// Query prefix on RS
bird, err := self.client.GetJson("/routes/prefix?prefix=" + prefix)
if err != nil {
return nil, err
}
// Parse API status
apiStatus, err := parseApiStatus(bird, self.config)
if err != nil {
return nil, err
}
// Parse routes
routes, err := parseRoutes(bird, self.config)
if err != nil {
return nil, err
}
// Add corresponding neighbour and source rs to result
results := api.LookupRoutes{}
for _, src := range routes {
// Okay. This is actually really hacky.
// A less bruteforce approach would be highly appreciated
route := &api.LookupRoute{
Id: src.Id,
Routeserver: rs,
NeighbourId: src.NeighbourId,
Network: src.Network,
Interface: src.Interface,
Gateway: src.Gateway,
Metric: src.Metric,
Bgp: src.Bgp,
Age: src.Age,
Type: src.Type,
Details: src.Details,
}
results = append(results, route)
}
// Make result
response := &api.RoutesLookupResponse{
Api: apiStatus,
Routes: results,
}
return response, nil
}

View File

@ -1,527 +0,0 @@
package birdwatcher
import (
"github.com/alice-lg/alice-lg/backend/api"
"strings"
"fmt"
"sort"
"log"
)
type MultiTableBirdwatcher struct {
GenericBirdwatcher
}
func (self *MultiTableBirdwatcher) getMasterPipeName(table string) string {
if strings.HasPrefix(table, self.config.PeerTablePrefix) {
return self.config.PipeProtocolPrefix + table[1:]
} else {
return ""
}
}
func (self *MultiTableBirdwatcher) parseProtocolToTableTree(bird ClientResponse) map[string]interface{} {
protocols := bird["protocols"].(map[string]interface{})
response := make(map[string]interface{})
for _, protocolData := range protocols {
protocol := protocolData.(map[string]interface{})
if protocol["bird_protocol"] == "BGP" {
table := protocol["table"].(string)
neighborAddress := protocol["neighbor_address"].(string)
if _, ok := response[table]; !ok {
response[table] = make(map[string]interface{})
}
if _, ok := response[table].(map[string]interface{})[neighborAddress]; !ok {
response[table].(map[string]interface{})[neighborAddress] = make(map[string]interface{})
}
response[table].(map[string]interface{})[neighborAddress] = protocol
}
}
return response
}
func (self *MultiTableBirdwatcher) fetchProtocols() (*api.ApiStatus, map[string]interface{}, error) {
// Query birdwatcher
bird, err := self.client.GetJson("/protocols")
if err != nil {
return nil, nil, err
}
// Use api status from first request
apiStatus, err := parseApiStatus(bird, self.config)
if err != nil {
return nil, nil, err
}
if _, ok := bird["protocols"]; !ok {
return nil, nil, fmt.Errorf("Failed to fetch protocols")
}
return &apiStatus, bird, nil
}
func (self *MultiTableBirdwatcher) fetchReceivedRoutes(neighborId string) (*api.ApiStatus, api.Routes, error) {
// Query birdwatcher
_, birdProtocols, err := self.fetchProtocols()
if err != nil {
return nil, nil, err
}
protocols := birdProtocols["protocols"].(map[string]interface{})
if _, ok := protocols[neighborId]; !ok {
return nil, nil, fmt.Errorf("Invalid Neighbor")
}
peer := protocols[neighborId].(map[string]interface{})["neighbor_address"].(string)
// Query birdwatcher
bird, err := self.client.GetJson("/routes/peer/" + peer)
if err != nil {
return nil, nil, err
}
// Use api status from first request
apiStatus, err := parseApiStatus(bird, self.config)
if err != nil {
return nil, nil, err
}
// Parse the routes
received, err := parseRoutes(bird, self.config)
if err != nil {
log.Println("WARNING Could not retrieve received routes:", err)
log.Println("Is the 'routes_peer' module active in birdwatcher?")
return &apiStatus, nil, err
}
return &apiStatus, received, nil
}
func (self *MultiTableBirdwatcher) fetchFilteredRoutes(neighborId string) (*api.ApiStatus, api.Routes, error) {
// Query birdwatcher
_, birdProtocols, err := self.fetchProtocols()
if err != nil {
return nil, nil, err
}
protocols := birdProtocols["protocols"].(map[string]interface{})
if _, ok := protocols[neighborId]; !ok {
return nil, nil, fmt.Errorf("Invalid Neighbor")
}
// Stage 1 filters
birdFiltered, err := self.client.GetJson("/routes/filtered/" + neighborId)
if err != nil {
log.Println("WARNING Could not retrieve filtered routes:", err)
log.Println("Is the 'routes_filtered' module active in birdwatcher?")
return nil, nil, err
}
// Use api status from first request
apiStatus, err := parseApiStatus(birdFiltered, self.config)
if err != nil {
return nil, nil, err
}
// Parse the routes
filtered := parseRoutesData(birdFiltered["routes"].([]interface{}), self.config)
// Stage 2 filters
table := protocols[neighborId].(map[string]interface{})["table"].(string)
pipeName := self.getMasterPipeName(table)
// If there is no pipe to master, there is nothing left to do
if pipeName == "" {
return &apiStatus, filtered, nil
}
// Query birdwatcher
birdPipeFiltered, err := self.client.GetJson("/routes/pipe/filtered/?table=" + table + "&pipe=" + pipeName)
if err != nil {
log.Println("WARNING Could not retrieve filtered routes:", err)
log.Println("Is the 'pipe_filtered' module active in birdwatcher?")
return &apiStatus, nil, err
}
// Parse the routes
pipeFiltered := parseRoutesData(birdPipeFiltered["routes"].([]interface{}), self.config)
// Sort routes for deterministic ordering
filtered = append(filtered, pipeFiltered...)
sort.Sort(filtered)
return &apiStatus, filtered, nil
}
func (self *MultiTableBirdwatcher) fetchNotExportedRoutes(neighborId string) (*api.ApiStatus, api.Routes, error) {
// Query birdwatcher
_, birdProtocols, err := self.fetchProtocols()
if err != nil {
return nil, nil, err
}
protocols := birdProtocols["protocols"].(map[string]interface{})
if _, ok := protocols[neighborId]; !ok {
return nil, nil, fmt.Errorf("Invalid Neighbor")
}
table := protocols[neighborId].(map[string]interface{})["table"].(string)
pipeName := self.getMasterPipeName(table)
// Query birdwatcher
bird, err := self.client.GetJson("/routes/noexport/" + pipeName)
if err != nil {
return nil, nil, err
}
// Use api status from first request
apiStatus, err := parseApiStatus(bird, self.config)
if err != nil {
return nil, nil, err
}
notExported, err := parseRoutes(bird, self.config)
if err != nil {
log.Println("WARNING Could not retrieve routes not exported:", err)
log.Println("Is the 'routes_noexport' module active in birdwatcher?")
}
return &apiStatus, notExported, nil
}
/*
RoutesRequired is a specialized request to fetch:
- RoutesExported and
- RoutesFiltered
from Birdwatcher. As the not exported routes can be very many
these are optional and can be loaded on demand using the
RoutesNotExported() API.
A route deduplication is applied.
*/
func (self *MultiTableBirdwatcher) fetchRequiredRoutes(neighborId string) (*api.RoutesResponse, error) {
// Allow only one concurrent request for this neighbor
// to our backend server.
self.routesFetchMutex.Lock(neighborId)
defer self.routesFetchMutex.Unlock(neighborId)
// Check if we have a cache hit
response := self.routesRequiredCache.Get(neighborId)
if response != nil {
return response, nil
}
// First: get routes received
apiStatus, receivedRoutes, err := self.fetchReceivedRoutes(neighborId)
if err != nil {
return nil, err
}
// Second: get routes filtered
_, filteredRoutes, err := self.fetchFilteredRoutes(neighborId)
if err != nil {
return nil, err
}
// Perform route deduplication
importedRoutes := api.Routes{}
if len(receivedRoutes) > 0 {
peer := receivedRoutes[0].Gateway
learntFrom := mustString(receivedRoutes[0].Details["learnt_from"], peer)
filteredRoutes = self.filterRoutesByPeerOrLearntFrom(filteredRoutes, peer, learntFrom)
importedRoutes = self.filterRoutesByDuplicates(receivedRoutes, filteredRoutes)
}
response = &api.RoutesResponse{
Api: *apiStatus,
Imported: importedRoutes,
Filtered: filteredRoutes,
}
// Cache result
self.routesRequiredCache.Set(neighborId, response)
return response, nil
}
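For orientation, a sketch (illustrative, not part of the original file; the protocol id is made up) of a caller that benefits from the cache and the per-neighbor lock:
res, err := self.fetchRequiredRoutes("R194_42") // hypothetical protocol id
if err != nil {
return nil, err
}
log.Println("imported:", len(res.Imported), "filtered:", len(res.Filtered))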
// Get neighbors from protocols
func (self *MultiTableBirdwatcher) Neighbours() (*api.NeighboursResponse, error) {
// Check if we hit the cache
response := self.neighborsCache.Get()
if response != nil {
return response, nil
}
// Query birdwatcher
apiStatus, birdProtocols, err := self.fetchProtocols()
if err != nil {
return nil, err
}
// Parse the neighbors
neighbours, err := parseNeighbours(self.filterProtocolsBgp(birdProtocols), self.config)
if err != nil {
return nil, err
}
pipes := self.filterProtocolsPipe(birdProtocols)["protocols"].(map[string]interface{})
tree := self.parseProtocolToTableTree(birdProtocols)
// Now determine the filtered route count for each neighbor and check if the pipe
// did filter anything
filtered := make(map[string]int)
for table := range tree {
allRoutesImported := int64(0)
pipeRoutesImported := int64(0)
// Sum up all routes from all peers for a table
for _, protocol := range tree[table].(map[string]interface{}) {
// Skip peers that are not up (start/down)
if protocol.(map[string]interface{})["state"].(string) != "up" {
continue
}
allRoutesImported += int64(protocol.(map[string]interface{})["routes"].(map[string]interface{})["imported"].(float64))
pipeName := self.getMasterPipeName(table)
if _, ok := pipes[pipeName]; ok {
if _, ok := pipes[pipeName].(map[string]interface{})["routes"].(map[string]interface{})["imported"]; ok {
pipeRoutesImported = int64(pipes[pipeName].(map[string]interface{})["routes"].(map[string]interface{})["imported"].(float64))
} else {
continue
}
} else {
continue
}
}
// If no routes were imported, there is nothing left to filter
if allRoutesImported == 0 {
continue
}
// If the pipe did not filter anything, there is nothing left to do
if pipeRoutesImported == allRoutesImported {
continue
}
if len(tree[table].(map[string]interface{})) == 1 {
// Single router
for _, protocol := range tree[table].(map[string]interface{}) {
filtered[protocol.(map[string]interface{})["protocol"].(string)] = int(allRoutesImported-pipeRoutesImported)
}
} else {
// Multiple routers
if pipeRoutesImported == 0 {
// 0 is a special condition, which means that the pipe did filter ALL routes of
// all peers. Therefore we already know the amount of filtered routes and don't have
// to query birdwatcher again.
for _, protocol := range tree[table].(map[string]interface{}) {
// Skip peers that are not up (start/down)
if protocol.(map[string]interface{})["state"].(string) != "up" {
continue
}
filtered[protocol.(map[string]interface{})["protocol"].(string)] = int(protocol.(map[string]interface{})["routes"].(map[string]interface{})["imported"].(float64))
}
} else {
// Otherwise the pipe did import at least some routes which means that
// we have to query birdwatcher to get the count for each peer.
for neighborAddress, protocol := range tree[table].(map[string]interface{}) {
table := protocol.(map[string]interface{})["table"].(string)
pipe := self.getMasterPipeName(table)
count, err := self.client.GetJson("/routes/pipe/filtered/count?table=" + table + "&pipe=" + pipe + "&address=" + neighborAddress)
if err != nil {
log.Println("WARNING Could not retrieve filtered routes count:", err)
log.Println("Is the 'pipe_filtered_count' module active in birdwatcher?")
return nil, err
}
if _, ok := count["routes"]; ok {
filtered[protocol.(map[string]interface{})["protocol"].(string)] = int(count["routes"].(float64))
}
}
}
}
}
// Update the results with the information about filtered routes from the pipe
for _, neighbor := range neighbours {
if pipeRoutesFiltered, ok := filtered[neighbor.Id]; ok {
neighbor.RoutesAccepted -= pipeRoutesFiltered
neighbor.RoutesFiltered += pipeRoutesFiltered
}
}
response = &api.NeighboursResponse{
Api: *apiStatus,
Neighbours: neighbours,
}
// Cache result
self.neighborsCache.Set(response)
return response, nil // dereference for now
}
// Get filtered and exported routes
func (self *MultiTableBirdwatcher) Routes(neighbourId string) (*api.RoutesResponse, error) {
response := &api.RoutesResponse{}
// Fetch required routes first (received and filtered)
// However: Store in separate cache for faster access
required, err := self.fetchRequiredRoutes(neighbourId)
if err != nil {
return nil, err
}
// Optional: NoExport
_, notExported, err := self.fetchNotExportedRoutes(neighbourId)
if err != nil {
return nil, err
}
response.Api = required.Api
response.Imported = required.Imported
response.Filtered = required.Filtered
response.NotExported = notExported
return response, nil
}
// Get all received routes
func (self *MultiTableBirdwatcher) RoutesReceived(neighborId string) (*api.RoutesResponse, error) {
response := &api.RoutesResponse{}
// Check if we have a cache hit
cachedRoutes := self.routesRequiredCache.Get(neighborId)
if cachedRoutes != nil {
response.Api = cachedRoutes.Api
response.Imported = cachedRoutes.Imported
return response, nil
}
// Fetch required routes first (received and filtered)
routes, err := self.fetchRequiredRoutes(neighborId)
if err != nil {
return nil, err
}
response.Api = routes.Api
response.Imported = routes.Imported
return response, nil
}
// Get all filtered routes
func (self *MultiTableBirdwatcher) RoutesFiltered(neighborId string) (*api.RoutesResponse, error) {
response := &api.RoutesResponse{}
// Check if we have a cache hit
cachedRoutes := self.routesRequiredCache.Get(neighborId)
if cachedRoutes != nil {
response.Api = cachedRoutes.Api
response.Filtered = cachedRoutes.Filtered
return response, nil
}
// Fetch required routes first (received and filtered)
routes, err := self.fetchRequiredRoutes(neighborId)
if err != nil {
return nil, err
}
response.Api = routes.Api
response.Filtered = routes.Filtered
return response, nil
}
// Get all not exported routes
func (self *MultiTableBirdwatcher) RoutesNotExported(neighborId string) (*api.RoutesResponse, error) {
// Check if we have a cache hit
response := self.routesNotExportedCache.Get(neighborId)
if response != nil {
return response, nil
}
// Fetch not exported routes
apiStatus, routes, err := self.fetchNotExportedRoutes(neighborId)
if err != nil {
return nil, err
}
response = &api.RoutesResponse{
Api: *apiStatus,
NotExported: routes,
}
// Cache result
self.routesNotExportedCache.Set(neighborId, response)
return response, nil
}
func (self *MultiTableBirdwatcher) AllRoutes() (*api.RoutesResponse, error) {
// Query birdwatcher
_, birdProtocols, err := self.fetchProtocols()
if err != nil {
return nil, err
}
// Fetch received routes first
birdImported, err := self.client.GetJson("/routes/table/master")
if err != nil {
return nil, err
}
// Use api status from first request
apiStatus, err := parseApiStatus(birdImported, self.config)
if err != nil {
return nil, err
}
response := &api.RoutesResponse{
Api: apiStatus,
}
// Parse the routes
imported := parseRoutesData(birdImported["routes"].([]interface{}), self.config)
// Sort routes for deterministic ordering
sort.Sort(imported)
response.Imported = imported
// Iterate over all the protocols and fetch the filtered routes for everyone
protocolsBgp := self.filterProtocolsBgp(birdProtocols)
for protocolId, protocolsData := range protocolsBgp["protocols"].(map[string]interface{}) {
peer := protocolsData.(map[string]interface{})["neighbor_address"].(string)
learntFrom := mustString(protocolsData.(map[string]interface{})["learnt_from"], peer)
// Fetch filtered routes
_, filtered, err := self.fetchFilteredRoutes(protocolId)
if err != nil {
continue
}
// Perform route deduplication
filtered = self.filterRoutesByPeerOrLearntFrom(filtered, peer, learntFrom)
response.Filtered = append(response.Filtered, filtered...)
}
return response, nil
}

View File

@ -1,315 +0,0 @@
package birdwatcher
import (
"github.com/alice-lg/alice-lg/backend/api"
"log"
"sort"
)
type SingleTableBirdwatcher struct {
GenericBirdwatcher
}
func (self *SingleTableBirdwatcher) fetchReceivedRoutes(neighborId string) (*api.ApiStatus, api.Routes, error) {
// Query birdwatcher
bird, err := self.client.GetJson("/routes/protocol/" + neighborId)
if err != nil {
return nil, nil, err
}
// Use api status from first request
apiStatus, err := parseApiStatus(bird, self.config)
if err != nil {
return nil, nil, err
}
// Parse the routes
received, err := parseRoutes(bird, self.config)
if err != nil {
log.Println("WARNING Could not retrieve received routes:", err)
log.Println("Is the 'routes_protocol' module active in birdwatcher?")
return &apiStatus, nil, err
}
return &apiStatus, received, nil
}
func (self *SingleTableBirdwatcher) fetchFilteredRoutes(neighborId string) (*api.ApiStatus, api.Routes, error) {
// Query birdwatcher
bird, err := self.client.GetJson("/routes/filtered/" + neighborId)
if err != nil {
return nil, nil, err
}
// Use api status from first request
apiStatus, err := parseApiStatus(bird, self.config)
if err != nil {
return nil, nil, err
}
// Parse the routes
filtered, err := parseRoutes(bird, self.config)
if err != nil {
log.Println("WARNING Could not retrieve filtered routes:", err)
log.Println("Is the 'routes_filtered' module active in birdwatcher?")
return &apiStatus, nil, err
}
return &apiStatus, filtered, nil
}
func (self *SingleTableBirdwatcher) fetchNotExportedRoutes(neighborId string) (*api.ApiStatus, api.Routes, error) {
// Query birdwatcher
bird, err := self.client.GetJson("/routes/noexport/" + neighborId)
if err != nil {
return nil, nil, err
}
// Use api status from first request
apiStatus, err := parseApiStatus(bird, self.config)
if err != nil {
return nil, nil, err
}
// Parse the routes
notExported, err := parseRoutes(bird, self.config)
if err != nil {
log.Println("WARNING Could not retrieve routes not exported:", err)
log.Println("Is the 'routes_noexport' module active in birdwatcher?")
}
return &apiStatus, notExported, nil
}
/*
RoutesRequired is a specialized request to fetch:
- RoutesExported and
- RoutesFiltered
from Birdwatcher. As the not exported routes can be very many
these are optional and can be loaded on demand using the
RoutesNotExported() API.
A route deduplication is applied.
*/
func (self *SingleTableBirdwatcher) fetchRequiredRoutes(neighborId string) (*api.RoutesResponse, error) {
// Allow only one concurrent request for this neighbor
// to our backend server.
self.routesFetchMutex.Lock(neighborId)
defer self.routesFetchMutex.Unlock(neighborId)
// Check if we have a cache hit
response := self.routesRequiredCache.Get(neighborId)
if response != nil {
return response, nil
}
// First: get routes received
apiStatus, receivedRoutes, err := self.fetchReceivedRoutes(neighborId)
if err != nil {
return nil, err
}
// Second: get routes filtered
_, filteredRoutes, err := self.fetchFilteredRoutes(neighborId)
if err != nil {
return nil, err
}
// Perform route deduplication
importedRoutes := api.Routes{}
if len(receivedRoutes) > 0 {
peer := receivedRoutes[0].Gateway
learntFrom := mustString(receivedRoutes[0].Details["learnt_from"], peer)
filteredRoutes = self.filterRoutesByPeerOrLearntFrom(filteredRoutes, peer, learntFrom)
importedRoutes = self.filterRoutesByDuplicates(receivedRoutes, filteredRoutes)
}
response = &api.RoutesResponse{
Api: *apiStatus,
Imported: importedRoutes,
Filtered: filteredRoutes,
}
// Cache result
self.routesRequiredCache.Set(neighborId, response)
return response, nil
}
// Get neighbors from protocols
func (self *SingleTableBirdwatcher) Neighbours() (*api.NeighboursResponse, error) {
// Check if we hit the cache
response := self.neighborsCache.Get()
if response != nil {
return response, nil
}
// Query birdwatcher
bird, err := self.client.GetJson("/protocols/bgp")
if err != nil {
return nil, err
}
// Use api status from first request
apiStatus, err := parseApiStatus(bird, self.config)
if err != nil {
return nil, err
}
// Parse the neighbors
neighbours, err := parseNeighbours(bird, self.config)
if err != nil {
return nil, err
}
response = &api.NeighboursResponse{
Api: apiStatus,
Neighbours: neighbours,
}
// Cache result
self.neighborsCache.Set(response)
return response, nil // dereference for now
}
// Get filtered and exported routes
func (self *SingleTableBirdwatcher) Routes(neighbourId string) (*api.RoutesResponse, error) {
response := &api.RoutesResponse{}
// Fetch required routes first (received and filtered)
required, err := self.fetchRequiredRoutes(neighbourId)
if err != nil {
return nil, err
}
// Optional: NoExport
_, notExported, err := self.fetchNotExportedRoutes(neighbourId)
if err != nil {
return nil, err
}
response.Api = required.Api
response.Imported = required.Imported
response.Filtered = required.Filtered
response.NotExported = notExported
return response, nil
}
// Get all received routes
func (self *SingleTableBirdwatcher) RoutesReceived(neighborId string) (*api.RoutesResponse, error) {
response := &api.RoutesResponse{}
// Check if we hit the cache
cachedRoutes := self.routesRequiredCache.Get(neighborId)
if cachedRoutes != nil {
response.Api = cachedRoutes.Api
response.Imported = cachedRoutes.Imported
return response, nil
}
// Fetch required routes first (received and filtered)
// However: Store in separate cache for faster access
routes, err := self.fetchRequiredRoutes(neighborId)
if err != nil {
return nil, err
}
response.Api = routes.Api
response.Imported = routes.Imported
return response, nil
}
// Get all filtered routes
func (self *SingleTableBirdwatcher) RoutesFiltered(neighborId string) (*api.RoutesResponse, error) {
response := &api.RoutesResponse{}
// Check if we hit the cache
cachedRoutes := self.routesRequiredCache.Get(neighborId)
if cachedRoutes != nil {
response.Api = cachedRoutes.Api
response.Filtered = cachedRoutes.Filtered
return response, nil
}
// Fetch required routes first (received and filtered)
// However: Store in separate cache for faster access
routes, err := self.fetchRequiredRoutes(neighborId)
if err != nil {
return nil, err
}
response.Api = routes.Api
response.Filtered = routes.Filtered
return response, nil
}
// Get all not exported routes
func (self *SingleTableBirdwatcher) RoutesNotExported(neighborId string) (*api.RoutesResponse, error) {
// Check if we hit the cache
response := self.routesNotExportedCache.Get(neighborId)
if response != nil {
return response, nil
}
// Fetch not exported routes
apiStatus, routes, err := self.fetchNotExportedRoutes(neighborId)
if err != nil {
return nil, err
}
response = &api.RoutesResponse{
Api: *apiStatus,
NotExported: routes,
}
// Cache result
self.routesNotExportedCache.Set(neighborId, response)
return response, nil
}
func (self *SingleTableBirdwatcher) AllRoutes() (*api.RoutesResponse, error) {
// First fetch all routes from the master table
birdImported, err := self.client.GetJson("/routes/table/master")
if err != nil {
return nil, err
}
// Then fetch all filtered routes from the master table
birdFiltered, err := self.client.GetJson("/routes/table/master/filtered")
if err != nil {
return nil, err
}
// Use api status from second request
apiStatus, err := parseApiStatus(birdFiltered, self.config)
if err != nil {
return nil, err
}
response := &api.RoutesResponse{
Api: apiStatus,
}
// Parse the routes
imported := parseRoutesData(birdImported["routes"].([]interface{}), self.config)
// Sort routes for deterministic ordering
sort.Sort(imported)
response.Imported = imported
// Parse the routes
filtered := parseRoutesData(birdFiltered["routes"].([]interface{}), self.config)
// Sort routes for deterministic ordering
sort.Sort(filtered)
response.Filtered = filtered
return response, nil
}

View File

@ -1,61 +0,0 @@
package birdwatcher
import (
"strconv"
)
/*
* Types helper for parser
*/
// Assert string, provide default
func mustString(value interface{}, fallback string) string {
sval, ok := value.(string)
if !ok {
return fallback
}
return sval
}
// Assert list of strings
func mustStringList(data interface{}) []string {
list := []string{}
ldata, ok := data.([]interface{})
if !ok {
return []string{}
}
for _, e := range ldata {
s, ok := e.(string)
if ok {
list = append(list, s)
}
}
return list
}
// Convert list of strings to int
func mustIntList(data interface{}) []int {
list := []int{}
sdata := mustStringList(data)
for _, e := range sdata {
val, _ := strconv.Atoi(e)
list = append(list, val)
}
return list
}
func mustInt(value interface{}, fallback int) int {
fval, ok := value.(float64)
if !ok {
return fallback
}
return int(fval)
}
func mustBool(value interface{}, fallback bool) bool {
val, ok := value.(bool)
if !ok {
return fallback
}
return val
}
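A minimal sketch (not part of the original file) of the fallback behaviour of these helpers on values decoded by encoding/json, where every number arrives as a float64:
_ = mustString("up", "unknown") // "up"
_ = mustString(nil, "unknown")  // "unknown"
_ = mustInt(float64(42), 0)     // 42: JSON numbers decode to float64
_ = mustInt("42", 0)            // 0: a string falls back to the default
_ = mustBool(nil, false)        // false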

View File

@ -1,53 +0,0 @@
package birdwatcher
import (
"fmt"
"sync"
"github.com/alice-lg/alice-lg/backend/api"
)
/*
Helper functions for dealing with birdwatcher API data
*/
// Get neighbour by protocol id
func getNeighbourById(neighbours api.Neighbours, id string) (*api.Neighbour, error) {
for _, n := range neighbours {
if n.Id == id {
return n, nil
}
}
unknown := &api.Neighbour{
Id: "unknown",
Description: "Unknown neighbour",
}
return unknown, fmt.Errorf("Neighbour not found")
}
/*
LockMap: Uses the sync.Map to manage locks, accessed by a key.
TODO: Maybe this would be a nice generic helper
*/
type LockMap struct {
locks *sync.Map
}
func NewLockMap() *LockMap {
return &LockMap{
locks: &sync.Map{},
}
}
func (self *LockMap) Lock(key string) {
mutex, _ := self.locks.LoadOrStore(key, &sync.Mutex{})
mutex.(*sync.Mutex).Lock()
}
func (self *LockMap) Unlock(key string) {
mutex, ok := self.locks.Load(key)
if !ok {
return // Nothing to unlock
}
mutex.(*sync.Mutex).Unlock()
}
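A brief usage sketch (illustrative, not from the original file; the key is made up) of how the LockMap serializes work per key, e.g. one fetch per neighbor at a time:
lm := NewLockMap()
lm.Lock("rs1-neighbor-42") // hypothetical key
defer lm.Unlock("rs1-neighbor-42")
// ... perform the per-neighbor fetch while holding the lock ...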

View File

@ -1,11 +0,0 @@
package gobgp
type Config struct {
Id string
Name string
Host string `ini:"host"`
Insecure bool `ini:"insecure"`
TLSCert string `ini:"tls_crt"`
TLSCommonName string `ini:"tls_common_name"`
}

View File

@ -1,194 +0,0 @@
package gobgp
import (
"github.com/alice-lg/alice-lg/backend/sources/gobgp/apiutil"
"github.com/osrg/gobgp/pkg/packet/bgp"
"github.com/alice-lg/alice-lg/backend/api"
gobgpapi "github.com/osrg/gobgp/api"
"context"
"fmt"
"io"
"log"
"time"
)
var families []gobgpapi.Family = []gobgpapi.Family{gobgpapi.Family{
Afi: gobgpapi.Family_AFI_IP,
Safi: gobgpapi.Family_SAFI_UNICAST,
}, gobgpapi.Family{
Afi: gobgpapi.Family_AFI_IP6,
Safi: gobgpapi.Family_SAFI_UNICAST,
},
}
func NewRoutesResponse() api.RoutesResponse {
routes := api.RoutesResponse{}
routes.Imported = make(api.Routes, 0)
routes.Filtered = make(api.Routes, 0)
routes.NotExported = make(api.Routes, 0)
return routes
}
func (gobgp *GoBGP) lookupNeighbour(neighborId string) (*gobgpapi.Peer, error) {
peers, err := gobgp.GetNeighbours()
if err != nil {
return nil, err
}
for _, peer := range peers {
peerId := PeerHash(peer)
if neighborId == "" || peerId == neighborId {
return peer, nil
}
}
return nil, fmt.Errorf("Could not lookup neighbour")
}
func (gobgp *GoBGP) GetNeighbours() ([]*gobgpapi.Peer, error) {
ctx, cancel := context.WithTimeout(context.Background(), time.Second)
defer cancel()
peerStream, err := gobgp.client.ListPeer(ctx, &gobgpapi.ListPeerRequest{EnableAdvertised: true})
if err != nil {
return nil, err
}
peers := make([]*gobgpapi.Peer, 0)
for {
peer, err := peerStream.Recv()
if err == io.EOF {
break
} else if err != nil {
return nil, err
}
peers = append(peers, peer.Peer)
}
return peers, nil
}
func (gobgp *GoBGP) parsePathIntoRoute(path *gobgpapi.Path, prefix string) (error, *api.Route) {
route := api.Route{}
route.Id = fmt.Sprintf("%s_%s", path.SourceId, prefix)
route.NeighbourId = PeerHashWithASAndAddress(path.SourceAsn, path.NeighborIp)
route.Network = prefix
route.Interface = "Unknown"
route.Age = time.Now().Sub(time.Unix(path.Age.GetSeconds(), int64(path.Age.GetNanos())))
route.Primary = path.Best
attrs, err := apiutil.GetNativePathAttributes(path)
if err != nil {
return err, nil
}
route.Bgp.Communities = make(api.Communities, 0)
route.Bgp.LargeCommunities = make(api.Communities, 0)
route.Bgp.ExtCommunities = make(api.ExtCommunities, 0)
for _, attr := range attrs {
switch attr.(type) {
case *bgp.PathAttributeMultiExitDisc:
med := attr.(*bgp.PathAttributeMultiExitDisc)
route.Bgp.Med = int(med.Value)
case *bgp.PathAttributeNextHop:
nh := attr.(*bgp.PathAttributeNextHop)
route.Gateway = nh.Value.String()
route.Bgp.NextHop = nh.Value.String()
case *bgp.PathAttributeLocalPref:
lp := attr.(*bgp.PathAttributeLocalPref)
route.Bgp.LocalPref = int(lp.Value)
case *bgp.PathAttributeOrigin:
origin := attr.(*bgp.PathAttributeOrigin)
switch origin.Value {
case bgp.BGP_ORIGIN_ATTR_TYPE_IGP:
route.Bgp.Origin = "IGP"
case bgp.BGP_ORIGIN_ATTR_TYPE_EGP:
route.Bgp.Origin = "EGP"
case bgp.BGP_ORIGIN_ATTR_TYPE_INCOMPLETE:
route.Bgp.Origin = "Incomplete"
}
case *bgp.PathAttributeAsPath:
aspath := attr.(*bgp.PathAttributeAsPath)
for _, aspth := range aspath.Value {
for _, as := range aspth.GetAS() {
route.Bgp.AsPath = append(route.Bgp.AsPath, int(as))
}
}
case *bgp.PathAttributeCommunities:
communities := attr.(*bgp.PathAttributeCommunities)
for _, community := range communities.Value {
_community := api.Community{int((0xffff0000 & community) >> 16), int(0xffff & community)}
route.Bgp.Communities = append(route.Bgp.Communities, _community)
}
case *bgp.PathAttributeExtendedCommunities:
communities := attr.(*bgp.PathAttributeExtendedCommunities)
for _, community := range communities.Value {
if _community, ok := community.(*bgp.TwoOctetAsSpecificExtended); ok {
route.Bgp.ExtCommunities = append(route.Bgp.ExtCommunities, api.ExtCommunity{_community.AS, _community.LocalAdmin})
}
}
case *bgp.PathAttributeLargeCommunities:
communities := attr.(*bgp.PathAttributeLargeCommunities)
for _, community := range communities.Values {
route.Bgp.LargeCommunities = append(route.Bgp.LargeCommunities, api.Community{int(community.ASN), int(community.LocalData1), int(community.LocalData2)})
}
}
}
route.Metric = (route.Bgp.LocalPref + route.Bgp.Med)
return nil, &route
}
func (gobgp *GoBGP) GetRoutes(peer *gobgpapi.Peer, tableType gobgpapi.TableType, response *api.RoutesResponse) error {
ctx, cancel := context.WithTimeout(context.Background(), time.Second)
defer cancel()
for _, family := range families {
pathStream, err := gobgp.client.ListPath(ctx, &gobgpapi.ListPathRequest{
Name: peer.State.NeighborAddress,
TableType: tableType,
Family: &family,
EnableFiltered: true,
})
if err != nil {
log.Print(err)
continue
}
rib := make([]*gobgpapi.Destination, 0)
for {
_path, err := pathStream.Recv()
if err == io.EOF {
break
} else if err != nil {
log.Print(err)
return err
}
rib = append(rib, _path.Destination)
}
for _, destination := range rib {
for _, path := range destination.Paths {
err, route := gobgp.parsePathIntoRoute(path, destination.Prefix)
if err != nil {
log.Println(err)
continue
}
if path.Filtered {
response.Filtered = append(response.Filtered, route)
} else {
response.Imported = append(response.Imported, route)
}
}
}
}
return nil
}
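A hedged usage sketch (not from the original source; variable names assumed) of driving GetRoutes for a peer's adjacency-in table:
response := NewRoutesResponse()
if err := gobgp.GetRoutes(peer, gobgpapi.TableType_ADJ_IN, &response); err != nil {
log.Println("could not fetch routes for", peer.State.NeighborAddress, err)
}
// response.Imported and response.Filtered are now populated per address family.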

View File

@ -1,17 +0,0 @@
package sources
import (
"github.com/alice-lg/alice-lg/backend/api"
)
type Source interface {
ExpireCaches() int
Status() (*api.StatusResponse, error)
Neighbours() (*api.NeighboursResponse, error)
NeighboursStatus() (*api.NeighboursStatusResponse, error)
Routes(neighbourId string) (*api.RoutesResponse, error)
RoutesReceived(neighbourId string) (*api.RoutesResponse, error)
RoutesFiltered(neighbourId string) (*api.RoutesResponse, error)
RoutesNotExported(neighbourId string) (*api.RoutesResponse, error)
AllRoutes() (*api.RoutesResponse, error)
}

View File

@ -1,31 +0,0 @@
package main
var version = "unknown"
// Gather application status information
type AppStatus struct {
Version string `json:"version"`
Routes RoutesStoreStats `json:"routes"`
Neighbours NeighboursStoreStats `json:"neighbours"`
}
// Get application status, perform health checks
// on backends.
func NewAppStatus() (*AppStatus, error) {
routesStatus := RoutesStoreStats{}
if AliceRoutesStore != nil {
routesStatus = AliceRoutesStore.Stats()
}
neighboursStatus := NeighboursStoreStats{}
if AliceNeighboursStore != nil {
neighboursStatus = AliceNeighboursStore.Stats()
}
status := &AppStatus{
Version: version,
Routes: routesStatus,
Neighbours: neighboursStatus,
}
return status, nil
}

View File

@ -1,33 +0,0 @@
package main
import (
"time"
)
const (
STATE_INIT = iota
STATE_READY
STATE_UPDATING
STATE_ERROR
)
type StoreStatus struct {
LastRefresh time.Time
LastError error
State int
}
// Helper: stateToString
func stateToString(state int) string {
switch state {
case STATE_INIT:
return "INIT"
case STATE_READY:
return "READY"
case STATE_UPDATING:
return "UPDATING"
case STATE_ERROR:
return "ERROR"
}
return "INVALID"
}

View File

@ -1,78 +0,0 @@
package main
import (
"log"
"time"
)
// Routes Store
type RoutesStats struct {
Filtered int `json:"filtered"`
Imported int `json:"imported"`
}
type RouteServerRoutesStats struct {
Name string `json:"name"`
Routes RoutesStats `json:"routes"`
State string `json:"state"`
UpdatedAt time.Time `json:"updated_at"`
}
type RoutesStoreStats struct {
TotalRoutes RoutesStats `json:"total_routes"`
RouteServers []RouteServerRoutesStats `json:"route_servers"`
}
// Write stats to the log
func (stats RoutesStoreStats) Log() {
log.Println("Routes store:")
log.Println(" Routes Imported:",
stats.TotalRoutes.Imported,
"Filtered:",
stats.TotalRoutes.Filtered)
log.Println(" Routeservers:")
for _, rs := range stats.RouteServers {
log.Println(" -", rs.Name)
log.Println(" State:", rs.State)
log.Println(" UpdatedAt:", rs.UpdatedAt)
log.Println(" Routes Imported:",
rs.Routes.Imported,
"Filtered:",
rs.Routes.Filtered)
}
}
// Neighbours Store
type RouteServerNeighboursStats struct {
Name string `json:"name"`
State string `json:"state"`
Neighbours int `json:"neighbours"`
UpdatedAt time.Time `json:"updated_at"`
}
type NeighboursStoreStats struct {
TotalNeighbours int `json:"total_neighbours"`
RouteServers []RouteServerNeighboursStats `json:"route_servers"`
}
// Print stats
func (stats NeighboursStoreStats) Log() {
log.Println("Neighbours store:")
log.Println(" Neighbours:",
stats.TotalNeighbours)
for _, rs := range stats.RouteServers {
log.Println(" -", rs.Name)
log.Println(" State:", rs.State)
log.Println(" UpdatedAt:", rs.UpdatedAt)
log.Println(" Neighbours:",
rs.Neighbours)
}
}

View File

@ -1,191 +0,0 @@
package main
/*
The theme provides a method for adding customized CSS
or Javascript to Alice:
A theme directory can be specified in the config.
Stylesheets and Javascript residing in the theme root
directory will be included in the frontend's HTML.
Additional files can be added in subdirectories.
These are served as well and can be used for additional
assets (e.g. a logo).
*/
import (
"fmt"
"log"
"os"
"strconv"
"strings"
"io/ioutil"
"net/http"
"path/filepath"
"github.com/julienschmidt/httprouter"
)
type Theme struct {
Config ThemeConfig
}
func NewTheme(config ThemeConfig) *Theme {
theme := &Theme{
Config: config,
}
return theme
}
/*
Get includable files from theme directory
*/
func (self *Theme) listIncludes(suffix string) []string {
includes := []string{}
files, err := ioutil.ReadDir(self.Config.Path)
if err != nil {
return []string{}
}
for _, file := range files {
if file.IsDir() {
continue
}
filename := file.Name()
if strings.HasPrefix(filename, ".") {
continue
}
if strings.HasSuffix(filename, suffix) {
includes = append(includes, filename)
}
}
return includes
}
/*
Calculate a hash value for an include file
to help with cache invalidation when the file changes.
We use the timestamp of the last modification as Unix(),
encoded as hex.
*/
func (self *Theme) HashInclude(include string) string {
path := filepath.Join(self.Config.Path, include)
stat, err := os.Stat(path)
if err != nil {
return ""
}
modTime := stat.ModTime().UTC()
timestamp := modTime.Unix()
return strconv.FormatInt(timestamp, 16)
}
/*
Retrieve a list of includable stylesheets
*/
func (self *Theme) Stylesheets() []string {
return self.listIncludes(".css")
}
/*
Make include statement: stylesheet
*/
func (self *Theme) StylesheetIncludes() string {
includes := []string{}
for _, stylesheet := range self.Stylesheets() {
hash := self.HashInclude(stylesheet)
include := fmt.Sprintf(
"<link rel=\"stylesheet\" href=\"%s/%s?%s\" />",
self.Config.BasePath, stylesheet, hash,
)
includes = append(includes, include)
}
return strings.Join(includes, "\n")
}
/*
Retrieve a list of includable JavaScript files
*/
func (self *Theme) Scripts() []string {
return self.listIncludes(".js")
}
/*
Make include statement: script
*/
func (self *Theme) ScriptIncludes() string {
includes := []string{}
for _, script := range self.Scripts() {
hash := self.HashInclude(script)
include := fmt.Sprintf(
"<script type=\"text/javascript\" src=\"%s/%s?%s\"></script>",
self.Config.BasePath, script, hash,
)
includes = append(includes, include)
}
return strings.Join(includes, "\n")
}
/*
Theme HTTP Handler
*/
func (self *Theme) Handler() http.Handler {
// Serve the content using the file server
path := self.Config.Path
themeFilesHandler := http.StripPrefix(
self.Config.BasePath, http.FileServer(http.Dir(path)))
return themeFilesHandler
}
/*
Register theme at path
*/
func (self *Theme) RegisterThemeAssets(router *httprouter.Router) error {
fsPath := self.Config.Path
if fsPath == "" {
return nil // nothing to do here
}
if _, err := os.Stat(fsPath); err != nil {
return fmt.Errorf("Theme path '%s' could not be found!", fsPath)
}
log.Println("Using theme at:", fsPath)
// We have a theme, install handler
path := fmt.Sprintf("%s/*path", self.Config.BasePath)
router.Handler("GET", path, self.Handler())
return nil
}
/*
Prepare document, fill placeholder with scripts and stylesheet
*/
func (self *Theme) PrepareClientHtml(html string) string {
stylesheets := self.StylesheetIncludes()
scripts := self.ScriptIncludes()
html = strings.Replace(html,
"<!-- ###THEME_STYLESHEETS### -->",
stylesheets, 1)
html = strings.Replace(html,
"<!-- ###THEME_SCRIPTS### -->",
scripts, 1)
return html
}
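A small sketch (illustrative, not from the original file; the paths and file name are made up) of what the theme include generation produces end to end:
// With a theme directory containing "custom.css" and BasePath "/theme",
// StylesheetIncludes() yields roughly:
//   <link rel="stylesheet" href="/theme/custom.css?<mtime-hex>" />
// and PrepareClientHtml() substitutes it for the
// <!-- ###THEME_STYLESHEETS### --> placeholder in the served HTML.
theme := NewTheme(ThemeConfig{Path: "/etc/alice-lg/theme", BasePath: "/theme"})
html := theme.PrepareClientHtml(indexHtml) // indexHtml: hypothetical template string
_ = html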

View File

@ -1,91 +0,0 @@
package main
// Some helper functions
import (
"regexp"
"strconv"
"strings"
"time"
)
var REGEX_MATCH_IP_PREFIX = regexp.MustCompile(`([a-f0-9/]+[\.:]*)+`)
/*
Case Insensitive Contains
*/
func ContainsCi(s, substr string) bool {
return strings.Contains(
strings.ToLower(s),
strings.ToLower(substr),
)
}
/*
Check array membership
*/
func MemberOf(list []string, key string) bool {
for _, v := range list {
if v == key {
return true
}
}
return false
}
/*
Check if something could be a prefix
*/
func MaybePrefix(s string) bool {
s = strings.ToLower(s)
// Rule out anything which can not be part of a prefix
if strings.ContainsAny(s, "ghijklmnopqrstuvwxyz][;'_") {
return false
}
// Test using regex
matches := REGEX_MATCH_IP_PREFIX.FindAllStringIndex(s, -1)
if len(matches) == 1 {
return true
}
return false
}
/*
Since having ints as keys in JSON is
actually undefined behaviour, we keep these internally
but provide a string as a key for serialization
*/
func SerializeReasons(reasons map[int]string) map[string]string {
res := make(map[string]string)
for id, reason := range reasons {
res[strconv.Itoa(id)] = reason
}
return res
}
/*
Make trimmed list of CSV strings.
Omits empty values.
*/
func TrimmedStringList(s string) []string {
tokens := strings.Split(s, ",")
list := []string{}
for _, t := range tokens {
if t == "" {
continue
}
list = append(list, strings.TrimSpace(t))
}
return list
}
/*
Convert time.Duration to milliseconds
*/
func DurationMs(d time.Duration) float64 {
return float64(d) / 1000.0 / 1000.0 // nano -> micro -> milli
}
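Illustrative examples (not part of the original file; the inputs are made up) of what these helpers return:
_ = ContainsCi("AS6695 Frankfurt", "frankfurt")          // true
_ = MaybePrefix("2001:db8::")                            // true: hex digits and separators only
_ = MaybePrefix("AS6695")                                // false: "s" rules it out
_ = SerializeReasons(map[int]string{65535: "blackhole"}) // map["65535"] = "blackhole"
_ = TrimmedStringList("a, b,,c")                         // []string{"a", "b", "c"}
_ = DurationMs(1500 * time.Microsecond)                  // 1.5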

View File

@ -1 +0,0 @@
node_modules/

View File

@ -1,15 +0,0 @@
#
# Client build Dockerfile
# Use node:11 as base image
#
FROM node:11
RUN npm install -g gulp@4.0.0
RUN npm install -g gulp-cli
WORKDIR /client
VOLUME ["/client"]

View File

@ -1,53 +0,0 @@
#
# Build Helper
# ------------
#
# Create a full build by just typing make.
# This will automatically install all dependencies from NPM and
# start the build process.
#
# While developing, you might want to use 'make watch'
# which will automatically restart gulp in case something went
# wrong.
#
VERSION=$(shell cat ../VERSION)
DIST_BUILDS=../../birdseye-static/builds
DIST=birdseye-ui-dist-$(VERSION).tar.gz
# == END CONFIGURATION ==
DIST_BUILD=$(addprefix $(DIST_BUILDS)/, $(DIST))
all: client
deps:
@echo "Installing dependencies"
yarn install
client: deps
@echo "Building alice UI"
gulp
client_prod: deps
@echo "Building alice UI (production)"
DISABLE_LOGGING=1 NODE_ENV=production gulp
watch:
while true; do gulp watch; done
$(DIST_BUILD): deps client_prod
@echo "Creating alice ui distribution"
tar cvzf $(DIST) build/
mv $(DIST) $(DIST_BUILDS)
@echo ""
@echo "Done. Don't forget to push the dist to github"
clean:
rm -rf build/
dist: $(DIST_BUILD)
echo $(DIST_BUILD)

View File

@ -1,40 +0,0 @@
DOCKER_CONTAINER := alice-lg-node-build
DOCKER_IMAGE := alice-lg-node:latest
DOCKER_EXEC := docker run --rm -t -i \
-a stdin -a stdout -a stderr \
-v `pwd`:/client/ \
--name $(DOCKER_CONTAINER) \
$(DOCKER_IMAGE) /bin/bash -c
all: client
@echo "Built alice-lg client"
image:
docker build . -t $(DOCKER_IMAGE)
deps: image
$(DOCKER_EXEC) "yarn install"
client: stop deps
@echo "Building alice UI"
$(DOCKER_EXEC) "./node_modules/.bin/gulp"
client_prod: stop deps
@echo "Building alice UI (production)"
$(DOCKER_EXEC) "DISABLE_LOGGING=1 NODE_ENV=production ./node_modules/.bin/gulp"
watch:
$(DOCKER_EXEC) "while true; do ./node_modules/.bin/gulp watch; done"
stop:
@echo "Stopping docker container: $(DOCKER_CONTAINER)"
-docker stop $(DOCKER_CONTAINER)
@sleep 1
kill:
@echo "Killing docker container: $(DOCKER_CONTAINER)"
-docker kill $(DOCKER_CONTAINER)
@sleep 1

View File

@ -1,118 +0,0 @@
/**
* Alice (formerly known as Birdseye) v.2.0.0
* ------------------------------------------
*
* @author Matthias Hannig <mha@ecix.net>
*/
import axios from 'axios'
import React from 'react'
import ReactDOM from 'react-dom'
import { Component } from 'react'
// Config
import { configureAxios } from './config'
// Content
import { contentUpdate } from './components/content/actions'
// Redux
import { createStore, applyMiddleware } from 'redux'
import { Provider } from 'react-redux'
// Router
import { createHistory } from 'history'
import { Router,
Route,
IndexRoute,
IndexRedirect,
useRouterHistory } from 'react-router'
import { syncHistoryWithStore } from 'react-router-redux'
// Components
import LayoutMain from 'layouts/main'
import WelcomePage
from 'components/welcome'
import RouteserverPage
from 'components/routeservers/page'
import RoutesPage
from 'components/routeservers/routes/page'
import LookupPage
from 'components/lookup/page'
// Middlewares
import thunkMiddleware from 'redux-thunk'
import createLogger from 'redux-logger'
import { routerMiddleware as createRouterMiddleware }
from 'react-router-redux'
// Reducer
import combinedReducer from './reducer/app-reducer'
// Setup routing
const browserHistory = useRouterHistory(createHistory)({
basename: '/'
});
// Setup application
let store;
const routerMiddleware = createRouterMiddleware(browserHistory);
if (window.NO_LOG) {
store = createStore(combinedReducer, applyMiddleware(
routerMiddleware,
thunkMiddleware
));
} else {
const loggerMiddleware = createLogger();
store = createStore(combinedReducer, applyMiddleware(
routerMiddleware,
thunkMiddleware,
loggerMiddleware
));
}
// Create extension endpoint:
window.Alice = {
updateContent: (content) => {
store.dispatch(contentUpdate(content));
}
};
const history = syncHistoryWithStore(browserHistory, store);
// Setup axios
configureAxios(axios);
// Create App
class Birdseye extends Component {
render() {
return (
<Provider store={store}>
<Router history={history}>
<Route path="/" component={LayoutMain}>
<IndexRoute component={WelcomePage}/>
<Route path="/search"
component={LookupPage} />
<Route path="/routeservers">
<Route path=":routeserverId" component={RouteserverPage} />
<Route path=":routeserverId/protocols/:protocolId/routes" component={RoutesPage} />
</Route>
</Route>
</Router>
</Provider>
);
}
}
var mount = document.getElementById('app');
ReactDOM.render(<Birdseye />, mount);

View File

@ -1,8 +0,0 @@
.welcome-page {
.jumbotron {
padding: 20px;
}
}

View File

@ -1,30 +0,0 @@
import React from 'react'
import {connect} from 'react-redux'
import {parseServerTime} from 'components/datetime/parse'
import moment from 'moment'
/*
* Calculate age (generated_at), and set from_cache_status
*/
export const apiCacheStatus = function(apiStatus) {
if (!apiStatus || Object.keys(apiStatus).length === 0) {
return null;
}
const cacheStatus = apiStatus["cache_status"] || {};
const cachedAt = cacheStatus.cached_at;
if (!cachedAt) {
return null;
}
const fromCache = apiStatus.result_from_cache;
const ttl = parseServerTime(apiStatus.ttl);
const generatedAt = parseServerTime(cachedAt);
const age = ttl.diff(generatedAt); // ms
return {fromCache, age, generatedAt, ttl};
};

View File

@ -1,22 +0,0 @@
import axios from 'axios';
import {apiError} from 'components/errors/actions'
export const LOAD_CONFIG_SUCCESS = "@config/LOAD_CONFIG_SUCCESS";
function loadConfigSuccess(config) {
return {
type: LOAD_CONFIG_SUCCESS,
payload: config
};
}
export function loadConfig() {
return (dispatch) => {
axios.get(`/api/v1/config`)
.then(
({data}) => {
dispatch(loadConfigSuccess(data));
},
(error) => dispatch(apiError(error)));
}
}

View File

@ -1,72 +0,0 @@
import {LOAD_CONFIG_SUCCESS} from './actions'
import {LOAD_ROUTESERVERS_SUCCESS} from 'components/routeservers/actions'
const initialState = {
asn: 0, // Our own ASN (might be abstracted in the future)
routes_columns: {},
routes_columns_order: [],
neighbours_columns: {},
neighbours_columns_order: [],
lookup_columns: {},
lookup_columns_order: [],
prefix_lookup_enabled: false,
content: {},
noexport_load_on_demand: true, // we have to assume this,
// otherwise fetching would start right away.
rpki: {
enabled: false,
},
bgp_communities: {},
blackholes: {}, // Map blackholes to routeservers
asns: {}, // Map ASNs to routeservers (for future use)
};
const _handleRouteserversConfig = function(state, payload) {
let blackholes = {};
let asns = {};
let asn = 0;
for (const rs of payload.routeservers) {
blackholes[rs.id] = rs.blackholes;
asns[rs.id] = rs.asn;
if (!asn) {
asn = rs.asn; // Just go with the first asn as our own
}
}
return Object.assign({}, state, {
asn: asn,
blackholes: blackholes,
asns: asns,
});
}
export default function reducer(state = initialState, action) {
switch(action.type) {
case LOAD_CONFIG_SUCCESS:
return Object.assign({}, state, {
routes_columns: action.payload.routes_columns,
routes_columns_order: action.payload.routes_columns_order,
neighbours_columns: action.payload.neighbours_columns,
neighbours_columns_order: action.payload.neighbours_columns_order,
lookup_columns: action.payload.lookup_columns,
lookup_columns_order: action.payload.lookup_columns_order,
prefix_lookup_enabled: action.payload.prefix_lookup_enabled,
rpki: action.payload.rpki,
bgp_communities: action.payload.bgp_communities,
noexport_load_on_demand: action.payload.noexport.load_on_demand
});
case LOAD_ROUTESERVERS_SUCCESS:
return _handleRouteserversConfig(state, action.payload);
}
return state;
}

View File

@ -1,17 +0,0 @@
import React from 'react'
import { connect } from 'react-redux'
import { loadConfig } from 'components/config/actions'
class Config extends React.Component {
componentDidMount() {
this.props.dispatch(loadConfig());
}
render() {
return null;
}
}
export default connect()(Config);

View File

@ -1,10 +0,0 @@
export const CONTENT_UPDATE = "@content/CONTENT_UPDATE";
export function contentUpdate(content) {
return {
type: CONTENT_UPDATE,
payload: content
}
}

View File

@ -1,39 +0,0 @@
import React from 'react'
import {connect} from 'react-redux'
/*
* Content Component
*/
function ContentComponent(props) {
let key = props.id;
let defaultValue = props.children;
if (!key) {
return <span>{defaultValue}</span>;
}
// Traverse content by key, if content is found
// return content, otherwise fall back to the default
let tokens = key.split(".");
let resolved = props.content;
for (let part of tokens) {
resolved = resolved[part];
if (!resolved) {
break;
}
}
if (!resolved) {
resolved = defaultValue;
}
return (<span dangerouslySetInnerHTML={{__html: resolved}}></span>);
}
export default connect(
(state) => ({
content: state.content
})
)(ContentComponent);

View File

@ -1,17 +0,0 @@
/*
* Content reducer
*/
import {CONTENT_UPDATE} from './actions'
const initialState = {};
export default function reducer(state = initialState, action) {
switch(action.type) {
case CONTENT_UPDATE:
return Object.assign({}, state, action.payload);
}
return state;
}

View File

@ -1,29 +0,0 @@
/**
* Datetime Component
*
* @author Matthias Hannig <mha@ecix.net>
*/
import React from 'react'
import moment from 'moment'
import {parseServerTime} from './parse'
export default class Datetime extends React.Component {
render() {
let timefmt = this.props.format;
if (!timefmt) {
timefmt = 'LLLL';
}
let time = parseServerTime(this.props.value);
return (
<span>{time.format(timefmt)}</span>
);
}
}

View File

@ -1,16 +0,0 @@
/*
* Some datetime parsing helper functions
*/
import moment from 'moment'
window.moment = moment;
export function parseServerTime(serverTime) {
const fmt = "YYYY-MM-DDTHH:mm:ss.SSSSSSSSZ"; // S was 4 byte short
return moment(serverTime, fmt);
}

View File

@ -1,16 +0,0 @@
import React from 'react'
import moment from 'moment'
export default class RelativeTimestamp extends React.Component {
render() {
const tsMs = this.props.value / 1000.0 / 1000.0; // nano -> micro -> milli
const now = moment.utc()
const rel = now.subtract(tsMs, 'ms');
return (
<span>{rel.fromNow(this.props.suffix)}</span>
);
}
}

View File

@ -1,80 +0,0 @@
import moment from 'moment'
import React from 'react'
export default class RelativeTime extends React.Component {
// Local state updates, to trigger a rerender
// every second for time updates.
componentDidMount() {
this.timer = setInterval(() => {
this.setState({
now: Date.now()
})
}, 1000);
}
// Stop timer
componentWillUnmount() {
clearInterval(this.timer);
}
// Helper: Assert time is an instance of moment
getTime() {
if (!this.props.value) {
return false;
}
let time = false;
if (this.props.value instanceof moment) {
time = this.props.value;
} else {
time = moment.utc(this.props.value);
}
return time
}
// Time can be capped, if we are handling a past
// or future event:
capTime(time) {
const now = moment.utc();
if (this.props.pastEvent && time.isAfter(now)) {
return now;
}
if (this.props.futureEvent && time.isBefore(now)) {
return now;
}
return time;
}
render() {
let time = this.getTime();
if (!time) {
return null; // Well, nothing to do here
}
time = this.capTime(time);
// A few seconds ago / in a few seconds can be replaced
// with 'just now'.
// fuzzyNow can be set as a threshold of seconds
if (this.props.fuzzyNow) {
const now = moment.utc();
if (Math.abs(now - time) / 1000.0 < this.props.fuzzyNow) {
return (
<span>just now</span>
);
}
}
return (
<span>{time.fromNow(this.props.suffix)}</span>
);
}
}
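A sketch of the intended props (values are placeholders): pastEvent and futureEvent cap the value at "now" so clock skew cannot yield "in a few seconds" for something that already happened, and fuzzyNow collapses anything within the given number of seconds to "just now".
<RelativeTime value={cachedAt} fuzzyNow={5} pastEvent={true} /> // e.g. "3 minutes ago", or "just now" within 5s
<RelativeTime value={cacheTtl} futureEvent={true} />            // e.g. "in 2 minutes"; a value already in the past is capped to "now"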

View File

@ -1,12 +0,0 @@
export const API_ERROR = '@birdseye/API_ERROR';
export function apiError(error) {
return {
type: API_ERROR,
error,
};
}
export function resetApiError() {
return apiError(null);
}

View File

@ -1,95 +0,0 @@
import _ from 'underscore'
import React from 'react'
import {connect} from 'react-redux'
import {resetApiError} from './actions'
import {infoFromError} from './utils'
class ErrorsPage extends React.Component {
resetApiError() {
this.props.dispatch(resetApiError());
}
render() {
if (!this.props.error) {
return null;
}
let status = null;
if (this.props.error.response) {
status = this.props.error.response.status;
} else {
status = 600;
}
if (!status || (status != 429 && status < 500)) {
return null;
}
let body = null;
// Find affected routeserver
let rs = null;
const errorInfo = infoFromError(this.props.error);
if (errorInfo) {
const rsId = errorInfo.routeserver_id;
if (rsId !== null) {
rs = _.findWhere(this.props.routeservers, { id: rsId });
}
}
if (status == 429) {
body = (
<div className="error-message">
<p>Alice reached the request limit.</p>
<p>We suggest you try at a less busy time.</p>
</div>
);
} else {
let errorStatus = "";
if (this.props.error.response) {
errorStatus = " (got HTTP " + this.props.error.response.status + ")";
}
if (errorInfo) {
errorStatus = ` (got ${errorInfo.tag})`;
}
body = (
<div className="error-message">
<p>
Alice has trouble connecting to the API
{rs &&
<span> of <b>{rs.name}</b></span>}
{errorStatus}
.
</p>
<p>If this problem persists, we suggest you try again later.</p>
</div>
);
}
return (
<div className="error-notify">
<div className="error-dismiss">
<i className="fa fa-times-circle" aria-hidden="true"
onClick={() => this.resetApiError()}></i>
</div>
<div className="error-icon">
<i className="fa fa-times-circle" aria-hidden="true"></i>
</div>
{body}
</div>
);
}
}
export default connect(
(state) => ({
error: state.errors.error,
routeservers: state.routeservers.byId,
})
)(ErrorsPage);

View File

@ -1,17 +0,0 @@
import {API_ERROR} from './actions'
const initialState = {
error: null,
};
export default function reducer(state = initialState, action) {
switch(action.type) {
case API_ERROR:
return {error: action.error};
}
return state;
}

View File

@ -1,12 +0,0 @@
/*
* Helper: Get info from api error
*/
export const infoFromError = function(error) {
if (error.response && error.response.data && error.response.data.code) {
return error.response.data;
}
return null;
}

View File

@ -1,126 +0,0 @@
import _ from 'underscore'
import React from 'react'
import {connect} from 'react-redux'
import {push} from 'react-router-redux'
import {cloneFilters,
hasFilters}
from 'components/filters/state'
import {FILTER_GROUP_SOURCES,
FILTER_GROUP_ASNS,
FILTER_GROUP_COMMUNITIES,
FILTER_GROUP_EXT_COMMUNITIES,
FILTER_GROUP_LARGE_COMMUNITIES}
from './groups'
import {RouteserversSelect,
PeersFilterSelect,
CommunitiesSelect}
from './widgets'
/*
* Helper: Add and remove filter
*/
function _applyFilterValue(filters, group, value) {
let nextFilters = cloneFilters(filters);
nextFilters[group].filters.push({
value: value,
});
return nextFilters;
}
function _removeFilterValue(filters, group, value) {
const svalue = value.toString();
let nextFilters = cloneFilters(filters);
let groupFilters = nextFilters[group].filters;
nextFilters[group].filters = _.filter(groupFilters, (f) => {
return f.value.toString() !== svalue;
});
return nextFilters;
}
class FiltersEditor extends React.Component {
addFilter(group, value) {
let nextFilters = _applyFilterValue(
this.props.applied, group, value
);
this.props.dispatch(push(
this.props.makeLinkProps(Object.assign({}, this.props.link, {
filtersApplied: nextFilters,
}))
));
}
removeFilter(group, sourceId) {
let nextFilters = _removeFilterValue(
this.props.applied, group, sourceId
);
this.props.dispatch(push(
this.props.makeLinkProps(Object.assign({}, this.props.link, {
filtersApplied: nextFilters,
}))
));
}
render() {
if (!hasFilters(this.props.available) && !hasFilters(this.props.applied)) {
return null;
}
/*
*/
return (
<div className="card lookup-filters-editor">
<RouteserversSelect onChange={(value) => this.addFilter(FILTER_GROUP_SOURCES, value)}
onRemove={(value) => this.removeFilter(FILTER_GROUP_SOURCES, value)}
available={this.props.availableSources}
applied={this.props.appliedSources} />
<PeersFilterSelect onChange={(value) => this.addFilter(FILTER_GROUP_ASNS, value)}
onRemove={(value) => this.removeFilter(FILTER_GROUP_ASNS, value)}
available={this.props.availableAsns}
applied={this.props.appliedAsns} />
<CommunitiesSelect onChange={(group, value) => this.addFilter(group, value)}
onRemove={(group, value) => this.removeFilter(group, value)}
available={this.props.availableCommunities}
applied={this.props.appliedCommunities} />
</div>
);
}
}
export default connect(
(state, props) => ({
isLoading: state.lookup.isLoading,
link: props.linkProps,
available: props.filtersAvailable,
applied: props.filtersApplied,
availableSources: props.filtersAvailable[FILTER_GROUP_SOURCES].filters,
appliedSources: props.filtersApplied[FILTER_GROUP_SOURCES].filters,
availableAsns: props.filtersAvailable[FILTER_GROUP_ASNS].filters,
appliedAsns: props.filtersApplied[FILTER_GROUP_ASNS].filters,
availableCommunities: {
communities: props.filtersAvailable[FILTER_GROUP_COMMUNITIES].filters,
ext: props.filtersAvailable[FILTER_GROUP_EXT_COMMUNITIES].filters,
large: props.filtersAvailable[FILTER_GROUP_LARGE_COMMUNITIES].filters,
},
appliedCommunities: {
communities: props.filtersApplied[FILTER_GROUP_COMMUNITIES].filters,
ext: props.filtersApplied[FILTER_GROUP_EXT_COMMUNITIES].filters,
large: props.filtersApplied[FILTER_GROUP_LARGE_COMMUNITIES].filters,
},
})
)(FiltersEditor);

View File

@ -1,96 +0,0 @@
import {
FILTER_GROUP_SOURCES,
FILTER_GROUP_ASNS,
FILTER_GROUP_COMMUNITIES,
FILTER_GROUP_EXT_COMMUNITIES,
FILTER_GROUP_LARGE_COMMUNITIES,
} from './groups'
function _makeFilter(value) {
return {
name: "",
value: value,
cardinality: 1,
}
}
export function decodeFiltersSources(params) {
if (!params.sources) {
return []; // No params available
}
const sources = params.sources.split(",");
return sources.map((sid) => _makeFilter(sid));
}
export function decodeFiltersAsns(params) {
if (!params.asns) {
return []; // No params available
}
const asns = params.asns.split(",");
return asns.map((asn) => _makeFilter(parseInt(asn, 10)));
}
function _decodeCommunity(community) {
const parts = community.split(":");
return parts.map((p) => parseInt(p, 10));
}
function _decodeExtCommunity(community) {
return community.split(":");
}
export function decodeFiltersCommunities(params) {
if (!params.communities) {
return []; // No params available
}
const communities = params.communities.split(",");
return communities.map((c) => _makeFilter(_decodeCommunity(c)));
}
export function decodeFiltersExtCommunities(params) {
if (!params.ext_communities) {
return []; // No params available
}
const communities = params.ext_communities.split(",");
return communities.map((c) => _makeFilter(_decodeExtCommunity(c)));
}
export function decodeFiltersLargeCommunities(params) {
if (!params.large_communities) {
return []; // No params available
}
const communities = params.large_communities.split(",");
return communities.map((c) => _makeFilter(_decodeCommunity(c)));
}
export function encodeGroupInt(group) {
if (!group.filters.length) {
return "";
}
const values = group.filters.map((f) => f.value).join(",");
return `&${group.key}=${values}`;
}
export function encodeGroupCommunities(group) {
if (!group.filters.length) {
return "";
}
const values = group.filters.map((f) => f.value.join(":")).join(",");
return `&${group.key}=${values}`;
}
export function filtersUrlEncode(filters) {
let encoded = "";
encoded += encodeGroupInt(filters[FILTER_GROUP_SOURCES]);
encoded += encodeGroupInt(filters[FILTER_GROUP_ASNS]);
encoded += encodeGroupCommunities(filters[FILTER_GROUP_COMMUNITIES]);
encoded += encodeGroupCommunities(filters[FILTER_GROUP_EXT_COMMUNITIES]);
encoded += encodeGroupCommunities(filters[FILTER_GROUP_LARGE_COMMUNITIES]);
return encoded;
}
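A small round-trip sketch (the filter values are invented): the group keys set up in components/filters/state become the query parameters.
import {filtersUrlEncode} from 'components/filters/encoding'
import {initializeFilterState} from 'components/filters/state'
import {FILTER_GROUP_ASNS, FILTER_GROUP_COMMUNITIES} from 'components/filters/groups'
let filters = initializeFilterState();
filters[FILTER_GROUP_ASNS].filters = [{name: "", value: 64512, cardinality: 1}];
filters[FILTER_GROUP_COMMUNITIES].filters = [{name: "", value: [65000, 1], cardinality: 1}];
filtersUrlEncode(filters); // => "&asns=64512&communities=65000:1"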

View File

@ -1,31 +0,0 @@
export const FILTER_KEY_SOURCES = "sources"
export const FILTER_KEY_ASNS = "asns"
export const FILTER_KEY_COMMUNITIES = "communities"
export const FILTER_KEY_EXT_COMMUNITIES = "ext_communities"
export const FILTER_KEY_LARGE_COMMUNITIES = "large_communities"
export const FILTER_GROUP_SOURCES = 0
export const FILTER_GROUP_ASNS = 1
export const FILTER_GROUP_COMMUNITIES = 2
export const FILTER_GROUP_EXT_COMMUNITIES = 3
export const FILTER_GROUP_LARGE_COMMUNITIES = 4
export function filtersEqual(a, b) {
return (a[FILTER_GROUP_SOURCES].filters.length ===
b[FILTER_GROUP_SOURCES].filters.length) &&
(a[FILTER_GROUP_ASNS].filters.length ===
b[FILTER_GROUP_ASNS].filters.length) &&
(a[FILTER_GROUP_COMMUNITIES].filters.length ===
b[FILTER_GROUP_COMMUNITIES].filters.length) &&
(a[FILTER_GROUP_EXT_COMMUNITIES].filters.length ===
b[FILTER_GROUP_EXT_COMMUNITIES].filters.length) &&
(a[FILTER_GROUP_LARGE_COMMUNITIES].filters.length ===
b[FILTER_GROUP_LARGE_COMMUNITIES].filters.length);
}
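Worth noting, with a made-up illustration: filtersEqual only compares the number of filters per group, not their values, so it acts as a cheap change detector rather than a deep equality check.
import {filtersEqual, FILTER_GROUP_ASNS} from 'components/filters/groups'
import {initializeFilterState} from 'components/filters/state'
let a = initializeFilterState();
let b = initializeFilterState();
a[FILTER_GROUP_ASNS].filters = [{name: "", value: 64512, cardinality: 1}];
b[FILTER_GROUP_ASNS].filters = [{name: "", value: 64496, cardinality: 1}];
filtersEqual(a, b); // => true, both states hold one filter per group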

View File

@ -1,161 +0,0 @@
import _ from 'underscore'
import {FILTER_GROUP_SOURCES,
FILTER_GROUP_ASNS,
FILTER_GROUP_COMMUNITIES,
FILTER_GROUP_EXT_COMMUNITIES,
FILTER_GROUP_LARGE_COMMUNITIES}
from './groups'
import {decodeFiltersSources,
decodeFiltersAsns,
decodeFiltersCommunities,
decodeFiltersExtCommunities,
decodeFiltersLargeCommunities}
from 'components/filters/encoding'
export const initializeFilterState = () => ([
{"key": "sources", "filters": []},
{"key": "asns", "filters": []},
{"key": "communities", "filters": []},
{"key": "ext_communities", "filters": []},
{"key": "large_communities", "filters": []},
]);
export function cloneFilters(filters) {
const nextFilters = [
Object.assign({}, filters[FILTER_GROUP_SOURCES]),
Object.assign({}, filters[FILTER_GROUP_ASNS]),
Object.assign({}, filters[FILTER_GROUP_COMMUNITIES]),
Object.assign({}, filters[FILTER_GROUP_EXT_COMMUNITIES]),
Object.assign({}, filters[FILTER_GROUP_LARGE_COMMUNITIES]),
];
nextFilters[FILTER_GROUP_SOURCES].filters =
[...nextFilters[FILTER_GROUP_SOURCES].filters];
nextFilters[FILTER_GROUP_ASNS].filters =
[...nextFilters[FILTER_GROUP_ASNS].filters];
nextFilters[FILTER_GROUP_COMMUNITIES].filters =
[...nextFilters[FILTER_GROUP_COMMUNITIES].filters];
nextFilters[FILTER_GROUP_EXT_COMMUNITIES].filters =
[...nextFilters[FILTER_GROUP_EXT_COMMUNITIES].filters];
nextFilters[FILTER_GROUP_LARGE_COMMUNITIES].filters =
[...nextFilters[FILTER_GROUP_LARGE_COMMUNITIES].filters];
return nextFilters;
}
/*
* Decode filters applied from params
*/
export function decodeFiltersApplied(params) {
const groups = initializeFilterState();
groups[FILTER_GROUP_SOURCES].filters = decodeFiltersSources(params);
groups[FILTER_GROUP_ASNS].filters = decodeFiltersAsns(params);
groups[FILTER_GROUP_COMMUNITIES].filters = decodeFiltersCommunities(params);
groups[FILTER_GROUP_EXT_COMMUNITIES].filters = decodeFiltersExtCommunities(params);
groups[FILTER_GROUP_LARGE_COMMUNITIES].filters = decodeFiltersLargeCommunities(params);
return groups;
}
/*
* Merge filters
*/
function _mergeFilters(a, b) {
let groups = initializeFilterState();
let setCmp = [];
setCmp[FILTER_GROUP_SOURCES] = cmpFilterValue;
setCmp[FILTER_GROUP_ASNS] = cmpFilterValue;
setCmp[FILTER_GROUP_COMMUNITIES] = cmpFilterCommunity;
setCmp[FILTER_GROUP_EXT_COMMUNITIES] = cmpFilterCommunity;
setCmp[FILTER_GROUP_LARGE_COMMUNITIES] = cmpFilterCommunity;
for (const i in groups) {
groups[i].filters = mergeFilterSet(setCmp[i], a[i].filters, b[i].filters);
}
return groups;
}
export function mergeFilters(a, ...other) {
let result = cloneFilters(a);
for (const filters of other) {
result = _mergeFilters(result, cloneFilters(filters));
}
return result;
}
/*
* Merge list of filters
*/
function mergeFilterSet(inSet, a, b) {
let result = a;
for (const f of b) {
const present = inSet(result, f);
if (present) {
// Update filter cardinality
// present.cardinality = Math.max(f.cardinality, present.cardinality);
present.cardinality += f.cardinality;
continue;
}
result.push(f);
}
return result;
}
/*
* Does a single group have any filters?
*/
export function groupHasFilters(group) {
return group.filters.length > 0;
}
/*
* Filters set compare
*/
function cmpFilterValue(set, filter) {
for (const f of set) {
if(f.value == filter.value) {
return f;
}
}
return null;
}
function cmpFilterCommunity(set, filter) {
for (const f of set) {
let match = true;
for (const i in f.value) {
if (f.value[i] != filter.value[i]) {
match = false;
break;
}
}
if (match) {
return f;
}
}
return null;
}
/*
* Do we have filters in general?
*/
export function hasFilters(groups) {
for (const g of groups) {
if (groupHasFilters(g)) {
return true;
}
}
return false;
}
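A short sketch of the merge semantics (the example data is invented): a filter that appears in several inputs is collapsed into one entry and its cardinalities are summed, which feeds the "(n)" counters shown by the filter widgets.
import {initializeFilterState, mergeFilters} from 'components/filters/state'
import {FILTER_GROUP_ASNS} from 'components/filters/groups'
let a = initializeFilterState();
let b = initializeFilterState();
a[FILTER_GROUP_ASNS].filters = [{name: "Peer A", value: 64512, cardinality: 1}];
b[FILTER_GROUP_ASNS].filters = [{name: "Peer A", value: 64512, cardinality: 2},
                                {name: "Peer B", value: 64513, cardinality: 1}];
mergeFilters(a, b)[FILTER_GROUP_ASNS].filters;
// => [{name: "Peer A", value: 64512, cardinality: 3},
//     {name: "Peer B", value: 64513, cardinality: 1}]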

View File

@ -1,318 +0,0 @@
import _ from 'underscore'
import React from 'react'
import {connect} from 'react-redux'
import CommunityLabel
from 'components/routeservers/communities/label'
import {makeReadableCommunity}
from 'components/routeservers/communities/utils'
import {FILTER_GROUP_COMMUNITIES,
FILTER_GROUP_EXT_COMMUNITIES,
FILTER_GROUP_LARGE_COMMUNITIES}
from './groups'
/*
* Add a title to the widget, if something needs to be rendered
*/
const withTitle = (title) => (Widget) => (class WidgetWithTitle extends Widget {
render() {
const result = super.render();
if (result == null) {
return null;
}
return (
<div className="filter-editor-widget">
<h2>{title}</h2>
{result}
</div>
)
}
});
class _RouteserversSelect extends React.Component {
render() {
// Nothing to do if we don't have filters
if (this.props.available.length == 0 &&
this.props.applied.length == 0) {
return null;
}
// Sort filters available
const sortedFiltersAvailable = this.props.available.sort((a, b) => {
return a.value - b.value;
});
// For now we allow only one applied
const appliedFilter = this.props.applied[0] || {value: undefined};
if (appliedFilter.value !== undefined) {
// Just render this, with a button for removal
return (
<table className="select-ctrl">
<tbody>
<tr>
<td className="select-container">
{appliedFilter.name}
</td>
<td>
<button className="btn btn-remove"
onClick={() => this.props.onRemove(appliedFilter.value)}>
<i className="fa fa-times" />
</button>
</td>
</tr>
</tbody>
</table>
);
}
// Build options
const optionsAvailable = sortedFiltersAvailable.map((filter) => {
return <option key={filter.value} value={filter.value}>
{filter.name} ({filter.cardinality})
</option>;
});
return (
<table className="select-ctrl">
<tbody>
<tr>
<td className="select-container">
<select className="form-control"
onChange={(e) => this.props.onChange(e.target.value)}
value={appliedFilter.value}>
<option value="none" className="options-title">Show results from RS...</option>
{optionsAvailable}
</select>
</td>
</tr>
</tbody>
</table>
);
}
}
export const RouteserversSelect = withTitle("Route Server")(_RouteserversSelect);
class _PeersFilterSelect extends React.Component {
render() {
// Nothing to do if we don't have filters
if (this.props.available.length == 0 &&
this.props.applied.length == 0) {
return null;
}
// Sort filters available
const sortedFiltersAvailable = this.props.available.sort((a, b) => {
return a.name.localeCompare(b.name);
});
// For now we allow only one applied
const appliedFilter = this.props.applied[0] || {value: undefined};
if (appliedFilter.value !== undefined) {
// Just render this, with a button for removal
return (
<table className="select-ctrl">
<tbody>
<tr>
<td className="select-container">
{appliedFilter.name}
</td>
<td>
<button className="btn btn-remove"
onClick={() => this.props.onRemove(appliedFilter.value)}>
<i className="fa fa-times" />
</button>
</td>
</tr>
</tbody>
</table>
);
}
// Build options
const optionsAvailable = sortedFiltersAvailable.map((filter) => {
return <option key={filter.value} value={filter.value}>
{filter.name}, AS{filter.value} ({filter.cardinality})
</option>;
});
return (
<table className="select-ctrl">
<tbody>
<tr>
<td className="select-container">
<select className="form-control"
onChange={(e) => this.props.onChange(e.target.value)}
value={appliedFilter.value}>
<option className="options-title"
value="none">Show only results from AS...</option>
{optionsAvailable}
</select>
</td>
</tr>
</tbody>
</table>
);
}
}
export const PeersFilterSelect = withTitle("Neighbor")(_PeersFilterSelect);
class __CommunitiesSelect extends React.Component {
propagateChange(value) {
// Decode value
const [group, community] = value.split(",", 2);
const filterValue = community.split(":"); // spew.
this.props.onChange(group, filterValue);
}
render() {
// Nothing to do if we don't have filters
const hasAvailable = this.props.available.communities.length > 0 ||
this.props.available.ext.length > 0 ||
this.props.available.large.length > 0;
const hasApplied = this.props.applied.communities.length > 0 ||
this.props.applied.ext.length > 0 ||
this.props.applied.large.length > 0;
if (!hasApplied && !hasAvailable) {
return null; // nothing to do here.
}
const communitiesAvailable = this.props.available.communities.sort((a, b) => {
return (a.value[0] - b.value[0]) * 100000 + (a.value[1] - b.value[1]);
});
const extCommunitiesAvailable = this.props.available.ext.sort((a, b) => {
return (a.value[1] - b.value[1]) * 100000 + (a.value[2] - b.value[2]);
});
// const extCommunitiesAvailable = []; // They don't work. for now.
const largeCommunitiesAvailable = this.props.available.large.sort((a, b) => {
return (a.value[0] - b.value[0]) * 10000000000 +
(a.value[1] - b.value[1]) * 100000 +
(a.value[2] - b.value[2]);
});
const makeOption = (group, name, filter, cls) => {
const value = `${group},${filter.value.join(":")}`; // yikes.
return (
<option key={filter.value} value={value} className={cls}>
{filter.name} {name} ({filter.cardinality})
</option>
);
}
const communitiesOptions = communitiesAvailable.map((filter) => {
const name = makeReadableCommunity(this.props.communities, filter.value);
const cls = `select-bgp-community-0-${filter.value[0]} ` +
`select-bgp-community-1-${filter.value[1]}`;
return makeOption(FILTER_GROUP_COMMUNITIES, name, filter, cls);
});
const extCommunitiesOptions = extCommunitiesAvailable.map((filter) => {
const name = makeReadableCommunity(this.props.communities, filter.value);
const cls = `select-bgp-community-0-${filter.value[0]} ` +
`select-bgp-community-1-${filter.value[1]} ` +
`select-bgp-community-2-${filter.value[2]}`;
return makeOption(FILTER_GROUP_EXT_COMMUNITIES, name, filter, cls);
});
const largeCommunitiesOptions = largeCommunitiesAvailable.map((filter) => {
const name = makeReadableCommunity(this.props.communities, filter.value);
const cls = `select-bgp-community-0-${filter.value[0]} ` +
`select-bgp-community-1-${filter.value[1]} ` +
`select-bgp-community-2-${filter.value[2]}`;
return makeOption(FILTER_GROUP_LARGE_COMMUNITIES, name, filter, cls);
});
// Render list of applied communities
const makeCommunity = (group, name, filter) => (
<tr key={filter.value}>
<td className="select-container">
<CommunityLabel community={filter.value} />
</td>
<td>
<button className="btn btn-remove"
onClick={() => this.props.onRemove(group, filter.value)}>
<i className="fa fa-times" />
</button>
</td>
</tr>
);
const appliedCommunities = this.props.applied.communities.map((filter) => {
const name = makeReadableCommunity(this.props.communities, filter.value);
return makeCommunity(FILTER_GROUP_COMMUNITIES, name, filter);
});
const appliedExtCommunities = this.props.applied.ext.map((filter) => {
const name = makeReadableCommunity(this.props.communities, filter.value);
return makeCommunity(FILTER_GROUP_EXT_COMMUNITIES, name, filter);
});
const appliedLargeCommunities = this.props.applied.large.map((filter) => {
const name = makeReadableCommunity(this.props.communities, filter.value);
return makeCommunity(FILTER_GROUP_LARGE_COMMUNITIES, name, filter);
});
return (
<table className="select-ctrl">
<tbody>
{appliedCommunities}
{appliedExtCommunities}
{appliedLargeCommunities}
{hasAvailable &&
<tr>
<td className="select-container" colSpan="2">
<select value="none"
onChange={(e) => this.propagateChange(e.target.value)}
className="form-control">
<option value="none" className="options-title">
Select BGP Communities to match...
</option>
{communitiesOptions.length > 0 &&
<optgroup label="Communities">
{communitiesOptions}
</optgroup>}
{extCommunitiesOptions.length > 0 &&
<optgroup label="Ext. Communities">
{extCommunitiesOptions}
</optgroup>}
{largeCommunitiesOptions.length > 0 &&
<optgroup label="Large Communities">
{largeCommunitiesOptions}
</optgroup>}
</select>
</td>
</tr>}
</tbody>
</table>
);
}
}
const _CommunitiesSelect = connect(
(state) => ({
communities: state.config.bgp_communities,
})
)(__CommunitiesSelect);
export const CommunitiesSelect = withTitle("BGP Communities")(_CommunitiesSelect);

View File

@ -1,19 +0,0 @@
import React from 'react'
import Spinner from 'react-spinkit'
export default class Indicator extends React.Component {
render() {
if (this.props.show == false) {
return null;
}
return (
<div className="loading-indicator">
<Spinner spinnerName="circle" />
</div>
);
}
}

View File

@ -1,83 +0,0 @@
/*
* Prefix lookup actions
*/
import axios from 'axios'
import {filtersUrlEncode} from 'components/filters/encoding'
export const SET_LOOKUP_QUERY_VALUE = '@lookup/SET_LOOKUP_QUERY_VALUE';
export const LOAD_RESULTS_REQUEST = '@lookup/LOAD_RESULTS_REQUEST';
export const LOAD_RESULTS_SUCCESS = '@lookup/LOAD_RESULTS_SUCCESS';
export const LOAD_RESULTS_ERROR = '@lookup/LOAD_RESULTS_ERROR';
export const RESET = "@lookup/RESET";
// Action creators
export function setLookupQueryValue(value) {
return {
type: SET_LOOKUP_QUERY_VALUE,
payload: {
value: value,
}
}
}
export function loadResultsRequest(query) {
return {
type: LOAD_RESULTS_REQUEST,
payload: {
query: query
}
}
}
export function loadResultsSuccess(query, results) {
return {
type: LOAD_RESULTS_SUCCESS,
payload: {
query: query,
results: results
}
}
}
export function loadResultsError(query, error) {
return {
type: LOAD_RESULTS_ERROR,
payload: {
query: query,
error: error
}
}
}
export function loadResults(query, filters, pageImported=0, pageFiltered=0) {
return (dispatch) => {
dispatch(loadResultsRequest(query));
// Build querystring
const q = `q=${query}&page_filtered=${pageFiltered}&page_imported=${pageImported}`;
const f = filtersUrlEncode(filters);
axios.get(`/api/v1/lookup/prefix?${q}${f}`)
.then(
(res) => {
dispatch(loadResultsSuccess(query, res.data));
},
(error) => {
dispatch(loadResultsError(query, error));
});
}
}
export function reset() {
return {
type: RESET,
payload: {}
}
}
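For reference, a hypothetical dispatch and the request it builds: the query string combines the search term, both pagination cursors and the URL-encoded filters from components/filters/encoding.
// filters is assumed to carry a single ASN filter (64512); page 2 of the imported routes:
dispatch(loadResults("10.0.0.0/24", filters, 2, 0));
// -> GET /api/v1/lookup/prefix?q=10.0.0.0/24&page_filtered=0&page_imported=2&asns=64512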

View File

@ -1,122 +0,0 @@
/*
* Alice (Prefix-)Lookup
*/
import {debounce} from 'underscore'
import React from 'react'
import {connect} from 'react-redux'
import {replace} from 'react-router-redux'
import {setLookupQueryValue} from './actions'
import Content from 'components/content'
import LookupResults from './results'
import SearchInput from 'components/search-input'
import QuickLinks from 'components/routeservers/routes/quick-links'
class LookupHelp extends React.Component {
render() {
if(this.props.query != '') {
return null;
}
return (
<div className="lookup-help">
<h3>Did you know?</h3>
<p>You can search for</p>
<ul>
<li><b>Prefixes</b>,</li>
<li><b>Peers</b> by entering their name and</li>
<li><b>ASNs</b> by prefixing them with 'AS'</li>
</ul>
<p>Just start typing!</p>
</div>
);
}
}
class Lookup extends React.Component {
constructor(props) {
super(props);
this.debouncedDispatch = debounce(this.props.dispatch, 400);
}
doLookup(q) {
// Make path
const destination = {
pathname: "/search",
search: `?q=${q}`
};
// Set lookup params
this.props.dispatch(setLookupQueryValue(q));
this.debouncedDispatch(replace(destination));
}
componentDidMount() {
// this is yucky but the debounced
// search input seems to kill the ref=
let input = document.getElementById('lookup-search-input');
input.focus();
let value = input.value;
input.value = "";
input.value = value;
}
render() {
return (
<div className="lookup-container">
<div className="card">
<h2><Content id="lookup.title">Search on all route servers</Content></h2>
<SearchInput
ref="searchInput"
id="lookup-search-input"
value={this.props.queryValue}
placeholder="Search for Prefixes, Peers or ASNs on all Route Servers"
onChange={(e) => this.doLookup(e.target.value)} />
</div>
<QuickLinks routes={this.props.routes}
excludeNotExported={true} />
<LookupHelp query={this.props.query} />
<LookupResults />
</div>
)
}
}
export default connect(
(state) => {
const lookup = state.lookup;
return {
query: state.lookup.query,
queryValue: state.lookup.queryValue,
isLoading: state.lookup.isLoading,
error: state.lookup.error,
routes: {
filtered: {
loading: lookup.isLoading,
totalResults: lookup.totalRoutesFiltered,
},
received: {
loading: lookup.isLoading,
totalResults: lookup.totalRoutesImported,
},
notExported: {
loading: false,
totalResults: 0,
}
}
}
}
)(Lookup);

View File

@ -1,71 +0,0 @@
import React from 'react'
import {connect} from 'react-redux'
import PageHeader from 'components/page-header'
import Lookup from 'components/lookup'
import LookupSummary from 'components/lookup/results-summary'
import FiltersEditor from 'components/filters/editor'
import Content from 'components/content'
import {makeLinkProps} from './state'
class _LookupView extends React.Component {
render() {
if (this.props.enabled == false) {
return null;
}
return (
<div className="lookup-container details-main">
<div className="col-main col-lg-9 col-md-12">
<Lookup />
</div>
<div className="col-aside-details col-lg-3 col-md-12">
<LookupSummary />
<FiltersEditor makeLinkProps={makeLinkProps}
linkProps={this.props.linkProps}
filtersApplied={this.props.filtersApplied}
filtersAvailable={this.props.filtersAvailable} />
</div>
</div>
);
}
}
const LookupView = connect(
(state) => {
return {
enabled: state.config.prefix_lookup_enabled,
filtersAvailable: state.lookup.filtersAvailable,
filtersApplied: state.lookup.filtersApplied,
linkProps: {
anchor: "filtered",
page: 0,
pageReceived: 0, // Reset pagination on filter change
pageFiltered: 0,
query: state.lookup.query,
filtersApplied: state.lookup.filtersApplied,
routing: state.routing.locationBeforeTransitions,
},
}
}
)(_LookupView);
export default class LookupPage extends React.Component {
render() {
return (
<div className="welcome-page">
<PageHeader></PageHeader>
<p></p>
<LookupView />
</div>
);
}
}

View File

@ -1,218 +0,0 @@
/*
* Routes Lookup Pagination
* ------------------------
*
* This code contains a lot of overlap with the pagination
* code in components/routeservers/routes/pagination.jsx
*
* Because time right now is of the essence, we will use
* this as a base and generalize the pagination code later.
* (I'm so sorry :/)
*
* TODO: Refactor and generalize pagination links
*/
import React from 'react'
import {connect} from 'react-redux'
import {Link} from 'react-router'
import {push} from 'react-router-redux'
import {makeLinkProps} from './state'
const PageLink = function(props) {
const linkPage = parseInt(props.page, 10);
const label = props.label || (linkPage + 1);
if (props.disabled) {
return <span>{label}</span>;
}
const linkTo = makeLinkProps(props);
return (
<Link to={linkTo}>{label}</Link>
);
}
const PageSelect = (props) => {
const {pages, options} = props;
if (pages.length == 0) {
return null; // nothing to do here.
}
const items = pages.map((p) => (
<option key={p} value={p}>{p + 1}</option>
));
const active = props.page >= pages[0];
let itemClassName = "";
if (active) {
itemClassName = "active";
}
return (
<li className={itemClassName}>
<select className="form-control pagination-select"
value={props.page}
onChange={(e) => props.onChange(e.target.value) }>
{ props.page < pages[0] && <option value={pages[0]}>more...</option> }
{items}
</select>
</li>
);
}
class RoutesPaginatorView extends React.Component {
/*
* Create an array of page "ids" we can use to map our
* pagination items.
* Split result into items for direct link access and
* select for dropdown-like access.
*/
makePaginationPages(numPages) {
const MAX_ITEMS = 12;
const pages = Array.from(Array(numPages), (_, i) => i);
return {
items: pages.slice(0, MAX_ITEMS),
select: pages.slice(MAX_ITEMS)
}
}
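// e.g. makePaginationPages(15), with a hypothetical total of 15 pages:
// => {items: [0, 1, ..., 11], select: [12, 13, 14]}
// (the first twelve pages get direct links, the rest go into the dropdown)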
/*
* Dispatch navigation event and go to page
*/
navigateToPage(page) {
const linkProps = makeLinkProps(Object.assign({}, this.props, {
page: page
}));
this.props.dispatch(push(linkProps));
}
render() {
if (this.props.totalPages <= 1) {
return null; // Nothing to paginate
}
const pages = this.makePaginationPages(this.props.totalPages);
const pageLinks = pages.items.map((p) => {
let className = "";
if (p == this.props.page) {
className = "active";
}
return (
<li key={p} className={className}>
<PageLink page={p}
routing={this.props.routing}
anchor={this.props.anchor}
loadNotExported={this.props.loadNotExported}
filtersApplied={this.props.filtersApplied}
pageReceived={this.props.pageReceived}
pageFiltered={this.props.pageFiltered}
pageNotExported={this.props.pageNotExported} />
</li>
);
});
let prevLinkClass = "";
if (this.props.page == 0) {
prevLinkClass = "disabled";
}
let nextLinkClass = "";
if (this.props.page + 1 == this.props.totalPages) {
nextLinkClass = "disabled";
}
return (
<nav aria-label="Routes Pagination">
<ul className="pagination">
<li className={prevLinkClass}>
<PageLink page={this.props.page - 1}
label="&laquo;"
disabled={this.props.page == 0}
routing={this.props.routing}
anchor={this.props.anchor}
loadNotExported={this.props.loadNotExported}
filtersApplied={this.props.filtersApplied}
pageReceived={this.props.pageReceived}
pageFiltered={this.props.pageFiltered}
pageNotExported={this.props.pageNotExported} />
</li>
{pageLinks}
<PageSelect pages={pages.select}
page={this.props.page}
onChange={(page) => this.navigateToPage(page)} />
{pages.select.length == 0 &&
<li className={nextLinkClass}>
<PageLink page={this.props.page + 1}
disabled={this.props.page + 1 == this.props.totalPages}
label="&raquo;"
routing={this.props.routing}
anchor={this.props.anchor}
loadNotExported={this.props.loadNotExported}
filtersApplied={this.props.filtersApplied}
pageReceived={this.props.pageReceived}
pageFiltered={this.props.pageFiltered}
pageNotExported={this.props.pageNotExported} />
</li>}
</ul>
</nav>
);
}
}
export const RoutesPaginator = connect(
(state) => ({
filtersApplied: state.lookup.filtersApplied,
pageReceived: state.lookup.pageImported,
pageFiltered: state.lookup.pageFiltered,
pageNotExported: 0,
routing: state.routing.locationBeforeTransitions
})
)(RoutesPaginatorView);
export class RoutesPaginationInfo extends React.Component {
render() {
const totalResults = this.props.totalResults;
const perPage = this.props.pageSize;
const start = this.props.page * perPage + 1;
const end = Math.min(start + perPage - 1, totalResults);
if (this.props.totalPages <= 1) {
let routes = "route";
if (totalResults > 1) {
routes = "routes";
}
return (
<div className="routes-pagination-info pull-right">
Showing <b>all</b> of <b>{totalResults}</b> {routes}
</div>
);
}
return (
<div className="routes-pagination-info pull-right">
Showing <b>{start} - {end}</b> of <b>{totalResults}</b> total routes
</div>
);
}
}

View File

@ -1,168 +0,0 @@
/*
* Prefix Lookup Reducer
*/
import {
LOAD_RESULTS_REQUEST,
LOAD_RESULTS_SUCCESS,
LOAD_RESULTS_ERROR,
SET_LOOKUP_QUERY_VALUE,
RESET,
} from './actions'
import {cloneFilters,
decodeFiltersApplied,
initializeFilterState}
from 'components/filters/state'
const LOCATION_CHANGE = '@@router/LOCATION_CHANGE'
const initialState = {
query: "",
queryValue: "",
anchor: "",
filtersAvailable: initializeFilterState(),
filtersApplied: initializeFilterState(),
routesImported: [],
routesFiltered: [],
error: null,
queryDurationMs: 0.0,
cachedAt: false,
cacheTtl: false,
pageImported: 0,
pageFiltered: 0,
pageSizeImported: 0,
pageSizeFiltered: 0,
totalPagesImported: 0,
totalPagesFiltered: 0,
totalRoutesImported: 0,
totalRoutesFiltered: 0,
isLoading: false
}
/*
* Helper: Get scroll anchor from hash
*/
const getScrollAnchor = function(hash) {
return hash.substr(hash.indexOf('-')+1);
}
/*
* Restore lookup query state from location parameters
*/
const _handleLocationChange = function(state, payload) {
const params = payload.query;
const query = params["q"] || "";
const pageFiltered = parseInt(params["pf"] || 0, 10);
const pageReceived = parseInt(params["pr"] || 0, 10);
const anchor = getScrollAnchor(payload.hash);
// Restore filters applied from location
const filtersApplied = decodeFiltersApplied(params);
return Object.assign({}, state, {
anchor: anchor,
query: query,
queryValue: query,
pageImported: pageReceived,
pageFiltered: pageFiltered,
filtersApplied: filtersApplied,
});
}
/*
* Receive query results
*/
const _loadQueryResult = function(state, payload) {
const results = payload.results;
const imported = results.imported;
const filtered = results.filtered;
const api = results.api;
return Object.assign({}, state, {
isLoading: false,
// Cache Status
cachedAt: api.cache_status.cached_at, // I don't like this style.
cacheTtl: api.ttl,
// Routes
routesImported: imported.routes,
routesFiltered: filtered.routes,
// Filters available
filtersAvailable: results.filters_available,
filtersApplied: results.filters_applied,
// Pagination
pageImported: imported.pagination.page,
pageFiltered: filtered.pagination.page,
pageSizeImported: imported.pagination.page_size,
pageSizeFiltered: filtered.pagination.page_size,
totalPagesImported: imported.pagination.total_pages,
totalPagesFiltered: filtered.pagination.total_pages,
totalRoutesImported: imported.pagination.total_results,
totalRoutesFiltered: filtered.pagination.total_results,
// Statistics
queryDurationMs: results.request_duration_ms,
totalRoutes: imported.pagination.total_results + filtered.pagination.total_results
});
}
export default function reducer(state=initialState, action) {
switch(action.type) {
case LOCATION_CHANGE:
return _handleLocationChange(state, action.payload);
case SET_LOOKUP_QUERY_VALUE:
return Object.assign({}, state, {
queryValue: action.payload.value,
});
case LOAD_RESULTS_REQUEST:
return Object.assign({}, state, {
query: action.payload.query,
queryValue: action.payload.query,
isLoading: true
});
case LOAD_RESULTS_SUCCESS:
if (state.query != action.payload.query) {
return state;
}
return _loadQueryResult(state, action.payload);
case LOAD_RESULTS_ERROR:
if (state.query != action.payload.query) {
return state;
}
return Object.assign({}, state, initialState, {
query: action.payload.query,
queryValue: action.payload.query,
error: action.payload.error
});
case RESET:
return Object.assign({}, state, initialState);
}
return state;
}

View File

@ -1,88 +0,0 @@
import React from 'react'
import {connect} from 'react-redux'
import moment from 'moment'
import RelativeTime from 'components/datetime/relative'
const RefreshState = function(props) {
if (!props.cachedAt || !props.cacheTtl) {
return null;
}
const cachedAt = moment.utc(props.cachedAt);
const cacheTtl = moment.utc(props.cacheTtl);
if (cacheTtl.isBefore(moment.utc())) {
// This means cache is currently being rebuilt
return (
<li>
Routes cache was built <b><RelativeTime fuzzyNow={5}
pastEvent={true}
value={cachedAt} /> </b>
and is currently being refreshed.
</li>
);
}
return (
<li>
Routes cache was built <b><RelativeTime fuzzyNow={5} value={cachedAt} /> </b>
and will be refreshed <b><RelativeTime value={cacheTtl} futureEvent={true} /></b>.
</li>
);
}
class ResultsBox extends React.Component {
render() {
if (this.props.query == '') {
return null;
}
if (this.props.isLoading) {
return null;
}
const queryDuration = this.props.queryDuration.toFixed(2);
const cachedAt = this.props.cachedAt;
const cacheTtl = this.props.cacheTtl;
return (
<div className="card">
<div className="lookup-result-summary">
<ul>
<li>
Found <b>{this.props.totalImported}</b> received
and <b>{this.props.totalFiltered}</b> filtered routes.
</li>
<li>Query took <b>{queryDuration} ms</b> to complete.</li>
<RefreshState cachedAt={this.props.cachedAt}
cacheTtl={this.props.cacheTtl} />
</ul>
</div>
</div>
);
}
}
export default connect(
(state) => {
return {
isLoading: state.lookup.isLoading,
totalImported: state.lookup.totalRoutesImported,
totalFiltered: state.lookup.totalRoutesFiltered,
cachedAt: state.lookup.cachedAt,
cacheTtl: state.lookup.cacheTtl,
queryDuration: state.lookup.queryDurationMs
}
}
)(ResultsBox)

View File

@ -1,215 +0,0 @@
import _ from 'underscore'
import React from 'react'
import {connect} from 'react-redux'
import {Link} from 'react-router'
import {replace} from 'react-router-redux'
import {filtersEqual} from 'components/filters/groups'
import FilterReason
from 'components/routeservers/communities/filter-reason'
import NoexportReason
from 'components/routeservers/communities/noexport-reason'
import BgpAttributesModal
from 'components/routeservers/routes/bgp-attributes-modal'
import LoadingIndicator
from 'components/loading-indicator/small'
import ResultsTable from './table'
import {loadResults, reset} from './actions'
import {RoutesPaginator,
RoutesPaginationInfo} from './pagination'
import {RoutesHeader}
from 'components/routeservers/routes/view'
const ResultsView = function(props) {
if(!props.routes) {
return null;
}
if(props.routes.length == 0) {
return null;
}
const type = props.type;
return (
<div className={`card routes-view routes-${type}`}>
<div className="row">
<div className="col-md-6 routes-header-container">
<RoutesHeader type={type} />
</div>
<div className="col-md-6">
<RoutesPaginationInfo page={props.page}
pageSize={props.pageSize}
totalPages={props.totalPages}
totalResults={props.totalResults} />
</div>
</div>
<ResultsTable routes={props.routes}
displayReasons={props.displayReasons} />
<center>
<RoutesPaginator page={props.page} totalPages={props.totalPages}
queryParam={props.query}
anchor={type} />
</center>
</div>
);
}
class NoResultsView extends React.Component {
render() {
if (!this.props.show) {
return null;
}
return (
<p className="lookup-no-results text-info card">
No prefixes could be found for <b>{this.props.query}</b>
</p>
);
}
}
const NoResultsFallback = connect(
(state) => {
let total = state.lookup.totalRoutes;
let query = state.lookup.query;
let isLoading = state.lookup.isLoading;
let show = false;
if (total == 0 && query != "" && isLoading == false) {
show = true;
}
return {
show: show,
query: state.lookup.query
}
}
)(NoResultsView);
class LookupResults extends React.Component {
dispatchLookup() {
const query = this.props.query;
const pageImported = this.props.pagination.imported.page;
const pageFiltered = this.props.pagination.filtered.page;
const filters = this.props.filtersApplied;
if (query == "") {
// Dispatch reset and transition to main page
this.props.dispatch(reset());
this.props.dispatch(replace("/"));
} else {
this.props.dispatch(
loadResults(query, filters, pageImported, pageFiltered)
);
}
}
componentDidMount() {
// Dispatch query
this.dispatchLookup();
}
componentDidUpdate(prevProps) {
if(this.props.query != prevProps.query ||
this.props.pagination.filtered.page != prevProps.pagination.filtered.page ||
this.props.pagination.imported.page != prevProps.pagination.imported.page ||
!filtersEqual(this.props.filtersApplied, prevProps.filtersApplied)) {
this.dispatchLookup();
}
}
render() {
if(this.props.isLoading) {
return <LoadingIndicator />;
}
const ref = this.refs[this.props.anchor];
if(ref) {
ref.scrollIntoView();
}
const filteredRoutes = this.props.routes.filtered;
const importedRoutes = this.props.routes.imported;
return (
<div className="lookup-results">
<BgpAttributesModal />
<NoResultsFallback />
<a ref="filtered" name="routes-filtered" />
<ResultsView type="filtered"
routes={filteredRoutes}
page={this.props.pagination.filtered.page}
pageSize={this.props.pagination.filtered.pageSize}
totalPages={this.props.pagination.filtered.totalPages}
totalResults={this.props.pagination.filtered.totalResults}
query={this.props.query}
displayReasons="filtered" />
<a ref="received" name="routes-received" />
<ResultsView type="received"
page={this.props.pagination.imported.page}
pageSize={this.props.pagination.imported.pageSize}
totalPages={this.props.pagination.imported.totalPages}
totalResults={this.props.pagination.imported.totalResults}
query={this.props.query}
routes={importedRoutes} />
</div>
);
}
}
export default connect(
(state) => {
const filteredRoutes = state.lookup.routesFiltered;
const importedRoutes = state.lookup.routesImported;
return {
anchor: state.lookup.anchor,
routes: {
filtered: filteredRoutes,
imported: importedRoutes
},
pagination: {
filtered: {
page: state.lookup.pageFiltered,
pageSize: state.lookup.pageSizeFiltered,
totalPages: state.lookup.totalPagesFiltered,
totalResults: state.lookup.totalRoutesFiltered,
},
imported: {
page: state.lookup.pageImported,
pageSize: state.lookup.pageSizeImported,
totalPages: state.lookup.totalPagesImported,
totalResults: state.lookup.totalRoutesImported,
}
},
isLoading: state.lookup.isLoading,
query: state.lookup.query,
filtersApplied: state.lookup.filtersApplied,
}
}
)(LookupResults);

View File

@ -1,64 +0,0 @@
/*
* Manage state
*/
import {filtersUrlEncode} from 'components/filters/encoding'
import {FILTER_GROUP_SOURCES,
FILTER_GROUP_ASNS,
FILTER_GROUP_COMMUNITIES,
FILTER_GROUP_EXT_COMMUNITIES,
FILTER_GROUP_LARGE_COMMUNITIES} from 'components/filters/groups'
/*
* Maybe this can be customized and injected into
* the PageLink component.
*/
export function makeLinkProps(props) {
const linkPage = parseInt(props.page, 10);
let pr = props.pageReceived;
let pf = props.pageFiltered;
// This could surely be more elegant.
switch(props.anchor) {
case "received":
pr = linkPage;
break;
case "filtered":
pf = linkPage;
break;
}
let pagination = "";
if (pr) {
pagination += `pr=${pr}&`;
}
if (pf) {
pagination += `pf=${pf}&`;
}
let filtering = "";
if (props.filtersApplied) {
filtering = filtersUrlEncode(props.filtersApplied);
}
const query = props.routing.query.q || "";
const search = `?${pagination}q=${query}${filtering}`;
let hash = null;
if (props.anchor) {
hash = `#routes-${props.anchor}`;
}
const linkTo = {
pathname: props.routing.pathname,
hash: hash,
search: search,
};
return linkTo;
}
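A hedged example of the location object this produces (the inputs are made up); the lookup reducer later reads "pr", "pf" and "q" back out of it.
makeLinkProps({
  page: 2,
  anchor: "filtered",
  pageReceived: 0,
  pageFiltered: 1,
  filtersApplied: initializeFilterState(), // from components/filters/state
  routing: {pathname: "/search", query: {q: "AS64512"}},
});
// => {pathname: "/search", hash: "#routes-filtered", search: "?pf=2&q=AS64512"}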

View File

@ -1,129 +0,0 @@
/*
* Lookup Results Table
* --------------------
*/
import React from 'react'
import {connect} from 'react-redux'
import {Link} from 'react-router'
import {push} from 'react-router-redux'
import {_lookup,
ColDefault,
ColNetwork,
ColFlags,
ColAsPath} from 'components/routeservers/routes/route/column'
import {showBgpAttributes}
from 'components/routeservers/routes/bgp-attributes-modal-actions'
// Link Wrappers:
const ColLinkedNeighbor = function(props) {
const route = props.route;
const to = `/routeservers/${route.routeserver.id}/protocols/${route.neighbour.id}/routes`;
return (
<td>
<Link to={to}>{_lookup(props.route, props.column)}</Link>
</td>
);
}
const ColLinkedRouteserver = function(props) {
const route = props.route;
const to = `/routeservers/${route.routeserver.id}`;
return (
<td>
<Link to={to}>{_lookup(props.route, props.column)}</Link>
</td>
);
}
// Custom RouteColumn
const RouteColumn = function(props) {
const widgets = {
"network": ColNetwork,
"flags": ColFlags,
"bgp.as_path": ColAsPath,
"ASPath": ColAsPath,
"neighbour.description": ColLinkedNeighbor,
"neighbour.asn": ColLinkedNeighbor,
"routeserver.name": ColLinkedRouteserver
};
const rsId = props.route.routeserver.id;
const blackholes = props.blackholesMap[rsId] || [];
let Widget = widgets[props.column] || ColDefault;
return (
<Widget column={props.column} route={props.route}
displayReasons={props.displayReasons}
blackholes={blackholes}
onClick={props.onClick} />
);
}
class LookupRoutesTable extends React.Component {
showAttributesModal(route) {
this.props.dispatch(showBgpAttributes(route));
}
render() {
let routes = this.props.routes;
const routesColumns = this.props.routesColumns;
const routesColumnsOrder = this.props.routesColumnsOrder;
if (!routes || !routes.length) {
return null;
}
let routesView = routes.map((r,i) => {
return (
<tr key={i}>
{routesColumnsOrder.map(col => {
return (<RouteColumn key={col}
onClick={() => this.showAttributesModal(r)}
blackholesMap={this.props.blackholesMap}
column={col}
route={r}
displayReasons={this.props.displayReasons} />);
}
)}
</tr>
);
});
return (
<table className="table table-striped table-routes">
<thead>
<tr>
{routesColumnsOrder.map(col => <th key={col}>{routesColumns[col]}</th>)}
</tr>
</thead>
<tbody>
{routesView}
</tbody>
</table>
);
}
}
export default connect(
(state) => ({
blackholesMap: state.config.blackholes,
routesColumns: state.config.lookup_columns,
routesColumnsOrder: state.config.lookup_columns_order,
})
)(LookupRoutesTable);

View File

@ -1,70 +0,0 @@
/**
* Bootstrap Modal React Component
*
* @author Matthias Hannig <mha@ecix.net>
*/
import React from 'react'
export class Header extends React.Component {
render() {
return(
<div className="modal-header">
<button type="button"
className="close"
aria-label="Close"
onClick={this.props.onClickClose}>
<span aria-hidden="true">&times;</span></button>
{this.props.children}
</div>
);
}
}
export class Body extends React.Component {
render() {
return (
<div className="modal-body">
{this.props.children}
</div>
);
}
}
export class Footer extends React.Component {
render() {
return(
<div className="modal-footer">
{this.props.children}
</div>
);
}
}
export default class Modal extends React.Component {
render() {
if(!this.props.show) {
return null;
}
return (
<div className={this.props.className}>
<div className="modal modal-open modal-show fade in" role="dialog">
<div className="modal-dialog" role="document">
<div className="modal-content">
{this.props.children}
</div>
</div>
</div>
<div className="modal-backdrop fade in"
onClick={this.props.onClickBackdrop}></div>
</div>
);
}
}

View File

@ -1,12 +0,0 @@
import { combineReducers } from 'redux'
import bgpAttributesModalReducer
from 'components/routeservers/routes/bgp-attributes-modal-reducer'
export default combineReducers({
bgpAttributes: bgpAttributesModalReducer
});

View File

@ -1,14 +0,0 @@
import React from 'react'
export default class PageHeader extends React.Component {
render() {
return (
<div className="page-header">
{this.props.children}
</div>
)
}
}

View File

@ -1,156 +0,0 @@
/**
* Routeservers Actions
*/
import axios from 'axios'
import {apiError} from 'components/errors/actions'
export const LOAD_ROUTESERVERS_REQUEST = '@routeservers/LOAD_ROUTESERVERS_REQUEST';
export const LOAD_ROUTESERVERS_SUCCESS = '@routeservers/LOAD_ROUTESERVERS_SUCCESS';
export const LOAD_ROUTESERVERS_ERROR = '@routeservers/LOAD_ROUTESERVERS_ERROR';
export const LOAD_ROUTESERVER_STATUS_REQUEST = '@routeservers/LOAD_ROUTESERVER_STATUS_REQUEST';
export const LOAD_ROUTESERVER_STATUS_SUCCESS = '@routeservers/LOAD_ROUTESERVER_STATUS_SUCCESS';
export const LOAD_ROUTESERVER_STATUS_ERROR = '@routeservers/LOAD_ROUTESERVER_STATUS_ERROR';
export const LOAD_ROUTESERVER_PROTOCOL_REQUEST = '@routeservers/LOAD_ROUTESERVER_PROTOCOL_REQUEST';
export const LOAD_ROUTESERVER_PROTOCOL_SUCCESS = '@routeservers/LOAD_ROUTESERVER_PROTOCOL_SUCCESS';
export const LOAD_ROUTESERVER_PROTOCOL_ERROR = '@routeservers/LOAD_ROUTESERVER_PROTOCOL_ERROR';
export const SELECT_GROUP = "@routeservers/SELECT_GROUP";
// Action Creators
export function loadRouteserversRequest() {
return {
type: LOAD_ROUTESERVERS_REQUEST
}
}
export function loadRouteserversSuccess(routeservers) {
return {
type: LOAD_ROUTESERVERS_SUCCESS,
payload: {
routeservers: routeservers
}
}
}
export function loadRouteserversError(error) {
return {
type: LOAD_ROUTESERVERS_ERROR,
payload: {
error: error
}
}
}
export function loadRouteservers() {
return (dispatch) => {
dispatch(loadRouteserversRequest())
axios.get('/api/v1/routeservers')
.then(
({data}) => {
dispatch(loadRouteserversSuccess(data["routeservers"]));
},
(error) => {
dispatch(apiError(error));
dispatch(loadRouteserversError(error.data));
});
}
}
export function loadRouteserverStatusRequest(routeserverId) {
return {
type: LOAD_ROUTESERVER_STATUS_REQUEST,
payload: {
routeserverId: routeserverId
}
}
}
export function loadRouteserverStatusSuccess(routeserverId, status) {
return {
type: LOAD_ROUTESERVER_STATUS_SUCCESS,
payload: {
status: status,
routeserverId: routeserverId
}
}
}
export function loadRouteserverStatusError(routeserverId, error) {
return {
type: LOAD_ROUTESERVER_STATUS_ERROR,
payload: {
error: error,
routeserverId: routeserverId
}
}
}
export function loadRouteserverStatus(routeserverId) {
return (dispatch) => {
dispatch(loadRouteserverStatusRequest(routeserverId));
axios.get(`/api/v1/routeservers/${routeserverId}/status`)
.then(
({data}) => {
dispatch(loadRouteserverStatusSuccess(routeserverId, data.status));
},
(error) => {
dispatch(apiError(error));
dispatch(loadRouteserverStatusError(routeserverId, error));
});
}
}
export function loadRouteserverProtocolRequest(routeserverId) {
return {
type: LOAD_ROUTESERVER_PROTOCOL_REQUEST,
payload: {
routeserverId: routeserverId,
}
}
}
export function loadRouteserverProtocolSuccess(routeserverId, protocol, api) {
return {
type: LOAD_ROUTESERVER_PROTOCOL_SUCCESS,
payload: {
routeserverId: routeserverId,
protocol: protocol,
api: api
}
}
}
export function loadRouteserverProtocol(routeserverId) {
return (dispatch) => {
dispatch(loadRouteserverProtocolRequest(routeserverId));
axios.get(`/api/v1/routeservers/${routeserverId}/neighbors`)
.then(
({data}) => {
dispatch(loadRouteserverProtocolSuccess(
routeserverId,
data.neighbours,
data.api,
));
},
(error) => dispatch(apiError(error)));
}
}
export function selectGroup(group) {
return {
type: SELECT_GROUP,
payload: group,
}
}

View File

@ -1,41 +0,0 @@
import React from 'react'
import {connect} from 'react-redux'
import {resolveCommunities} from './utils'
class FilterReason extends React.Component {
render() {
const route = this.props.route;
if (!this.props.rejectReasons || !route || !route.bgp ||
!route.bgp.large_communities) {
return null;
}
const reasons = resolveCommunities(
this.props.rejectReasons, route.bgp.large_communities,
);
const reasonsView = reasons.map(([community, reason], key) => {
const cls = `reject-reason reject-reason-${community[1]}-${community[2]}`;
return (
<p key={key} className={cls}>
<a href={`http://irrexplorer.nlnog.net/search/${route.network}`}
target="_blank" >{reason}</a>
</p>
);
});
return (<div className="reject-reasons">{reasonsView}</div>);
}
}
export default connect(
state => {
return {
rejectReasons: state.routeservers.rejectReasons,
}
}
)(FilterReason);

View File

@ -1,53 +0,0 @@
import React from 'react'
import {connect} from 'react-redux'
import {makeReadableCommunity} from './utils'
/*
* Make style tags
* Derive classes from community parts.
*/
function _makeStyleTags(community) {
return community.map((part, i) => {
return `label-bgp-community-${i}-${part}`;
});
}
/*
* Render community label
*/
class Label extends React.Component {
render() {
// Lookup communities
const readableCommunity = makeReadableCommunity(
this.props.communities,
this.props.community);
const key = this.props.community.join(":");
let cls = 'label label-bgp-community ';
if (!readableCommunity) {
cls += "label-bgp-unknown";
// Default label
return (
<span className={cls}>{key}</span>
);
}
// Apply style
cls += "label-info ";
const styleTags = _makeStyleTags(this.props.community);
cls += styleTags.join(" ");
return (<span className={cls}>{readableCommunity} ({key})</span>);
}
}
export default connect(
(state) => ({
communities: state.config.bgp_communities,
})
)(Label);

View File

@ -1,41 +0,0 @@
import React from 'react'
import {connect} from 'react-redux'
import {resolveCommunities} from './utils'
class NoExportReason extends React.Component {
render() {
const route = this.props.route;
if (!this.props.noexportReasons || !route || !route.bgp ||
!route.bgp.large_communities) {
return null;
}
const reasons = resolveCommunities(
this.props.noexportReasons, route.bgp.large_communities,
);
const reasonsView = reasons.map(([community, reason], key) => {
const cls = `noexport-reason noexport-reason-${community[1]}-${community[2]}`;
return (
<p key={key} className={cls}>
<a href={`http://irrexplorer.nlnog.net/search/${route.network}`}
target="_blank" >{reason}</a>
</p>
);
});
return (<div className="reject-reasons">{reasonsView}</div>);
}
}
export default connect(
state => {
return {
noexportReasons: state.routeservers.noexportReasons,
}
}
)(NoExportReason);

View File

@ -1,102 +0,0 @@
/*
* Communities helper
*/
/*
* Communities are represented as a nested object:
* {
*   1234: {
*     23: "community-leaf",
*     42: {
*       1: "large-community-leaf"
*     }
*   }
* }
*/
/*
* Resolve a community description from the above described
* tree structure.
*/
export function resolveCommunity(base, community) {
let lookup = base;
for (const part of community) {
if (typeof(lookup) !== "object") {
return null;
}
let res = lookup[part];
if (!res) {
// Try the wildcard
if (lookup["*"]) {
res = lookup["*"]
} else {
return null; // We did everything we could
}
}
lookup = res;
}
return lookup;
}
/*
* Resolve all communities
*/
export function resolveCommunities(base, communities) {
let results = [];
for (const c of communities) {
const description = resolveCommunity(base, c);
if (description != null) {
results.push([c, description]);
}
}
return results;
}
/*
* Reject candidate helpers:
*
* - check if prefix is a reject candidate
* - make css classes
*/
export function isRejectCandidate(rejectCommunities, route) {
// Check if any reject candidate community is set
const communities = route.bgp.communities;
const largeCommunities = route.bgp.large_communities;
const resolved = resolveCommunities(
rejectCommunities, largeCommunities
);
return (resolved.length > 0);
}
/*
* Expand variables in string:
* "Test AS$0 rejects $2"
* will expand with [23, 42, 123] to
* "Test AS23 rejects 123"
*/
export function expandVars(str, vars) {
if (!str) {
return str; // We don't have to do anything.
}
var res = str;
vars.map((v, i) => {
res = res.replace(`$${i}`, v);
});
return res;
}
export function makeReadableCommunity(communities, community) {
const label = resolveCommunity(communities, community);
return expandVars(label, community);
}
export function communityRepr(community) {
return community.join(":");
}
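A compact sketch tying the helpers together (the community map below is invented): a "*" leaf matches any remaining part, and $-placeholders are expanded with the community's own values.
import {makeReadableCommunity} from 'components/routeservers/communities/utils'
const communities = {
  64512: {
    "*": "Learned from peer AS$0",
  },
};
makeReadableCommunity(communities, [64512, 23]);
// => "Learned from peer AS64512"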

View File

@ -1,32 +0,0 @@
import React from 'react'
import {connect} from 'react-redux'
class Details extends React.Component {
render() {
let rsStatus = this.props.details[this.props.routeserverId];
if (!rsStatus) {
return null;
}
// Get routeserver name
let rs = this.props.routeservers[this.props.routeserverId];
if (!rs) {
return null;
}
return (
<span className="status-name">{rs.name}</span>
);
}
}
export default connect(
(state) => {
return {
routeservers: state.routeservers.byId,
details: state.routeservers.details
}
}
)(Details);

View File

@ -1,92 +0,0 @@
import {debounce} from 'underscore'
import React from 'react'
import {connect} from 'react-redux'
import {replace} from 'react-router-redux'
import PageHeader from 'components/page-header'
import Details from './details'
import Status from './status'
import SearchInput from 'components/search-input'
import Protocols from './protocols'
import QuickLinks from './protocols/quick-links'
import {setFilterValue} from './protocols/actions'
import {makeQueryLinkProps} from './protocols/routing'
class RouteserversPage extends React.Component {
constructor(props) {
super(props);
this.dispatchDebounced = debounce(this.props.dispatch, 350);
}
setFilter(value) {
// Set filter value (for input rendering)
this.props.dispatch(setFilterValue(value));
// Update location delayed
this.dispatchDebounced(replace(
makeQueryLinkProps(
this.props.routing,
value,
this.props.sortColumn,
this.props.sortOrder)));
}
render() {
return(
<div className="routeservers-page">
<PageHeader>
<Details routeserverId={this.props.params.routeserverId} />
</PageHeader>
<div className="row details-main">
<div className="col-main col-lg-9 col-md-12">
<div className="card">
<SearchInput
value={this.props.filterValue}
placeholder="Filter by Neighbor, ASN or Description"
onChange={(e) => this.setFilter(e.target.value)}
/>
</div>
<QuickLinks />
<Protocols protocol="bgp" routeserverId={this.props.params.routeserverId} />
</div>
<div className="col-lg-3 col-md-12 col-aside-details">
<div className="card">
<Status routeserverId={this.props.params.routeserverId}
cacheStatus={this.props.cacheStatus} />
</div>
</div>
</div>
</div>
);
}
}
export default connect(
(state) => {
return {
routing: state.routing.locationBeforeTransitions,
filterValue: state.neighbors.filterValue,
sortColumn: state.neighbors.sortColumn,
sortOrder: state.neighbors.sortOrder,
cacheStatus: {
generatedAt: state.neighbors.cachedAt,
ttl: state.neighbors.cacheTtl,
}
};
}
)(RouteserversPage);

View File

@ -1,15 +0,0 @@
// Actions
export const SET_FILTER_VALUE = "@neighbors/SET_FILTER_VALUE";
// Action Creators: Set Filter Query
export function setFilterValue(value) {
return {
type: SET_FILTER_VALUE,
payload: {
value: value
}
}
}

Some files were not shown because too many files have changed in this diff.