Compare commits
640 Commits
Netnod/gob
...
main
Author | SHA1 | Date | |
---|---|---|---|
|
6a3947bc17 | ||
|
d0b11b1688 | ||
|
4a05471da0 | ||
|
8ce5bcd22e | ||
|
965313c62b | ||
|
4e88ca340c | ||
|
369d3e117c | ||
|
a8f6abee84 | ||
|
f427012e6f | ||
|
bb457e1ba9 | ||
|
7ab7436c85 | ||
|
b046db6911 | ||
|
88f7c47884 | ||
|
d6fed41e8d | ||
|
65443abea0 | ||
|
c8306988e6 | ||
|
d800360ea2 | ||
|
b5dec11f7c | ||
|
f2a75fdaba | ||
|
94d5de9c97 | ||
|
bac0119f1b | ||
|
b5299ebcd3 | ||
|
57765c6cfd | ||
|
bb16957745 | ||
|
4db0f92816 | ||
|
0853cb83de | ||
|
5cc6b7a0b9 | ||
|
719da070ee | ||
|
3951704b92 | ||
|
dff2773826 | ||
|
f72c4a3518 | ||
|
bd78a05f37 | ||
|
498ce65af5 | ||
|
98847ba0cb | ||
|
1b600447a8 | ||
|
b21c47c8de | ||
|
852d2d7a6d | ||
|
aa3fe6dd2c | ||
|
85c5d19d4f | ||
|
d6fa635377 | ||
|
07092b51bc | ||
|
130db6c0f4 | ||
|
c272856047 | ||
|
6fe7856808 | ||
|
01491ec0b4 | ||
|
f59eb1fe94 | ||
|
585a4a6b49 | ||
|
6c5c7ea14f | ||
|
e6c4b42bfc | ||
|
5bd48aa9ac | ||
|
e6537752d3 | ||
|
1c9375d797 | ||
|
0d14d22cf6 | ||
|
b79bbcb160 | ||
|
4c6cc454bb | ||
|
bb16436c95 | ||
|
1d05b4605a | ||
|
871dcd6673 | ||
|
0432c83899 | ||
|
254a0c9bf2 | ||
|
eb4cc3159a | ||
|
35240d08c6 | ||
|
23e5539688 | ||
|
cd8fdfa610 | ||
|
7e92e99be3 | ||
|
985e8ad180 | ||
|
5cac4ee02b | ||
|
6a4f9935e6 | ||
|
c131ca1a0b | ||
|
fd0700ed6f | ||
|
ad8a55815c | ||
|
ca3fe5d6a7 | ||
|
4e7330b4f3 | ||
|
be5568d530 | ||
|
084f595a86 | ||
|
108504b9f9 | ||
|
085640170d | ||
|
3c13d05414 | ||
|
88b31ece52 | ||
|
d0bed6e6ce | ||
|
775b423152 | ||
|
65f5edc83b | ||
|
ac5b8b8287 | ||
|
d0779320e3 | ||
|
8d02af4b78 | ||
|
86ce9b2681 | ||
|
7675418a2a | ||
|
07b8b7c993 | ||
|
f644edc500 | ||
|
3d588023b0 | ||
|
ec963a67db | ||
|
17a9cc67f2 | ||
|
4c1909a659 | ||
|
0449e563d8 | ||
|
e8509f74d5 | ||
|
ddb01405df | ||
|
ff00fc5677 | ||
|
4f88a05f4d | ||
|
c32cb948d3 | ||
|
9c96ccd888 | ||
|
a3c276a3e0 | ||
|
e2b6272c71 | ||
|
0873b84e68 | ||
|
9a0b5b5d7d | ||
|
71469cf4c9 | ||
|
dc9baafbdb | ||
|
555eec64af | ||
|
44a67c1f89 | ||
|
56a8fd65cc | ||
|
dd6bf20f8d | ||
|
758bfa4c5e | ||
|
4455ade49e | ||
|
e8c2a9c7fd | ||
|
1ea457d9f1 | ||
|
e9c57c811f | ||
|
9f3b8d6399 | ||
|
da3913a25e | ||
|
4084504c80 | ||
|
cf90b3e903 | ||
|
16c6eabe13 | ||
|
d768d3edac | ||
|
1535c5f7e9 | ||
|
bf95c539ce | ||
|
cbdc1f5d67 | ||
|
97dbe0c4c8 | ||
|
022e8a2fd5 | ||
|
6e3a433c82 | ||
|
50f155917a | ||
|
e30d98f0cf | ||
|
9955f3feeb | ||
|
b47769817e | ||
|
9c8093e630 | ||
|
d335a22666 | ||
|
ee8faf706d | ||
|
b5b5148dec | ||
|
27a6b9806d | ||
|
a04d065423 | ||
|
8b4b14b7a9 | ||
|
11e1508a16 | ||
|
a6da9c1b0a | ||
|
c66d1973e4 | ||
|
4a87daf97f | ||
|
91562472ec | ||
|
8a67a2d7be | ||
|
ac6cae3aa6 | ||
|
4db8affe13 | ||
|
0328bacd49 | ||
|
4db3f39baa | ||
|
4193bcb107 | ||
|
2b7efe725f | ||
|
afc402f3cc | ||
|
0c589507f0 | ||
|
76c4f1fb6f | ||
|
3a5c2e235c | ||
|
f649350867 | ||
|
6a45a4d961 | ||
|
b10634d666 | ||
|
a8c3c11fd2 | ||
|
83c3d857cc | ||
|
0648f8e095 | ||
|
9da371065d | ||
|
9622f5745b | ||
|
8c21b29bfd | ||
|
59f3141f2a | ||
|
f97a4a7bc8 | ||
|
64e5b1874b | ||
|
8f5ea78429 | ||
|
39339b9969 | ||
|
9064364325 | ||
|
5bb0d0aaf2 | ||
|
12d66146e8 | ||
|
0a6155035d | ||
|
691c6adb31 | ||
|
3ad8f2da78 | ||
|
6e67454feb | ||
|
e613494e3c | ||
|
ea5bac3927 | ||
|
158dd73854 | ||
|
2f322305fe | ||
|
cc9c919bbb | ||
|
468371737b | ||
|
82afece002 | ||
|
d9513f4c32 | ||
|
8ae09b11f7 | ||
|
10d07a9f34 | ||
|
8ba9c09b80 | ||
|
ef3acbae17 | ||
|
9339d0d296 | ||
|
e5aae9eaf8 | ||
|
15eb5cc295 | ||
|
a412e3d56f | ||
|
e24e8635e8 | ||
|
b763d97956 | ||
|
b996f8ed0f | ||
|
88ba3ac7e1 | ||
|
c484ff2a0b | ||
|
e21d9b238e | ||
|
4e2e36970f | ||
|
5dcb66965a | ||
|
926a75fb0b | ||
|
5ec9187102 | ||
|
f1fa264043 | ||
|
240a49065d | ||
|
c1621589a2 | ||
|
2dccc56367 | ||
|
989f03a4aa | ||
|
7bbc38c7cf | ||
|
334ced87db | ||
|
e35f4ad17c | ||
|
6ccf8e6261 | ||
|
b844af4097 | ||
|
fb9747b76d | ||
|
2b0d4a7d31 | ||
|
f5d4766896 | ||
|
a106a84b36 | ||
|
b2bb0b3d5d | ||
|
c70f067845 | ||
|
a68897ffc8 | ||
|
0a81590b56 | ||
|
d2fb819723 | ||
|
e762f11257 | ||
|
acc37d53d8 | ||
|
04681d5bbb | ||
|
0d5f2f9484 | ||
|
3b6c76a37f | ||
|
8a867675ca | ||
|
f5668ba5a2 | ||
|
8e96efdf23 | ||
|
2d506c4c51 | ||
|
eae42bd93a | ||
|
266b97c368 | ||
|
6cf502800e | ||
|
5b6dcb37f6 | ||
|
4f525d8dc9 | ||
|
7ccf57d548 | ||
|
0c87b8924f | ||
|
462938db68 | ||
|
b230411b37 | ||
|
2f5f98c330 | ||
|
ec15e9f8df | ||
|
0d1a3bed3f | ||
|
1b122e67c7 | ||
|
6950593e8d | ||
|
479c4d608b | ||
|
fdf4304f7a | ||
|
ec07decba0 | ||
|
30bec0f37e | ||
|
d696d8cb37 | ||
|
072fa3ac09 | ||
|
3632debc03 | ||
|
70fab38365 | ||
|
96454440ac | ||
|
5d495aea13 | ||
|
ec09652dec | ||
|
bbd69e3d05 | ||
|
a67a04e5f3 | ||
|
28dd2f8d13 | ||
|
41b890ac0e | ||
|
c2e33a4715 | ||
|
395b3e9e03 | ||
|
35bf00ff49 | ||
|
1ea530fca3 | ||
|
9ae3d8232e | ||
|
e67aaa2ee8 | ||
|
c0e64d5946 | ||
|
7169c61e6f | ||
|
4c94909282 | ||
|
6588dd4765 | ||
|
ad4041a7a1 | ||
|
2a81057963 | ||
|
452dfbfb66 | ||
|
538cefe098 | ||
|
0bc219065d | ||
|
da87e3e4ca | ||
|
348e670491 | ||
|
a1cc181d30 | ||
|
e6a987b52c | ||
|
a808cfb79d | ||
|
1202d2af90 | ||
|
61962f2eef | ||
|
d2c20c74d5 | ||
|
d21033824f | ||
|
e9e8751e26 | ||
|
fc466cd816 | ||
|
068b270c7f | ||
|
d74c7d7abb | ||
|
6d58715742 | ||
|
3b825913ad | ||
|
2c25d9618d | ||
|
7420dbef40 | ||
|
d6011fc200 | ||
|
1cba3790ea | ||
|
fe66f1a510 | ||
|
1d16ecec15 | ||
|
6ee20adecd | ||
|
e61de03411 | ||
|
fa0dbe3f26 | ||
|
a0c942b9a9 | ||
|
9bf71a0fcf | ||
|
55089fb6d3 | ||
|
d9e24fa18e | ||
|
6632fc42d2 | ||
|
1cb7f70a0b | ||
|
e638254610 | ||
|
fa10dfb2d1 | ||
|
776b7ca2e4 | ||
|
b41ae0ae1a | ||
|
d54f78afa7 | ||
|
dc9efca4b3 | ||
|
46c7931344 | ||
|
cf63016f2b | ||
|
93c41a7ca0 | ||
|
5bd7ab511d | ||
|
c3ed4445a7 | ||
|
4a681023fe | ||
|
e3044abbea | ||
|
2af2d18b85 | ||
|
3782c5dcbb | ||
|
00b267ce5d | ||
|
10314c2997 | ||
|
c0fa87e585 | ||
|
a908ecffda | ||
|
ab9ba3cdbc | ||
|
6d3db74b54 | ||
|
726c84b6a0 | ||
|
f7ce30a5c3 | ||
|
98b8c07c11 | ||
|
3aa76fc45f | ||
|
bd79b4dafc | ||
|
bd2d0b61ee | ||
|
89fc7b443a | ||
|
fe42b38eb6 | ||
|
2be8d9e23c | ||
|
6c58ed0d4c | ||
|
19582a6287 | ||
|
aea2c18f59 | ||
|
4115e37f60 | ||
|
be439426c7 | ||
|
2fc3faeee0 | ||
|
377aaf63fe | ||
|
46225b7ab9 | ||
|
5890740754 | ||
|
deca20ad9f | ||
|
a1f33d90f2 | ||
|
9d93f0c9c1 | ||
|
292c0e0547 | ||
|
9a3938fb3f | ||
|
fb630f3057 | ||
|
d7dba44915 | ||
|
bd674993f4 | ||
|
ac15d52c5e | ||
|
f05531979f | ||
|
02b828dfa6 | ||
|
143a8027f7 | ||
|
76e80e28d5 | ||
|
2f75f9f1db | ||
|
673cd45561 | ||
|
ae8c4306b6 | ||
|
ee9e2d93ec | ||
|
9941a41d35 | ||
|
a1ffa39dd0 | ||
|
d715efa01a | ||
|
4e0803ab47 | ||
|
8e13197a4b | ||
|
891acfbd65 | ||
|
1c73eb4802 | ||
|
8d53d7e301 | ||
|
232e08f569 | ||
|
96ee2a76aa | ||
|
2cf3ad6b59 | ||
|
c0546be4cc | ||
|
1f5e5baaa6 | ||
|
a768bf4896 | ||
|
fc533ce12f | ||
|
93135c16f6 | ||
|
f9584e711a | ||
|
5419f5cd58 | ||
|
b609229245 | ||
|
85dfcdf8d9 | ||
|
2c7fab0a38 | ||
|
f589322cfe | ||
|
3241a150aa | ||
|
3af580e8c1 | ||
|
cac7d44f87 | ||
|
4b180a7b34 | ||
|
ebd0d17a89 | ||
|
b85a74924f | ||
|
401128a543 | ||
|
6be8927089 | ||
|
8e2cbe76e7 | ||
|
bcbc7961c1 | ||
|
5fc5841ece | ||
|
478f828e22 | ||
|
bf28a1af74 | ||
|
3b25d472f5 | ||
|
db0583d581 | ||
|
7f36756efc | ||
|
133f9505b7 | ||
|
7fb77bfb8f | ||
|
15d8130727 | ||
|
50f549ea6d | ||
|
454b8de596 | ||
|
4e7104ab81 | ||
|
c5ea17695f | ||
|
720a3b764c | ||
|
e2fa687b01 | ||
|
42ab0ab3c0 | ||
|
a2b2024bb3 | ||
|
1f5300a278 | ||
|
03293389ad | ||
|
b05b8558a4 | ||
|
17d6b7d9f0 | ||
|
48947100b0 | ||
|
6b1627fc00 | ||
|
5c6445264c | ||
|
2c1e3903fc | ||
|
b6835dfff1 | ||
|
4691279b3f | ||
|
3580b9adde | ||
|
7cfcb9c6e4 | ||
|
d9aae37f7e | ||
|
b4aed3496c | ||
|
330047fac1 | ||
|
aa9292b74c | ||
|
ed52257269 | ||
|
982107dbc2 | ||
|
783dd623e3 | ||
|
56126b933f | ||
|
82595a3249 | ||
|
7b15bcad3c | ||
|
f2f7ffc609 | ||
|
5a333ecfc8 | ||
|
1fc17d6b3e | ||
|
39e2605707 | ||
|
977e4db816 | ||
|
58e3ea8aab | ||
|
c6741c87b0 | ||
|
2763614dc4 | ||
|
d52e06272d | ||
|
76236cb311 | ||
|
608f998d0a | ||
|
5471f57dd4 | ||
|
14d860d834 | ||
|
eb915be28f | ||
|
3db5d9e989 | ||
|
dacda16d26 | ||
|
5fb298b65c | ||
|
ddcd7ae21c | ||
|
c289155aaa | ||
|
e9731b40ce | ||
|
18b9a6d205 | ||
|
7f46529352 | ||
|
a3cd1aec64 | ||
|
485fc94109 | ||
|
a81d68618d | ||
|
7861af5bb0 | ||
|
6f76691bd0 | ||
|
1707cb3396 | ||
|
669c54c64e | ||
|
e0391fec67 | ||
|
7d4b78c245 | ||
|
f4de929cc7 | ||
|
0a529a55a5 | ||
|
a1665a43de | ||
|
164d3e9318 | ||
|
bb28f4e8d3 | ||
|
0d9962207c | ||
|
45b0395f2e | ||
|
c07ab88b96 | ||
|
875e1ef76e | ||
|
d27b0dca20 | ||
|
a17ef713f9 | ||
|
303a29248f | ||
|
e36a1bc5e4 | ||
|
efbe4427ff | ||
|
7b1d876cd8 | ||
|
9d0069b59b | ||
|
05135eef0c | ||
|
928715d2ae | ||
|
6328cc92d5 | ||
|
6acdd0cbfa | ||
|
3170ab95cc | ||
|
520bbf195e | ||
|
61b56a7d6c | ||
|
5d77530214 | ||
|
8b5509f0c3 | ||
|
9c193029a0 | ||
|
629eca4f37 | ||
|
2417c86718 | ||
|
09426f3b05 | ||
|
ab8698e112 | ||
|
a75c4dd14c | ||
|
e0083b806f | ||
|
10139bc527 | ||
|
996465988f | ||
|
473054a919 | ||
|
db9aaf8d5b | ||
|
271175fcad | ||
|
20941b8bc0 | ||
|
8aa472c2e0 | ||
|
a58b3128b3 | ||
|
7cd4603232 | ||
|
d5500b8e0b | ||
|
9c21916135 | ||
|
ad544243ae | ||
|
36d78f0e74 | ||
|
859be64879 | ||
|
14e9a87f1f | ||
|
ea6b740f99 | ||
|
d75db85e96 | ||
|
c15f78d76f | ||
|
aa85c3f13a | ||
|
d804c8a1ba | ||
|
776eebe276 | ||
|
49b60cd44b | ||
|
2cc990693a | ||
|
635c1e145e | ||
|
38df30585d | ||
|
8533854a37 | ||
|
94b33b8da5 | ||
|
1bc9f42271 | ||
|
ef8ee57726 | ||
|
953bb83eef | ||
|
1ff744aa88 | ||
|
fcca7795ad | ||
|
28db125987 | ||
|
1aa728cfbc | ||
|
5ee1232e98 | ||
|
90927d832b | ||
|
811e20147d | ||
|
b0095524fe | ||
|
14745acc77 | ||
|
eef74b7e9b | ||
|
7711de1646 | ||
|
e7808438c9 | ||
|
554ceedaed | ||
|
1cfcd97593 | ||
|
67715768b4 | ||
|
d39d70b83e | ||
|
a16870bfb5 | ||
|
fb7f51ad9e | ||
|
4b68b7d908 | ||
|
d0a48492c7 | ||
|
0c70a37fdb | ||
|
9f85e19375 | ||
|
c17b826703 | ||
|
02b3b77d62 | ||
|
8ffa646988 | ||
|
82be9f0308 | ||
|
3d77e502ca | ||
|
dfeff9d48f | ||
|
18adbb8f9f | ||
|
6642aa0a70 | ||
|
fb3bc1b745 | ||
|
74c6213d32 | ||
|
8a8b070ffe | ||
|
25be7971cf | ||
|
5271a4e0ba | ||
|
ebaf40a180 | ||
|
1ccf36db0c | ||
|
553144f9fd | ||
|
1fa5b6a149 | ||
|
8e2565b3f6 | ||
|
b6c5e6a9cb | ||
|
a7faf644b0 | ||
|
18a4b67748 | ||
|
9d7611916b | ||
|
7533d1e198 | ||
|
93501c6712 | ||
|
d725fc0b2b | ||
|
d3f22e7adc | ||
|
c12bf7ac7c | ||
|
393625ee43 | ||
|
0af0d9e183 | ||
|
3432fc3af9 | ||
|
81cf0a474a | ||
|
39005b5f50 | ||
|
0363c56c2f | ||
|
c5f459329e | ||
|
2e48e42795 | ||
|
3f8540561e | ||
|
733cc5b68f | ||
|
492c7f9f93 | ||
|
2cbaacf907 | ||
|
d8286c1fb0 | ||
|
4302734a87 | ||
|
d196c88e29 | ||
|
028d13c99b | ||
|
6d61321346 | ||
|
607868f5de | ||
|
9d6baca1bb | ||
|
d9a196f344 | ||
|
6f81e79b2e | ||
|
090b950748 | ||
|
faae66f275 | ||
|
afc7baf79c | ||
|
2024d59f57 | ||
|
c2eff61eb7 | ||
|
18d5f585f2 | ||
|
40f182f20a | ||
|
176ad914de | ||
|
3450db1587 | ||
|
63c8b7478c | ||
|
65d97aba98 | ||
|
1c6df03d7d | ||
|
f2dce93653 | ||
|
a72e17221e | ||
|
5e4c88caff | ||
|
1d90ad4c09 | ||
|
b6272d884a | ||
|
ac2dcdfbc2 | ||
|
e0a8ef6239 | ||
|
901bd7b8ab | ||
|
9113290803 | ||
|
ed2b4201a1 | ||
|
2e94cd785b | ||
|
f20cb744f5 | ||
|
0f2a76f4ac | ||
|
f3ef10c3e3 | ||
|
074fd9fc8b | ||
|
f311315a27 | ||
|
1ed4744832 | ||
|
0849ee23c1 | ||
|
93641863c5 | ||
|
ed3751ce3a | ||
|
2584cfcd80 | ||
|
9d1d4358b1 | ||
|
c594fb3a67 | ||
|
f662e2f11c | ||
|
2d717b91de | ||
|
cdcc52d0fd | ||
|
13a733ecf2 | ||
|
e55c4a7d30 | ||
|
d02ed4c049 | ||
|
e750ed6ed2 | ||
|
c403436dcc | ||
|
45623133bb | ||
|
8da8a1684c | ||
|
5e4e4154c6 | ||
|
3be429e2ef |
2
.dockerignore
Normal file
2
.dockerignore
Normal file
@ -0,0 +1,2 @@
|
||||
ui/node_modules/
|
||||
ui/build/
|
49
.github/workflows/docker-image.yml
vendored
Normal file
49
.github/workflows/docker-image.yml
vendored
Normal file
@ -0,0 +1,49 @@
|
||||
name: Docker Image CI
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [created]
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
jobs:
|
||||
|
||||
build-and-push-image:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
|
||||
- name: Generate branch tag
|
||||
id: set_tag
|
||||
run: |
|
||||
echo "::set-output name=tag::${{ github.ref_name }}-$(git rev-parse --short HEAD)-$(date +%s)"
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}, ${{ env.REGISTRY }}/${{env.IMAGE_NAME}}:${{ steps.set_tag.outputs.tag }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
37
.github/workflows/release.yml
vendored
Normal file
37
.github/workflows/release.yml
vendored
Normal file
@ -0,0 +1,37 @@
|
||||
|
||||
name: Release
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [created]
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
goreleaser:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
- name: Unshallow
|
||||
run: git fetch --prune --unshallow
|
||||
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version-file: 'go.mod'
|
||||
cache: true
|
||||
|
||||
- name: Setup NodeJS
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 16
|
||||
|
||||
- name: Run GoReleaser
|
||||
uses: goreleaser/goreleaser-action@v3.2.0
|
||||
with:
|
||||
version: latest
|
||||
args: release --rm-dist
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
81
.github/workflows/test.yml
vendored
Normal file
81
.github/workflows/test.yml
vendored
Normal file
@ -0,0 +1,81 @@
|
||||
|
||||
name: Test
|
||||
|
||||
on: [ push, workflow_dispatch ]
|
||||
|
||||
jobs:
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres
|
||||
env:
|
||||
POSTGRES_USER: alice
|
||||
POSTGRES_PASSWORD: alice
|
||||
POSTGRES_DB: alice
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
ports:
|
||||
- 5432:5432
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
# Install development dependencies
|
||||
- name: Setup Go
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: '1.21.x'
|
||||
|
||||
- name: Setup NodeJS
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: 16
|
||||
|
||||
- name: Add UI build stub
|
||||
working-directory: ./ui
|
||||
run: |
|
||||
mkdir -p ./build
|
||||
touch ./build/UI_BUILD_STUB
|
||||
|
||||
# Formatting
|
||||
- name: Check formatting
|
||||
run: |
|
||||
test -z $(gofmt -l ./pkg)
|
||||
test -z $(gofmt -l ./cmd)
|
||||
|
||||
# Vet
|
||||
- name: Vet
|
||||
run: |
|
||||
go vet ./pkg/...
|
||||
go vet ./cmd/...
|
||||
|
||||
# Lint
|
||||
- name: Lint
|
||||
run: |
|
||||
go install golang.org/x/lint/golint@latest
|
||||
golint -set_exit_status ./pkg/...
|
||||
golint -set_exit_status ./cmd/...
|
||||
|
||||
# Test environment
|
||||
- name: Setup Test Database
|
||||
env:
|
||||
PGHOST: localhost
|
||||
PGPORT: 5432
|
||||
PGUSER: alice
|
||||
PGPASSWORD: alice
|
||||
working-directory: ./db
|
||||
run: |
|
||||
./init.sh -c -t
|
||||
|
||||
# Run Tests
|
||||
- name: UI Tests
|
||||
run: make ui_test
|
||||
|
||||
- name: Backend Tests
|
||||
run: make backend_test
|
||||
|
4
.gitignore
vendored
4
.gitignore
vendored
@ -18,6 +18,7 @@ venv/
|
||||
django_backend/birdseye/static
|
||||
|
||||
node_modules/
|
||||
ui/node_modules/
|
||||
|
||||
builds/
|
||||
|
||||
@ -28,8 +29,9 @@ DIST/
|
||||
|
||||
var/
|
||||
|
||||
etc/alice-lg/alice.conf
|
||||
|
||||
.DS_Store
|
||||
|
||||
*coverage*
|
||||
|
||||
dist/
|
||||
|
34
.goreleaser.yaml
Normal file
34
.goreleaser.yaml
Normal file
@ -0,0 +1,34 @@
|
||||
# This is an example .goreleaser.yml file with some sensible defaults.
|
||||
# Make sure to check the documentation at https://goreleaser.com
|
||||
before:
|
||||
hooks:
|
||||
- go mod tidy
|
||||
- make -C ui/
|
||||
builds:
|
||||
- main: ./cmd/alice-lg
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
goos:
|
||||
- linux
|
||||
- freebsd
|
||||
- netbsd
|
||||
- openbsd
|
||||
goarch:
|
||||
- amd64
|
||||
- arm64
|
||||
ldflags:
|
||||
- -X "github.com/alice-lg/alice-lg/pkg/config.Version={{ .Version }}"
|
||||
archives:
|
||||
- name_template: 'alice-lg_{{ .Version }}_{{ .Os }}_{{ .Arch }}'
|
||||
|
||||
checksum:
|
||||
name_template: 'checksums.txt'
|
||||
algorithm: sha256
|
||||
snapshot:
|
||||
name_template: "{{ incpatch .Version }}-next"
|
||||
changelog:
|
||||
skip: true
|
||||
release:
|
||||
extra_files:
|
||||
- glob: ./CHANGELOG.md
|
||||
|
141
CHANGELOG.md
141
CHANGELOG.md
@ -1,6 +1,147 @@
|
||||
|
||||
# Changelog
|
||||
|
||||
## 6.1.0 (2024-02-12)
|
||||
|
||||
* Added memory pools for deduplicating route information.
|
||||
This drastically reduces the memory consumption.
|
||||
|
||||
* Single table birdwatcher source is now using stream
|
||||
response parsing. This also reduces the memory consumption.
|
||||
However, as there are now waiting times, CPU load can get
|
||||
high. You can introduce a delay while parsing with the
|
||||
`stream_parser_throttle` parameter in the config.
|
||||
|
||||
* Improved search performance and timeout handling.
|
||||
|
||||
* The BGP info modal can now be dismissed by pressing `esc`.
|
||||
|
||||
* Global search now supports querying for bgp communities.
|
||||
Please set the `routes_store_query_limit` config variable.
|
||||
Some communities might match a large number of routes.
|
||||
|
||||
* Examples for the global search can be added using the
|
||||
theme's `Alice.updateContent` API:
|
||||
`{lookup: {examples: [["asn", "AS2342"], ...]}}`. Valid types
|
||||
are: `asn`, `community` `prefix` and `q`.
|
||||
|
||||
* Performance in search has been improved.
|
||||
You can now set the `prefix_lookup_community_filter_cutoff`
|
||||
config variable to prevent timeouts with large result sets.
|
||||
|
||||
* The configuration now supports defining variables like
|
||||
`$ASN01 = 65535` which can be used for expressivly describing
|
||||
communities. For now see `pkg/config/testdata/alice.conf` for
|
||||
usage.
|
||||
|
||||
* Bugfixes:
|
||||
- Fixed parsing and handling of ext community filters.
|
||||
- Fixed stylesheet compatibility: For route flags, new SVG icons
|
||||
are now wrapped in an `<i>` tag, to ensure backward compatiblity.
|
||||
- Fixed trying to decode an 'undefined' value for a query filter.
|
||||
- Spelling fixes
|
||||
|
||||
* Deprecations:
|
||||
- The `/api/v1/routeservers/<rs>/neighbors/<id>/routes` endpoint
|
||||
is removed.
|
||||
|
||||
|
||||
## 6.0.0 (2022-11-10)
|
||||
|
||||
* Pure functional react UI!
|
||||
|
||||
Frontend is now using `create-react-app` for scripts and
|
||||
contexts instead of redux.
|
||||
|
||||
**Theme compatibility**
|
||||
|
||||
- Stylesheets are compatible
|
||||
- Content API is compatible
|
||||
- API now provides `Alice.onLayoutReady((page) => ... )`
|
||||
callback. This should be used to install additional
|
||||
DOM event listeners for extensions.
|
||||
|
||||
So, if you want to inject additional dom nodes into
|
||||
the UI and used something like:
|
||||
|
||||
`document.addEventListener("DOMContentLoaded", function() { ... }`
|
||||
|
||||
you now need to use the `Alice.onLayoutReady(function(main) { ... })`
|
||||
callback.
|
||||
|
||||
|
||||
## 5.1.1 (2022-06-21)
|
||||
|
||||
* Improved search query validation.
|
||||
|
||||
* Fixed http status response when validation fails.
|
||||
Was Internal Server Error (500), now: Bad Request (400).
|
||||
|
||||
* Memory-Store is now using sync.Map to avoid timeouts
|
||||
due to aggressive locking.
|
||||
|
||||
## 5.1.0 (2022-06-02)
|
||||
|
||||
* **BREAKING CHANGE** The spelling of "neighbors" is now harmonized.
|
||||
Please update your config and replace e.g. `neighbour.asn`
|
||||
with `neighbor.asn` (in case of java script errors).
|
||||
This also applies to the API.
|
||||
|
||||
In the config `neighbors_store_refresh_interval` needs to be updated.
|
||||
|
||||
* Parallel route / neighbor store refreshs: Route servers are not
|
||||
longer queried sequentially. A jitter is applied to not hit all
|
||||
servers exactly at once.
|
||||
|
||||
* Parallelism can be tuned through the config parameters:
|
||||
[server]
|
||||
|
||||
routes_store_refresh_parallelism = 5
|
||||
neighbors_store_refresh_parallelism = 10000
|
||||
|
||||
A value of 1 is a sequential refresh.
|
||||
|
||||
* Postgres store backend: Not keeping routes and neighbors in
|
||||
memory might reduce the memory footprint.
|
||||
|
||||
* Support for alternative pipe in `multi_table` birdwatcher
|
||||
configurations.
|
||||
|
||||
* Reduced memory footprint by precomputing route details
|
||||
|
||||
|
||||
## 5.0.1 (2021-11-01)
|
||||
|
||||
* Fixed parsing extended communities in openbgpd source causing a crash.
|
||||
|
||||
## 5.0.0 (2021-10-09)
|
||||
|
||||
* OpenBGPD support! Thanks to the Route Server Support Foundation
|
||||
for sponsoring this feature!
|
||||
|
||||
* Backend cleanup and restructured go codebase.
|
||||
This should improve a bit working with containers.
|
||||
|
||||
* Fixed links to the IRR Explorer.
|
||||
|
||||
## 4.3.0 (2021-04-15)
|
||||
|
||||
* Added configurable main table
|
||||
|
||||
## 4.2.0 (2020-07-29)
|
||||
|
||||
* Added GoBGP processing_timeout source config option
|
||||
|
||||
## 4.1.0 (2019-12-23)
|
||||
|
||||
* Added related neighbors feature
|
||||
|
||||
## 4.0.2, 4.0.3 (2019-09-09)
|
||||
|
||||
* Fixed issue with multitable bird: `getMasterPipeName` returned incorrect
|
||||
pipe.
|
||||
|
||||
* Fixed state check in multitable bird source with bird2.
|
||||
|
||||
## 4.0.1 (2019-03-07)
|
||||
|
||||
|
47
Dockerfile
Normal file
47
Dockerfile
Normal file
@ -0,0 +1,47 @@
|
||||
|
||||
#
|
||||
# Alice - The friendly BGP looking glass
|
||||
#
|
||||
|
||||
# Build frontend first
|
||||
FROM node:latest AS ui
|
||||
|
||||
# Install dependencies
|
||||
WORKDIR /src/alice-lg/ui
|
||||
ADD ui/package.json .
|
||||
ADD ui/yarn.lock .
|
||||
|
||||
RUN yarn install
|
||||
|
||||
# Add frontend
|
||||
ADD ui/ .
|
||||
|
||||
# Build frontend
|
||||
RUN yarn build
|
||||
|
||||
# Build the backend
|
||||
FROM golang:1.21 AS backend
|
||||
|
||||
# Install dependencies
|
||||
WORKDIR /src/alice-lg
|
||||
ADD go.mod .
|
||||
ADD go.sum .
|
||||
RUN go mod download
|
||||
|
||||
ADD . .
|
||||
|
||||
# Add client
|
||||
COPY --from=ui /src/alice-lg/ui/build ui/build
|
||||
|
||||
WORKDIR /src/alice-lg/cmd/alice-lg
|
||||
RUN make alpine
|
||||
|
||||
FROM alpine:latest
|
||||
|
||||
RUN apk add -U tzdata
|
||||
|
||||
COPY --from=backend /src/alice-lg/cmd/alice-lg/alice-lg-linux-amd64 /usr/bin/alice-lg
|
||||
RUN ls -lsha /usr/bin/alice-lg
|
||||
|
||||
EXPOSE 7340:7340
|
||||
CMD ["/usr/bin/alice-lg"]
|
40
LICENSE
40
LICENSE
@ -1,33 +1,31 @@
|
||||
BSD License
|
||||
BSD 3-Clause License
|
||||
|
||||
Copyright (c) 2016-2018, Peering GmbH / ECIX
|
||||
Copyright (c) 2018-present, Matthias Hannig
|
||||
Copyright (c) 2018-present, Annika Hannig
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions
|
||||
are met:
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
* Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
* Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
||||
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
||||
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
||||
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
|
||||
THE POSSIBILITY OF SUCH DAMAGE.
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
116
Makefile
116
Makefile
@ -1,119 +1,43 @@
|
||||
|
||||
#
|
||||
# Build the Alice Looking Glass
|
||||
# -----------------------------
|
||||
#
|
||||
# Build the Alice Looking Glass locally
|
||||
#
|
||||
|
||||
PROG=alice-lg
|
||||
ARCH=amd64
|
||||
|
||||
SYSTEM_INIT=systemd
|
||||
|
||||
# == END BUILD CONFIGURATION ==
|
||||
|
||||
VERSION=$(shell cat ./VERSION)
|
||||
|
||||
# Specify build server for remotely building the RPM
|
||||
# you can do this when you invoke the makefile
|
||||
# using:
|
||||
# make remote_rpm BUILD_SERVER=build-rpm.example.com
|
||||
BUILD_SERVER=''
|
||||
|
||||
DIST=DIST/
|
||||
REMOTE_DIST=$(PROG)-$(DIST)
|
||||
|
||||
RPM=$(PROG)-$(VERSION)-1.x86_64.rpm
|
||||
|
||||
LOCAL_RPMS=RPMS
|
||||
|
||||
all: alice
|
||||
|
||||
client_dev:
|
||||
$(MAKE) -C client/
|
||||
test: ui_test backend_test
|
||||
|
||||
client_prod:
|
||||
$(MAKE) -C client/ client_prod
|
||||
alice: ui backend
|
||||
cp cmd/alice-lg/alice-lg-* bin/
|
||||
|
||||
backend_dev: client_dev
|
||||
$(MAKE) -C backend/
|
||||
ui:
|
||||
$(MAKE) -C ui/
|
||||
|
||||
dev:
|
||||
$(MAKE) -C backend/ osx-dev
|
||||
ui_test:
|
||||
$(MAKE) -C ui/ test
|
||||
|
||||
backend:
|
||||
$(MAKE) -C cmd/alice-lg/ static
|
||||
|
||||
backend_prod: client_prod
|
||||
$(MAKE) -C backend/ bundle
|
||||
$(MAKE) -C backend/ linux
|
||||
|
||||
|
||||
alice: client_prod backend_prod
|
||||
mv backend/alice-lg-* bin/
|
||||
|
||||
|
||||
dist: clean alice
|
||||
|
||||
mkdir -p $(DIST)opt/alice-lg/alice-lg/bin
|
||||
mkdir -p $(DIST)etc/alice-lg
|
||||
|
||||
# Adding post install script
|
||||
cp install/scripts/after_install $(DIST)/.
|
||||
|
||||
ifeq ($(SYSTEM_INIT), systemd)
|
||||
# Installing systemd services
|
||||
mkdir -p $(DIST)usr/lib/systemd/system/
|
||||
cp install/systemd/* $(DIST)usr/lib/systemd/system/.
|
||||
else
|
||||
# Installing upstart configuration
|
||||
mkdir -p $(DIST)/etc/init/
|
||||
cp install/upstart/* $(DIST)etc/init/.
|
||||
endif
|
||||
|
||||
# Copy example configuration
|
||||
cp etc/alice-lg/alice.example.conf $(DIST)/etc/alice-lg/alice.example.conf
|
||||
|
||||
# Copy application
|
||||
cp bin/$(PROG)-linux-$(ARCH) DIST/opt/alice-lg/alice-lg/bin/.
|
||||
|
||||
|
||||
rpm: dist
|
||||
|
||||
# Clear tmp failed build (if any)
|
||||
mkdir -p $(LOCAL_RPMS)
|
||||
|
||||
# Create RPM from dist
|
||||
fpm -s dir -t rpm -n $(PROG) -v $(VERSION) -C $(DIST) \
|
||||
--architecture $(ARCH) \
|
||||
--config-files /etc/alice-lg/alice.example.conf \
|
||||
--after-install $(DIST)/after_install \
|
||||
opt/ etc/
|
||||
|
||||
mv $(RPM) $(LOCAL_RPMS)
|
||||
|
||||
|
||||
build_server:
|
||||
ifeq ($(BUILD_SERVER), '')
|
||||
$(error BUILD_SERVER not configured)
|
||||
endif
|
||||
|
||||
remote_rpm: build_server dist
|
||||
|
||||
mkdir -p $(LOCAL_RPMS)
|
||||
|
||||
# Copy distribution to build server
|
||||
ssh $(BUILD_SERVER) -- rm -rf $(REMOTE_DIST)
|
||||
scp -r $(DIST) $(BUILD_SERVER):$(REMOTE_DIST)
|
||||
ssh $(BUILD_SERVER) -- fpm -s dir -t rpm -n $(PROG) -v $(VERSION) -C $(REMOTE_DIST) \
|
||||
--architecture $(ARCH) \
|
||||
--config-files /etc/alice-lg/alice.example.conf \
|
||||
--after-install $(REMOTE_DIST)/after_install \
|
||||
opt/ etc/
|
||||
|
||||
# Get rpm from server
|
||||
scp $(BUILD_SERVER):$(RPM) $(LOCAL_RPMS)/.
|
||||
backend_test:
|
||||
mkdir -p ./ui/build
|
||||
touch ./ui/build/UI_BUILD_STUB
|
||||
go test ./pkg/...
|
||||
rm ./ui/build/UI_BUILD_STUB
|
||||
|
||||
|
||||
clean:
|
||||
rm -f bin/alice-lg-linux-amd64
|
||||
rm -f bin/alice-lg-osx-amd64
|
||||
rm -rf $(DIST)
|
||||
rm ./ui/build/UI_BUILD_STUB
|
||||
|
||||
|
||||
.PHONY: backend ui clean
|
||||
|
||||
|
11
Makefile.docker
Normal file
11
Makefile.docker
Normal file
@ -0,0 +1,11 @@
|
||||
|
||||
# Build Docker Image
|
||||
|
||||
APP_VERSION=$(shell cat ./VERSION)
|
||||
|
||||
all: image
|
||||
|
||||
image:
|
||||
docker build . -t alice-lg:latest -t alice-lg:$(APP_VERSION)
|
||||
|
||||
|
178
README.md
178
README.md
@ -2,24 +2,48 @@
|
||||
__"No, no! The adventures first, explanations take such a dreadful time."__
|
||||
_Lewis Carroll, Alice's Adventures in Wonderland & Through the Looking-Glass_
|
||||
|
||||
Take a look at an Alice-LG production examples at:
|
||||
Take a look at Alice-LG production examples at:
|
||||
- https://lg.de-cix.net/
|
||||
- https://lg.ecix.net/
|
||||
- https://lg.ams-ix.net
|
||||
- https://lg.bcix.de/
|
||||
- https://lg.megaport.com/
|
||||
- https://lg.netnod.se/
|
||||
- https://alice-rs.linx.net/
|
||||
- https://lg.ix.br/
|
||||
- https://lg.ix.asn.au/
|
||||
- https://lg.ix.nz/
|
||||
|
||||
And checkout the API at:
|
||||
- https://lg.ecix.net/api/config
|
||||
- https://lg.ecix.net/api/routeservers
|
||||
- https://lg.ecix.net/api/routeservers/0/status
|
||||
- https://lg.ecix.net/api/routeservers/0/neighbours
|
||||
- https://lg.ecix.net/api/routeservers/0/neighbours/ID109_AS31078/routes
|
||||
- https://lg.ecix.net/api/lookup/prefix?q=217.115.0.0
|
||||
- https://lg.de-cix.net/api/v1/config
|
||||
- https://lg.de-cix.net/api/v1/routeservers
|
||||
- https://lg.de-cix.net/api/v1/routeservers/rs1_fra_ipv4/status
|
||||
- https://lg.de-cix.net/api/v1/routeservers/rs1_fra_ipv4/neighbors
|
||||
- https://lg.de-cix.net/api/v1/routeservers/rs1_fra_ipv4/neighbors/R194_106/routes
|
||||
- https://lg.de-cix.net/api/v1/lookup/prefix?q=217.115.0.0
|
||||
|
||||
|
||||
## Breaking Changes
|
||||
|
||||
### 6.0.0
|
||||
|
||||
With the new functional react UI, the `DOMContentLoaded` event can no
|
||||
longer be used for injecting additional content. Please use
|
||||
`Alice.onLayoutReady(function(main) { ... });` instead.
|
||||
|
||||
### 5.1.0
|
||||
|
||||
The spelling of "neighbors" is now harmonized. Please update your config and
|
||||
replace e.g. neighbour.asn with neighbor.asn (in case of java script errors).
|
||||
|
||||
## Explanations
|
||||
Alice-LG is a BGP looking glass which gets its data from external APIs.
|
||||
|
||||
Currently Alice-LG supports the following APIs:
|
||||
- [birdwatcher API](https://github.com/alice-lg/birdwatcher) for [BIRD](http://bird.network.cz/)
|
||||
- [GoBGP](https://osrg.github.io/gobgp/)
|
||||
- [bgplgd](https://man.openbsd.org/bgplgd) or [`openbgpd-state-server`](https://github.com/alice-lg/openbgpd-state-server) for [OpenBGP](https://www.openbgpd.org/)
|
||||
|
||||
### Birdwatcher
|
||||
Normally you would first install the [birdwatcher API](https://github.com/alice-lg/birdwatcher) directly on the machine(s) where you run [BIRD](http://bird.network.cz/) on
|
||||
and then install Alice-LG on a seperate public facing server and point her to the afore mentioned [birdwatcher API](https://github.com/alice-lg/birdwatcher).
|
||||
|
||||
@ -28,12 +52,19 @@ just prior to [RIPE73](https://ripe73.ripe.net/) in Madrid, Spain.
|
||||
|
||||
Major thanks to Barry O'Donovan who built the original [INEX Bird's Eye](https://github.com/inex/birdseye) BIRD API of which Alice-LG is a spinnoff
|
||||
|
||||
### GoBGP
|
||||
Alice-LG supports direct integration with GoBGP instances using gRPC.
|
||||
See the configuration section for more detail.
|
||||
|
||||
### OpenBGPD
|
||||
|
||||
Alice-LG supports OpenBGP via [`bgplgd`](https://man.openbsd.org/bgplgd)
|
||||
and [`openbgpd-state-server`](https://github.com/alice-lg/openbgpd-state-server).
|
||||
|
||||
## Building Alice-LG from scratch
|
||||
__These examples include setting up your Go environment, if you already have set that up then you can obviously skip that__
|
||||
|
||||
In case you have trouble with `npm` and `gulp` you can try using `yarn`.
|
||||
|
||||
### CentOS 7:
|
||||
### CentOS:
|
||||
First add the following lines at the end of your `~/.bash_profile`:
|
||||
```bash
|
||||
GOPATH=$HOME/go
|
||||
@ -47,10 +78,8 @@ source ~/.bash_profile
|
||||
|
||||
# Install frontend build dependencies
|
||||
sudo yum install golang npm
|
||||
sudo npm install --global gulp-cli
|
||||
sudo npm install --global yarn
|
||||
|
||||
go get github.com/GeertJohan/go.rice
|
||||
go get github.com/GeertJohan/go.rice/rice
|
||||
mkdir -p ~/go/bin ~/go/pkg ~/go/src/github.com/alice-lg/
|
||||
|
||||
cd ~/go/src/github.com/alice-lg
|
||||
@ -64,7 +93,7 @@ Your Alice-LG source will now be located at `~/go/src/github.com/alice-lg/alice-
|
||||
## Configuration
|
||||
|
||||
An example configuration can be found at
|
||||
[etc/alice-lg/alice.example.conf](https://github.com/alice-lg/alice-lg/blob/readme_update/etc/alice-lg/alice.example.conf).
|
||||
[etc/alice-lg/alice.example.conf](https://github.com/alice-lg/alice-lg/blob/master/etc/alice-lg/alice.example.conf).
|
||||
|
||||
You can copy it to any of the following locations:
|
||||
|
||||
@ -73,13 +102,15 @@ You can copy it to any of the following locations:
|
||||
/etc/alice-lg/alice.conf # global
|
||||
|
||||
|
||||
You will have to edit the configuration file as you need to point Alice-LG to the correct [APIs](https://github.com/alice-lg/birdwatcher):
|
||||
You will have to edit the configuration file as you need to point Alice-LG to the correct backend source. Multiple sources can be configured.
|
||||
|
||||
[Birdwatcher](https://github.com/alice-lg/birdwatcher):
|
||||
```ini
|
||||
[source.rs1-example-v4]
|
||||
name = rs1.example.com (IPv4)
|
||||
[source.rs1-example-v4.birdwatcher]
|
||||
api = http://rs1.example.com:29184/
|
||||
neighbors_refresh_timeout = 2
|
||||
# show_last_reboot = true
|
||||
# timezone = UTC
|
||||
# type = single_table / multi_table
|
||||
@ -94,6 +125,52 @@ name = rs1.example.com (IPv6)
|
||||
api = http://rs1.example.com:29186/
|
||||
```
|
||||
|
||||
[GoBGP](https://osrg.github.io/gobgp/):
|
||||
```ini
|
||||
[source.rs2-example]
|
||||
name = rs2.example.com
|
||||
group = AMS
|
||||
|
||||
[source.rs2-example.gobgp]
|
||||
# Host is the IP (or DNS name) and port for the remote GoBGP daemon
|
||||
host = rs2.example.com:50051
|
||||
# ProcessingTimeout is a timeout in seconds configured per gRPC call to a given GoBGP daemon
|
||||
processing_timeout = 300
|
||||
```
|
||||
Configure TLS with:
|
||||
```ini
|
||||
tls_crt = /path/to/cert
|
||||
tls_common_name = "common name"
|
||||
```
|
||||
|
||||
You can disable TLS with `insecure = true`.
|
||||
|
||||
[OpenBGPD](https://www.openbgpd.org/) via `openbgpd-state-server`:
|
||||
```ini
|
||||
[source.rs-example]
|
||||
name = rs-example.openbgpd-state-server
|
||||
|
||||
[source.rs-example.openbgpd-state-server]
|
||||
api = http://rs23.example.net:29111/api
|
||||
|
||||
# Optional response cache time in seconds
|
||||
# Default: disabled (0)
|
||||
cache_ttl = 100
|
||||
```
|
||||
|
||||
[OpenBGPD](https://www.openbgpd.org/) via `bgplgd`:
|
||||
```ini
|
||||
[source.rs-example]
|
||||
name = rs-example.openbgpd-bgplgd
|
||||
|
||||
[source.rs-example.openbgpd-bgplgd]
|
||||
api = http://rs23.example.net/bgplgd
|
||||
|
||||
# Optional response cache time in seconds
|
||||
# Default: disabled (0)
|
||||
cache_ttl = 100
|
||||
```
|
||||
|
||||
## Running
|
||||
|
||||
Launch the server by running
|
||||
@ -101,27 +178,6 @@ Launch the server by running
|
||||
./bin/alice-lg-linux-amd64
|
||||
|
||||
|
||||
## Deployment
|
||||
|
||||
We added a `Makefile` for packaging Alice as an RPM using [fpm](https://github.com/jordansissel/fpm).
|
||||
|
||||
If you have all tools available locally, you can just type:
|
||||
|
||||
make rpm
|
||||
|
||||
If you want to build the package on a remote machine, just use
|
||||
|
||||
make remote_rpm BUILD_SERVER=my-rpm-building-server.example.com
|
||||
|
||||
which will copy the dist to the remote server and executes fpm via ssh.
|
||||
|
||||
You can specify which system integration to use:
|
||||
Set the `SYSTEM_INIT` variable to `upstart` or `systemd` (default)
|
||||
prior to building the RPM.
|
||||
|
||||
make remote_rpm BUILD_SERVER=rpmbuild.example.com SYSTEM_INIT=upstart
|
||||
|
||||
|
||||
## Customization
|
||||
|
||||
Alice now supports custom themes!
|
||||
@ -154,46 +210,43 @@ Alice.updateContent({
|
||||
|
||||
```
|
||||
|
||||
A callback for running custom javascript after the base application
|
||||
was initialized can be installed using:
|
||||
|
||||
```javascript
|
||||
Alice.onLayoutReady(function(page) {
|
||||
// page is the layout HTML root element
|
||||
});
|
||||
```
|
||||
|
||||
For an example check out: https://github.com/alice-lg/alice-theme-example
|
||||
|
||||
## Hacking
|
||||
|
||||
The client is a Single Page React Application.
|
||||
All sources are available in `client/`.
|
||||
|
||||
Install build tools as needed:
|
||||
|
||||
npm install -g gulp-cli
|
||||
All sources are available in `ui/`.
|
||||
|
||||
`Yarn` is required for building the UI.
|
||||
|
||||
Create a fresh UI build with
|
||||
```bash
|
||||
cd client/
|
||||
make client
|
||||
cd ui/
|
||||
make
|
||||
```
|
||||
|
||||
This will install all dependencies and run `gulp`.
|
||||
This will install all dependencies with `yarn install` and run `yarn build`.
|
||||
|
||||
While working on the UI you might want to use `make watch`,
|
||||
which will keep the `gulp watch` task up and running.
|
||||
As this is a `create-react-app` application, react-scripts are present
|
||||
and you can just run a development server using `yarn start`.
|
||||
|
||||
### Docker
|
||||
For convenience we added a `Dockerfile` for building the frontend / client.
|
||||
All this available as a containerized environment:
|
||||
|
||||
Create a fresh UI build using docker with
|
||||
```bash
|
||||
cd client/
|
||||
Running `docker-compose up` in the `./dev` will build and start the
|
||||
backend and run a webpack dev server for the UI.
|
||||
|
||||
# Dev build:
|
||||
make -f Makefile.docker client
|
||||
The UI is then available on http://localhost:3000/ and on http://localhost:7340/
|
||||
the backend will serve the API.
|
||||
|
||||
# Production build:
|
||||
make -f Makefile.docker client_prod
|
||||
```
|
||||
You can use gulp with docker for watching the files while developing aswell:
|
||||
```bash
|
||||
make -f Makefile.docker watch
|
||||
```
|
||||
|
||||
## Sponsors
|
||||
|
||||
@ -205,3 +258,6 @@ The development of Alice is now sponsored by
|
||||
</p>
|
||||
|
||||
Many thanks go out to [ECIX](https://www.ecix.net), where this project originated and was backed over the last two years.
|
||||
|
||||
Support for **OpenBGPD** was sponsored by the [Route Server Support Foundation](https://www.rssf.nl/).
|
||||
|
||||
|
7
backend/.gitignore
vendored
7
backend/.gitignore
vendored
@ -1,7 +0,0 @@
|
||||
|
||||
# Ignore static build in repo
|
||||
rice-box.go
|
||||
|
||||
# Ignore builds
|
||||
alice-lg-*
|
||||
|
@ -1,14 +0,0 @@
|
||||
|
||||
FROM golang:1.10
|
||||
|
||||
# Add project (for prefetching dependencies)
|
||||
ADD . /go/src/github.com/alice-lg/alice-lg/backend
|
||||
|
||||
RUN cd /go/src/github.com/alice-lg/alice-lg/backend && go get -v .
|
||||
|
||||
RUN go get github.com/GeertJohan/go.rice/rice
|
||||
RUN go install github.com/GeertJohan/go.rice/rice
|
||||
|
||||
WORKDIR /go/src/github.com/alice-lg/alice-lg
|
||||
VOLUME ["/go/src/github.com/alice-lg/alice-lg"]
|
||||
|
@ -1,64 +0,0 @@
|
||||
#
|
||||
# Alice LG Backend
|
||||
# ----------------
|
||||
#
|
||||
|
||||
PROG=alice-lg
|
||||
ARCH=amd64
|
||||
|
||||
APP_VERSION=$(shell cat ../VERSION)
|
||||
VERSION=$(APP_VERSION)_$(shell git rev-parse --short HEAD)
|
||||
|
||||
LOCAL_RPMS=RPMS
|
||||
|
||||
# OS Detection
|
||||
UNAME=$(shell uname)
|
||||
ifeq ($(UNAME), Darwin)
|
||||
TARGET=osx
|
||||
else
|
||||
TARGET=linux
|
||||
endif
|
||||
|
||||
|
||||
LDFLAGS=-ldflags="-X main.version=$(APP_VERSION)"
|
||||
FILES=$(shell find . -depth 1 ! -name "*_test.go" -name "*.go")
|
||||
|
||||
all: $(TARGET)
|
||||
@echo "Built $(VERSION) @ $(TARGET)"
|
||||
|
||||
deps:
|
||||
GO111MODULE=on go get -v .
|
||||
|
||||
osx-dev: deps
|
||||
GO111MODULE=on go run $(FILES)
|
||||
|
||||
osx: deps
|
||||
GO111MODULE=on GOARCH=$(ARCH) GOOS=darwin go build $(LDFLAGS) -o $(PROG)-osx-$(ARCH)
|
||||
|
||||
linux: deps
|
||||
GO111MODULE=on GOARCH=$(ARCH) GOOS=linux go build $(LDFLAGS) -o $(PROG)-linux-$(ARCH)
|
||||
|
||||
bundle:
|
||||
rice embed-go
|
||||
|
||||
test:
|
||||
GO111MODULE=on go test -v
|
||||
cd api/ && GO111MODULE=on go test -v
|
||||
cd caches/ && GO111MODULE=on go test -v
|
||||
cd sources/birdwatcher && GO111MODULE=on go test -v
|
||||
|
||||
|
||||
dev: clean all
|
||||
|
||||
prod: clean bundle $(TARGET)
|
||||
@echo "Build $(VERSION) [production] @ $(TARGET)"
|
||||
|
||||
|
||||
clean:
|
||||
rm -f rice-box.go
|
||||
rm -f $(PROG)-osx-$(ARCH)
|
||||
rm -f $(PROG)-linux-$(ARCH)
|
||||
|
||||
coverage:
|
||||
go test -coverprofile=coverage.out
|
||||
go tool cover -func=coverage.out
|
@ -1,28 +0,0 @@
|
||||
|
||||
DOCKER_IMAGE := alice-lg-golang:latest
|
||||
DOCKER_EXEC := docker run --rm -t -i \
|
||||
--user 1000 \
|
||||
-v `pwd`/../:/go/src/github.com/alice-lg/alice-lg \
|
||||
alice-lg-golang:latest /bin/sh -c
|
||||
|
||||
|
||||
image:
|
||||
docker build . -t alice-lg-golang:latest
|
||||
|
||||
|
||||
# Somehow rice.box does not work when the binary was built
|
||||
# using docker. However it does work when used for embedding.
|
||||
# So, making a production build is doable, an dev build however
|
||||
# is not.
|
||||
#
|
||||
|
||||
# dev:
|
||||
# $(DOCKER_EXEC) "cd backend && make dev"
|
||||
|
||||
prod: image
|
||||
$(DOCKER_EXEC) "cd backend && make prod"
|
||||
|
||||
|
||||
|
||||
all: prod
|
||||
|
@ -1,256 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
)
|
||||
|
||||
// General api response
|
||||
type Response interface{}
|
||||
|
||||
// Details, usually the original backend response
|
||||
type Details map[string]interface{}
|
||||
|
||||
// Error Handling
|
||||
type ErrorResponse struct {
|
||||
Message string `json:"message"`
|
||||
Code int `json:"code"`
|
||||
Tag string `json:"tag"`
|
||||
RouteserverId string `json:"routeserver_id"`
|
||||
}
|
||||
|
||||
// Cache aware api response
|
||||
type CacheableResponse interface {
|
||||
CacheTtl() time.Duration
|
||||
}
|
||||
|
||||
// Config
|
||||
type ConfigResponse struct {
|
||||
Asn int `json:"asn"`
|
||||
|
||||
RejectReasons map[string]interface{} `json:"reject_reasons"`
|
||||
|
||||
Noexport Noexport `json:"noexport"`
|
||||
NoexportReasons map[string]interface{} `json:"noexport_reasons"`
|
||||
|
||||
RejectCandidates RejectCandidates `json:"reject_candidates"`
|
||||
|
||||
Rpki Rpki `json:"rpki"`
|
||||
|
||||
BgpCommunities map[string]interface{} `json:"bgp_communities"`
|
||||
|
||||
NeighboursColumns map[string]string `json:"neighbours_columns"`
|
||||
NeighboursColumnsOrder []string `json:"neighbours_columns_order"`
|
||||
|
||||
RoutesColumns map[string]string `json:"routes_columns"`
|
||||
RoutesColumnsOrder []string `json:"routes_columns_order"`
|
||||
|
||||
LookupColumns map[string]string `json:"lookup_columns"`
|
||||
LookupColumnsOrder []string `json:"lookup_columns_order"`
|
||||
|
||||
PrefixLookupEnabled bool `json:"prefix_lookup_enabled"`
|
||||
}
|
||||
|
||||
type Noexport struct {
|
||||
LoadOnDemand bool `json:"load_on_demand"`
|
||||
}
|
||||
|
||||
type RejectCandidates struct {
|
||||
Communities map[string]interface{} `json:"communities"`
|
||||
}
|
||||
|
||||
type Rpki struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
Valid []string `json:"valid"`
|
||||
Unknown []string `json:"unknown"`
|
||||
NotChecked []string `json:"not_checked"`
|
||||
Invalid []string `json:"invalid"`
|
||||
}
|
||||
|
||||
// Status
|
||||
type ApiStatus struct {
|
||||
Version string `json:"version"`
|
||||
CacheStatus CacheStatus `json:"cache_status"`
|
||||
ResultFromCache bool `json:"result_from_cache"`
|
||||
Ttl time.Time `json:"ttl"`
|
||||
}
|
||||
|
||||
type CacheStatus struct {
|
||||
CachedAt time.Time `json:"cached_at"`
|
||||
OrigTtl int `json:"orig_ttl"`
|
||||
}
|
||||
|
||||
type Status struct {
|
||||
ServerTime time.Time `json:"server_time"`
|
||||
LastReboot time.Time `json:"last_reboot"`
|
||||
LastReconfig time.Time `json:"last_reconfig"`
|
||||
Message string `json:"message"`
|
||||
RouterId string `json:"router_id"`
|
||||
Version string `json:"version"`
|
||||
Backend string `json:"backend"`
|
||||
}
|
||||
|
||||
type StatusResponse struct {
|
||||
Api ApiStatus `json:"api"`
|
||||
Status Status `json:"status"`
|
||||
}
|
||||
|
||||
// Routeservers
|
||||
type Routeserver struct {
|
||||
Id string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Group string `json:"group"`
|
||||
Blackholes []string `json:"blackholes"`
|
||||
|
||||
Order int `json:"-"`
|
||||
}
|
||||
|
||||
type Routeservers []Routeserver
|
||||
|
||||
// Implement sorting interface for routeservers
|
||||
func (rs Routeservers) Len() int {
|
||||
return len(rs)
|
||||
}
|
||||
|
||||
func (rs Routeservers) Less(i, j int) bool {
|
||||
return rs[i].Order < rs[j].Order
|
||||
}
|
||||
|
||||
func (rs Routeservers) Swap(i, j int) {
|
||||
rs[i], rs[j] = rs[j], rs[i]
|
||||
}
|
||||
|
||||
type RouteserversResponse struct {
|
||||
Routeservers []Routeserver `json:"routeservers"`
|
||||
}
|
||||
|
||||
// BGP
|
||||
type Community []int
|
||||
|
||||
func (com Community) String() string {
|
||||
res := ""
|
||||
for _, v := range com {
|
||||
res += fmt.Sprintf(":%d", v)
|
||||
}
|
||||
return res[1:]
|
||||
}
|
||||
|
||||
type Communities []Community
|
||||
|
||||
/*
|
||||
Deduplicate communities
|
||||
*/
|
||||
func (communities Communities) Unique() Communities {
|
||||
seen := map[string]bool{}
|
||||
result := make(Communities, 0, len(communities))
|
||||
|
||||
for _, com := range communities {
|
||||
key := com.String()
|
||||
if _, ok := seen[key]; !ok {
|
||||
// We have not seen this community yet
|
||||
result = append(result, com)
|
||||
seen[key] = true
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
type ExtCommunity []interface{}
|
||||
|
||||
func (com ExtCommunity) String() string {
|
||||
res := ""
|
||||
for _, v := range com {
|
||||
res += fmt.Sprintf(":%v", v)
|
||||
}
|
||||
return res[1:]
|
||||
}
|
||||
|
||||
type ExtCommunities []ExtCommunity
|
||||
|
||||
func (communities ExtCommunities) Unique() ExtCommunities {
|
||||
seen := map[string]bool{}
|
||||
result := make(ExtCommunities, 0, len(communities))
|
||||
|
||||
for _, com := range communities {
|
||||
key := com.String()
|
||||
if _, ok := seen[key]; !ok {
|
||||
// We have not seen this community yet
|
||||
result = append(result, com)
|
||||
seen[key] = true
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
type BgpInfo struct {
|
||||
Origin string `json:"origin"`
|
||||
AsPath []int `json:"as_path"`
|
||||
NextHop string `json:"next_hop"`
|
||||
Communities Communities `json:"communities"`
|
||||
LargeCommunities Communities `json:"large_communities"`
|
||||
ExtCommunities ExtCommunities `json:"ext_communities"`
|
||||
LocalPref int `json:"local_pref"`
|
||||
Med int `json:"med"`
|
||||
}
|
||||
|
||||
func (bgp BgpInfo) HasCommunity(community Community) bool {
|
||||
if len(community) != 2 {
|
||||
return false // This can never match.
|
||||
}
|
||||
|
||||
for _, com := range bgp.Communities {
|
||||
if len(com) != len(community) {
|
||||
continue // This can't match.
|
||||
}
|
||||
|
||||
if com[0] == community[0] &&
|
||||
com[1] == community[1] {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (bgp BgpInfo) HasExtCommunity(community ExtCommunity) bool {
|
||||
if len(community) != 3 {
|
||||
return false // This can never match.
|
||||
}
|
||||
|
||||
for _, com := range bgp.ExtCommunities {
|
||||
if len(com) != len(community) {
|
||||
continue // This can't match.
|
||||
}
|
||||
|
||||
if com[0] == community[0] &&
|
||||
com[1] == community[1] &&
|
||||
com[2] == community[2] {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (bgp BgpInfo) HasLargeCommunity(community Community) bool {
|
||||
// TODO: This is an almost 1:1 match to the function above.
|
||||
if len(community) != 3 {
|
||||
return false // This can never match.
|
||||
}
|
||||
|
||||
for _, com := range bgp.LargeCommunities {
|
||||
if len(com) != len(community) {
|
||||
continue // This can't match.
|
||||
}
|
||||
|
||||
if com[0] == community[0] &&
|
||||
com[1] == community[1] &&
|
||||
com[2] == community[2] {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
@ -1,81 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"time"
|
||||
)
|
||||
|
||||
// Neighbours
|
||||
type Neighbours []*Neighbour
|
||||
|
||||
type Neighbour struct {
|
||||
Id string `json:"id"`
|
||||
|
||||
// Mandatory fields
|
||||
Address string `json:"address"`
|
||||
Asn int `json:"asn"`
|
||||
State string `json:"state"`
|
||||
Description string `json:"description"`
|
||||
RoutesReceived int `json:"routes_received"`
|
||||
RoutesFiltered int `json:"routes_filtered"`
|
||||
RoutesExported int `json:"routes_exported"`
|
||||
RoutesPreferred int `json:"routes_preferred"`
|
||||
RoutesAccepted int `json:"routes_accepted"`
|
||||
Uptime time.Duration `json:"uptime"`
|
||||
LastError string `json:"last_error"`
|
||||
|
||||
// Original response
|
||||
Details map[string]interface{} `json:"details"`
|
||||
}
|
||||
|
||||
// Implement sorting interface for routes
|
||||
func (neighbours Neighbours) Len() int {
|
||||
return len(neighbours)
|
||||
}
|
||||
|
||||
func (neighbours Neighbours) Less(i, j int) bool {
|
||||
return neighbours[i].Asn < neighbours[j].Asn
|
||||
}
|
||||
|
||||
func (neighbours Neighbours) Swap(i, j int) {
|
||||
neighbours[i], neighbours[j] = neighbours[j], neighbours[i]
|
||||
}
|
||||
|
||||
type NeighboursResponse struct {
|
||||
Api ApiStatus `json:"api"`
|
||||
Neighbours Neighbours `json:"neighbours"`
|
||||
}
|
||||
|
||||
// Neighbours response is cacheable
|
||||
func (self *NeighboursResponse) CacheTtl() time.Duration {
|
||||
now := time.Now().UTC()
|
||||
return self.Api.Ttl.Sub(now)
|
||||
}
|
||||
|
||||
type NeighboursLookupResults map[string]Neighbours
|
||||
|
||||
|
||||
type NeighboursStatus []*NeighbourStatus
|
||||
|
||||
type NeighbourStatus struct {
|
||||
Id string `json:"id"`
|
||||
State string `json:"state"`
|
||||
Since time.Duration `json:"uptime"`
|
||||
}
|
||||
|
||||
// Implement sorting interface for status
|
||||
func (neighbours NeighboursStatus) Len() int {
|
||||
return len(neighbours)
|
||||
}
|
||||
|
||||
func (neighbours NeighboursStatus) Less(i, j int) bool {
|
||||
return neighbours[i].Id < neighbours[j].Id
|
||||
}
|
||||
|
||||
func (neighbours NeighboursStatus) Swap(i, j int) {
|
||||
neighbours[i], neighbours[j] = neighbours[j], neighbours[i]
|
||||
}
|
||||
|
||||
type NeighboursStatusResponse struct {
|
||||
Api ApiStatus `json:"api"`
|
||||
Neighbours NeighboursStatus `json:"neighbours"`
|
||||
}
|
@ -1,191 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"time"
|
||||
)
|
||||
|
||||
// Prefixes
|
||||
type Route struct {
|
||||
Id string `json:"id"`
|
||||
NeighbourId string `json:"neighbour_id"`
|
||||
|
||||
Network string `json:"network"`
|
||||
Interface string `json:"interface"`
|
||||
Gateway string `json:"gateway"`
|
||||
Metric int `json:"metric"`
|
||||
Bgp BgpInfo `json:"bgp"`
|
||||
Age time.Duration `json:"age"`
|
||||
Type []string `json:"type"` // [BGP, unicast, univ]
|
||||
Primary bool `json:"primary"`
|
||||
|
||||
Details Details `json:"details"`
|
||||
}
|
||||
|
||||
// Implement Filterable interface for routes
|
||||
func (self *Route) MatchSourceId(id string) bool {
|
||||
return true // A route has no source info so we exclude this filter
|
||||
}
|
||||
|
||||
func (self *Route) MatchAsn(asn int) bool {
|
||||
return true // Same here
|
||||
}
|
||||
|
||||
// Only community filters are interesting at this point:
|
||||
func (self *Route) MatchCommunity(community Community) bool {
|
||||
return self.Bgp.HasCommunity(community)
|
||||
}
|
||||
|
||||
func (self *Route) MatchExtCommunity(community ExtCommunity) bool {
|
||||
return self.Bgp.HasExtCommunity(community)
|
||||
}
|
||||
|
||||
func (self *Route) MatchLargeCommunity(community Community) bool {
|
||||
return self.Bgp.HasLargeCommunity(community)
|
||||
}
|
||||
|
||||
type Routes []*Route
|
||||
|
||||
// Implement sorting interface for routes
|
||||
func (routes Routes) Len() int {
|
||||
return len(routes)
|
||||
}
|
||||
|
||||
func (routes Routes) Less(i, j int) bool {
|
||||
return routes[i].Network < routes[j].Network
|
||||
}
|
||||
|
||||
func (routes Routes) Swap(i, j int) {
|
||||
routes[i], routes[j] = routes[j], routes[i]
|
||||
}
|
||||
|
||||
type RoutesResponse struct {
|
||||
Api ApiStatus `json:"api"`
|
||||
Imported Routes `json:"imported"`
|
||||
Filtered Routes `json:"filtered"`
|
||||
NotExported Routes `json:"not_exported"`
|
||||
}
|
||||
|
||||
func (self *RoutesResponse) CacheTtl() time.Duration {
|
||||
now := time.Now().UTC()
|
||||
return self.Api.Ttl.Sub(now)
|
||||
}
|
||||
|
||||
type TimedResponse struct {
|
||||
RequestDuration float64 `json:"request_duration_ms"`
|
||||
}
|
||||
|
||||
type Pagination struct {
|
||||
Page int `json:"page"`
|
||||
PageSize int `json:"page_size"`
|
||||
TotalPages int `json:"total_pages"`
|
||||
TotalResults int `json:"total_results"`
|
||||
}
|
||||
|
||||
type PaginatedResponse struct {
|
||||
Pagination Pagination `json:"pagination"`
|
||||
}
|
||||
|
||||
type FilterableResponse struct {
|
||||
FiltersAvailable *SearchFilters `json:"filters_available"`
|
||||
FiltersApplied *SearchFilters `json:"filters_applied"`
|
||||
}
|
||||
|
||||
type PaginatedRoutesResponse struct {
|
||||
*RoutesResponse
|
||||
TimedResponse
|
||||
FilterableResponse
|
||||
Pagination Pagination `json:"pagination"`
|
||||
}
|
||||
|
||||
// Lookup Prefixes
|
||||
type LookupRoute struct {
|
||||
Id string `json:"id"`
|
||||
NeighbourId string `json:"neighbour_id"`
|
||||
Neighbour *Neighbour `json:"neighbour"`
|
||||
|
||||
State string `json:"state"` // Filtered, Imported, ...
|
||||
|
||||
Routeserver Routeserver `json:"routeserver"`
|
||||
|
||||
Network string `json:"network"`
|
||||
Interface string `json:"interface"`
|
||||
Gateway string `json:"gateway"`
|
||||
Metric int `json:"metric"`
|
||||
Bgp BgpInfo `json:"bgp"`
|
||||
Age time.Duration `json:"age"`
|
||||
Type []string `json:"type"` // [BGP, unicast, univ]
|
||||
Primary bool `json:"primary"`
|
||||
|
||||
Details Details `json:"details"`
|
||||
}
|
||||
|
||||
// Implement Filterable interface for lookup routes
|
||||
func (self *LookupRoute) MatchSourceId(id string) bool {
|
||||
return self.Routeserver.Id == id
|
||||
}
|
||||
|
||||
func (self *LookupRoute) MatchAsn(asn int) bool {
|
||||
return self.Neighbour.Asn == asn
|
||||
}
|
||||
|
||||
// Only community filters are interesting at this point:
|
||||
func (self *LookupRoute) MatchCommunity(community Community) bool {
|
||||
return self.Bgp.HasCommunity(community)
|
||||
}
|
||||
|
||||
func (self *LookupRoute) MatchExtCommunity(community ExtCommunity) bool {
|
||||
return self.Bgp.HasExtCommunity(community)
|
||||
}
|
||||
|
||||
func (self *LookupRoute) MatchLargeCommunity(community Community) bool {
|
||||
return self.Bgp.HasLargeCommunity(community)
|
||||
}
|
||||
|
||||
// Implement sorting interface for lookup routes
|
||||
func (routes LookupRoutes) Len() int {
|
||||
return len(routes)
|
||||
}
|
||||
|
||||
func (routes LookupRoutes) Less(i, j int) bool {
|
||||
return routes[i].Network < routes[j].Network
|
||||
}
|
||||
|
||||
func (routes LookupRoutes) Swap(i, j int) {
|
||||
routes[i], routes[j] = routes[j], routes[i]
|
||||
}
|
||||
|
||||
type LookupRoutes []*LookupRoute
|
||||
|
||||
// TODO: Naming is a bit yuck
|
||||
type LookupRoutesResponse struct {
|
||||
*PaginatedResponse
|
||||
Routes LookupRoutes `json:"routes"`
|
||||
}
|
||||
|
||||
// TODO: Refactor this (might be legacy)
|
||||
type RoutesLookupResponse struct {
|
||||
Api ApiStatus `json:"api"`
|
||||
Routes LookupRoutes `json:"routes"`
|
||||
}
|
||||
|
||||
type RoutesLookupResponseGlobal struct {
|
||||
Routes LookupRoutes `json:"routes"`
|
||||
|
||||
// Pagination
|
||||
TotalRoutes int `json:"total_routes"`
|
||||
Limit int `json:"limit"`
|
||||
Offset int `json:"offset"`
|
||||
|
||||
// Meta
|
||||
Time float64 `json:"query_duration_ms"`
|
||||
}
|
||||
|
||||
type PaginatedRoutesLookupResponse struct {
|
||||
TimedResponse
|
||||
FilterableResponse
|
||||
|
||||
Api ApiStatus `json:"api"` // Add to provide cache status information
|
||||
|
||||
Imported *LookupRoutesResponse `json:"imported"`
|
||||
Filtered *LookupRoutesResponse `json:"filtered"`
|
||||
}
|
@ -1,543 +0,0 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"net/url"
|
||||
)
|
||||
|
||||
const (
|
||||
SEARCH_KEY_SOURCES = "sources"
|
||||
SEARCH_KEY_ASNS = "asns"
|
||||
SEARCH_KEY_COMMUNITIES = "communities"
|
||||
SEARCH_KEY_EXT_COMMUNITIES = "ext_communities"
|
||||
SEARCH_KEY_LARGE_COMMUNITIES = "large_communities"
|
||||
)
|
||||
|
||||
/*
|
||||
API Search
|
||||
|
||||
* Helper methods for searching
|
||||
* Handle filter criteria
|
||||
|
||||
*/
|
||||
type Filterable interface {
|
||||
MatchSourceId(sourceId string) bool
|
||||
MatchAsn(asn int) bool
|
||||
MatchCommunity(community Community) bool
|
||||
MatchExtCommunity(community ExtCommunity) bool
|
||||
MatchLargeCommunity(community Community) bool
|
||||
}
|
||||
|
||||
type FilterValue interface{}
|
||||
|
||||
type SearchFilter struct {
|
||||
Cardinality int `json:"cardinality"`
|
||||
Name string `json:"name"`
|
||||
Value FilterValue `json:"value"`
|
||||
}
|
||||
|
||||
type SearchFilterCmpFunc func(a FilterValue, b FilterValue) bool
|
||||
|
||||
func searchFilterCmpInt(a FilterValue, b FilterValue) bool {
|
||||
return a.(int) == b.(int)
|
||||
}
|
||||
|
||||
func searchFilterCmpString(a FilterValue, b FilterValue) bool {
|
||||
return a.(string) == b.(string)
|
||||
}
|
||||
|
||||
func searchFilterCmpCommunity(a FilterValue, b FilterValue) bool {
|
||||
ca := a.(Community)
|
||||
cb := b.(Community)
|
||||
|
||||
if len(ca) != len(cb) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Compare components
|
||||
for i := range ca {
|
||||
if ca[i] != cb[i] {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func searchFilterCmpExtCommunity(a FilterValue, b FilterValue) bool {
|
||||
ca := a.(ExtCommunity)
|
||||
cb := b.(ExtCommunity)
|
||||
|
||||
if len(ca) != len(cb) || len(ca) != 3 || len(cb) != 3 {
|
||||
return false
|
||||
}
|
||||
|
||||
return ca[0] == cb[0] && ca[1] == cb[1] && ca[2] == cb[2]
|
||||
}
|
||||
|
||||
func (self *SearchFilter) Equal(other *SearchFilter) bool {
|
||||
var cmp SearchFilterCmpFunc
|
||||
switch other.Value.(type) {
|
||||
case Community:
|
||||
cmp = searchFilterCmpCommunity
|
||||
break
|
||||
case ExtCommunity:
|
||||
cmp = searchFilterCmpExtCommunity
|
||||
break
|
||||
case int:
|
||||
cmp = searchFilterCmpInt
|
||||
break
|
||||
case string:
|
||||
cmp = searchFilterCmpString
|
||||
break
|
||||
}
|
||||
|
||||
if cmp == nil {
|
||||
log.Println("Unknown search filter value type")
|
||||
return false
|
||||
}
|
||||
|
||||
return cmp(self.Value, other.Value)
|
||||
}
|
||||
|
||||
/*
|
||||
Search Filter Groups
|
||||
*/
|
||||
|
||||
type SearchFilterGroup struct {
|
||||
Key string `json:"key"`
|
||||
|
||||
Filters []*SearchFilter `json:"filters"`
|
||||
filtersIdx map[string]int
|
||||
}
|
||||
|
||||
func (self *SearchFilterGroup) FindFilter(filter *SearchFilter) *SearchFilter {
|
||||
for _, f := range self.Filters {
|
||||
if f.Equal(filter) {
|
||||
return f
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (self *SearchFilterGroup) Contains(filter *SearchFilter) bool {
|
||||
return self.FindFilter(filter) != nil
|
||||
}
|
||||
|
||||
func (self *SearchFilterGroup) GetFilterByValue(value interface{}) *SearchFilter {
|
||||
// I've tried it with .(fmt.Stringer), but int does not implement this...
|
||||
// So whatever. I'm using the trick of letting Sprintf choose the right
|
||||
// conversion. If this is too expensive, we need to refactor this.
|
||||
// TODO: profile this.
|
||||
idx, ok := self.filtersIdx[fmt.Sprintf("%v", value)]
|
||||
if !ok {
|
||||
return nil // We don't have this particular filter
|
||||
}
|
||||
|
||||
return self.Filters[idx]
|
||||
}
|
||||
|
||||
func (self *SearchFilterGroup) AddFilter(filter *SearchFilter) {
|
||||
// Check if a filter with this value is present, if not:
// append and update index; otherwise increment cardinality
|
||||
if presentFilter := self.GetFilterByValue(filter.Value); presentFilter != nil {
|
||||
presentFilter.Cardinality++
|
||||
return
|
||||
}
|
||||
|
||||
// Insert filter
|
||||
idx := len(self.Filters)
|
||||
filter.Cardinality = 1
|
||||
self.Filters = append(self.Filters, filter)
|
||||
self.filtersIdx[fmt.Sprintf("%v", filter.Value)] = idx
|
||||
}
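To make the cardinality bookkeeping in AddFilter concrete, here is a minimal sketch (not part of this diff) of the expected behaviour when the same value is added twice. It assumes Community is the slice-based community type defined elsewhere in this package, and it is written as an in-package test so it may use the unexported filtersIdx field.

// filters_cardinality_example_test.go (illustrative sketch only)
package api

import "testing"

func TestAddFilterCardinality(t *testing.T) {
	group := &SearchFilterGroup{
		Key:        SEARCH_KEY_COMMUNITIES,
		Filters:    []*SearchFilter{},
		filtersIdx: map[string]int{},
	}

	c := Community{23, 42}
	group.AddFilter(&SearchFilter{Name: "23:42", Value: c})
	group.AddFilter(&SearchFilter{Name: "23:42", Value: c}) // same value again

	if len(group.Filters) != 1 {
		t.Errorf("expected a single filter entry, got %d", len(group.Filters))
	}
	if group.Filters[0].Cardinality != 2 {
		t.Errorf("expected cardinality 2, got %d", group.Filters[0].Cardinality)
	}
}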
|
||||
|
||||
func (self *SearchFilterGroup) AddFilters(filters []*SearchFilter) {
|
||||
for _, filter := range filters {
|
||||
self.AddFilter(filter)
|
||||
}
|
||||
}
|
||||
|
||||
func (self *SearchFilterGroup) rebuildIndex() {
|
||||
self.filtersIdx = map[string]int{}
|
||||
|
||||
for i, filter := range self.Filters {
|
||||
self.filtersIdx[fmt.Sprintf("%v", filter.Value)] = i
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
Search comparators
|
||||
*/
|
||||
type SearchFilterComparator func(route Filterable, value interface{}) bool
|
||||
|
||||
func searchFilterMatchSource(route Filterable, value interface{}) bool {
|
||||
sourceId, ok := value.(string)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
return route.MatchSourceId(sourceId)
|
||||
}
|
||||
|
||||
func searchFilterMatchAsn(route Filterable, value interface{}) bool {
|
||||
asn, ok := value.(int)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
|
||||
return route.MatchAsn(asn)
|
||||
}
|
||||
|
||||
func searchFilterMatchCommunity(route Filterable, value interface{}) bool {
|
||||
community, ok := value.(Community)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
return route.MatchCommunity(community)
|
||||
}
|
||||
|
||||
func searchFilterMatchExtCommunity(route Filterable, value interface{}) bool {
|
||||
community, ok := value.(ExtCommunity)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
return route.MatchExtCommunity(community)
|
||||
}
|
||||
|
||||
func searchFilterMatchLargeCommunity(route Filterable, value interface{}) bool {
|
||||
community, ok := value.(Community)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
return route.MatchLargeCommunity(community)
|
||||
}
|
||||
|
||||
func selectCmpFuncByKey(key string) SearchFilterComparator {
|
||||
var cmp SearchFilterComparator
|
||||
switch key {
|
||||
case SEARCH_KEY_SOURCES:
|
||||
cmp = searchFilterMatchSource
|
||||
break
|
||||
case SEARCH_KEY_ASNS:
|
||||
cmp = searchFilterMatchAsn
|
||||
break
|
||||
case SEARCH_KEY_COMMUNITIES:
|
||||
cmp = searchFilterMatchCommunity
|
||||
break
|
||||
case SEARCH_KEY_EXT_COMMUNITIES:
|
||||
cmp = searchFilterMatchExtCommunity
|
||||
break
|
||||
case SEARCH_KEY_LARGE_COMMUNITIES:
|
||||
cmp = searchFilterMatchLargeCommunity
|
||||
break
|
||||
default:
|
||||
cmp = nil
|
||||
}
|
||||
|
||||
return cmp
|
||||
}
|
||||
|
||||
func (self *SearchFilterGroup) MatchAny(route Filterable) bool {
|
||||
// Check if we have any filter to match
|
||||
if len(self.Filters) == 0 {
|
||||
return true // no filter, everything matches
|
||||
}
|
||||
|
||||
// Get comparator
|
||||
cmp := selectCmpFuncByKey(self.Key)
|
||||
if cmp == nil {
|
||||
return false // This should not have happened!
|
||||
}
|
||||
|
||||
// Check if any of the given filters matches
|
||||
for _, filter := range self.Filters {
|
||||
if cmp(route, filter.Value) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (self *SearchFilterGroup) MatchAll(route Filterable) bool {
|
||||
// Check if we have any filter to match
|
||||
if len(self.Filters) == 0 {
|
||||
return true // no filter, everything matches. Like above.
|
||||
}
|
||||
|
||||
// Get comparator
|
||||
cmp := selectCmpFuncByKey(self.Key)
|
||||
if cmp == nil {
|
||||
return false // This again should not have happened!
|
||||
}
|
||||
|
||||
// Assert that all filters match.
|
||||
for _, filter := range self.Filters {
|
||||
if !cmp(route, filter.Value) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Everything's fine.
|
||||
return true
|
||||
}
|
||||
|
||||
type SearchFilters []*SearchFilterGroup
|
||||
|
||||
func NewSearchFilters() *SearchFilters {
|
||||
// Define groups: CAVEAT! the order is relevant
|
||||
groups := &SearchFilters{
|
||||
&SearchFilterGroup{
|
||||
Key: SEARCH_KEY_SOURCES,
|
||||
Filters: []*SearchFilter{},
|
||||
filtersIdx: make(map[string]int),
|
||||
},
|
||||
&SearchFilterGroup{
|
||||
Key: SEARCH_KEY_ASNS,
|
||||
Filters: []*SearchFilter{},
|
||||
filtersIdx: make(map[string]int),
|
||||
},
|
||||
&SearchFilterGroup{
|
||||
Key: SEARCH_KEY_COMMUNITIES,
|
||||
Filters: []*SearchFilter{},
|
||||
filtersIdx: make(map[string]int),
|
||||
},
|
||||
&SearchFilterGroup{
|
||||
Key: SEARCH_KEY_EXT_COMMUNITIES,
|
||||
Filters: []*SearchFilter{},
|
||||
filtersIdx: make(map[string]int),
|
||||
},
|
||||
&SearchFilterGroup{
|
||||
Key: SEARCH_KEY_LARGE_COMMUNITIES,
|
||||
Filters: []*SearchFilter{},
|
||||
filtersIdx: make(map[string]int),
|
||||
},
|
||||
}
|
||||
|
||||
return groups
|
||||
}
|
||||
|
||||
func (self *SearchFilters) GetGroupByKey(key string) *SearchFilterGroup {
|
||||
// This is an optimization (this is basically a fixed hash map,
// with hash(key) = position(key))
|
||||
switch key {
|
||||
case SEARCH_KEY_SOURCES:
|
||||
return (*self)[0]
|
||||
case SEARCH_KEY_ASNS:
|
||||
return (*self)[1]
|
||||
case SEARCH_KEY_COMMUNITIES:
|
||||
return (*self)[2]
|
||||
case SEARCH_KEY_EXT_COMMUNITIES:
|
||||
return (*self)[3]
|
||||
case SEARCH_KEY_LARGE_COMMUNITIES:
|
||||
return (*self)[4]
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
/*
|
||||
Update filter struct to include route:
|
||||
- Extract ASN, source, bgp communities,
|
||||
- Find Filter in group, increment result count if required.
|
||||
*/
|
||||
func (self *SearchFilters) UpdateFromLookupRoute(route *LookupRoute) {
|
||||
// Add source
|
||||
self.GetGroupByKey(SEARCH_KEY_SOURCES).AddFilter(&SearchFilter{
|
||||
Name: route.Routeserver.Name,
|
||||
Value: route.Routeserver.Id,
|
||||
})
|
||||
|
||||
// Add ASN from neighbor
|
||||
self.GetGroupByKey(SEARCH_KEY_ASNS).AddFilter(&SearchFilter{
|
||||
Name: route.Neighbour.Description,
|
||||
Value: route.Neighbour.Asn,
|
||||
})
|
||||
|
||||
// Add communities
|
||||
communities := self.GetGroupByKey(SEARCH_KEY_COMMUNITIES)
|
||||
for _, c := range route.Bgp.Communities.Unique() {
|
||||
communities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
})
|
||||
}
|
||||
extCommunities := self.GetGroupByKey(SEARCH_KEY_EXT_COMMUNITIES)
|
||||
for _, c := range route.Bgp.ExtCommunities.Unique() {
|
||||
extCommunities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
})
|
||||
}
|
||||
largeCommunities := self.GetGroupByKey(SEARCH_KEY_LARGE_COMMUNITIES)
|
||||
for _, c := range route.Bgp.LargeCommunities.Unique() {
|
||||
largeCommunities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// This is the same as above, but only the communities
|
||||
// are considered.
|
||||
func (self *SearchFilters) UpdateFromRoute(route *Route) {
|
||||
|
||||
// Add communities
|
||||
communities := self.GetGroupByKey(SEARCH_KEY_COMMUNITIES)
|
||||
for _, c := range route.Bgp.Communities.Unique() {
|
||||
communities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
})
|
||||
}
|
||||
extCommunities := self.GetGroupByKey(SEARCH_KEY_EXT_COMMUNITIES)
|
||||
for _, c := range route.Bgp.ExtCommunities.Unique() {
|
||||
extCommunities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
})
|
||||
}
|
||||
largeCommunities := self.GetGroupByKey(SEARCH_KEY_LARGE_COMMUNITIES)
|
||||
for _, c := range route.Bgp.LargeCommunities.Unique() {
|
||||
largeCommunities.AddFilter(&SearchFilter{
|
||||
Name: c.String(),
|
||||
Value: c,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
Build filter struct from query params:
|
||||
For example a query string of:
|
||||
asns=2342,23123&communities=23:42&large_communities=23:42:42
|
||||
yields a filtering struct of
|
||||
Groups[
|
||||
Group{"sources", []},
|
||||
Group{"asns", [Filter{Value: 2342},
|
||||
Filter{Value: 23123}]},
|
||||
Group{"communities", ...
|
||||
}
|
||||
|
||||
*/
|
||||
func FiltersFromQuery(query url.Values) (*SearchFilters, error) {
|
||||
queryFilters := NewSearchFilters()
|
||||
for key := range query {
|
||||
value := query.Get(key)
|
||||
switch key {
|
||||
case SEARCH_KEY_SOURCES:
|
||||
filters, err := parseQueryValueList(parseStringValue, value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
queryFilters.GetGroupByKey(SEARCH_KEY_SOURCES).AddFilters(filters)
|
||||
break
|
||||
|
||||
case SEARCH_KEY_ASNS:
|
||||
filters, err := parseQueryValueList(parseIntValue, value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
queryFilters.GetGroupByKey(SEARCH_KEY_ASNS).AddFilters(filters)
|
||||
break
|
||||
|
||||
case SEARCH_KEY_COMMUNITIES:
|
||||
filters, err := parseQueryValueList(parseCommunityValue, value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
queryFilters.GetGroupByKey(SEARCH_KEY_COMMUNITIES).AddFilters(filters)
|
||||
break
|
||||
|
||||
case SEARCH_KEY_EXT_COMMUNITIES:
|
||||
filters, err := parseQueryValueList(parseExtCommunityValue, value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
queryFilters.GetGroupByKey(SEARCH_KEY_EXT_COMMUNITIES).AddFilters(filters)
|
||||
break
|
||||
|
||||
case SEARCH_KEY_LARGE_COMMUNITIES:
|
||||
filters, err := parseQueryValueList(parseCommunityValue, value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
queryFilters.GetGroupByKey(SEARCH_KEY_LARGE_COMMUNITIES).AddFilters(filters)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return queryFilters, nil
|
||||
}
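A hedged usage sketch for FiltersFromQuery, matching the query-string example in the comment block above. It assumes the parse helpers referenced here (parseQueryValueList and friends) split comma-separated values the way that example suggests, so the printed counts are expectations rather than guarantees.

// filters_from_query_example.go (illustrative sketch only)
package api

import (
	"log"
	"net/url"
)

func exampleFiltersFromQuery() {
	query, err := url.ParseQuery("asns=2342,23123&communities=23:42&large_communities=23:42:42")
	if err != nil {
		log.Fatal(err)
	}

	filters, err := FiltersFromQuery(query)
	if err != nil {
		log.Fatal(err)
	}

	asns := filters.GetGroupByKey(SEARCH_KEY_ASNS)
	communities := filters.GetGroupByKey(SEARCH_KEY_COMMUNITIES)
	log.Println("asn filters:", len(asns.Filters))              // expected: 2
	log.Println("community filters:", len(communities.Filters)) // expected: 1
}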
|
||||
|
||||
/*
|
||||
Match a route: check whether the route matches all filter groups.
Groups without any filters match everything.
|
||||
*/
|
||||
func (self *SearchFilters) MatchRoute(route Filterable) bool {
|
||||
sources := self.GetGroupByKey(SEARCH_KEY_SOURCES)
|
||||
if !sources.MatchAny(route) {
|
||||
return false
|
||||
}
|
||||
|
||||
asns := self.GetGroupByKey(SEARCH_KEY_ASNS)
|
||||
if !asns.MatchAny(route) {
|
||||
return false
|
||||
}
|
||||
|
||||
communities := self.GetGroupByKey(SEARCH_KEY_COMMUNITIES)
|
||||
if !communities.MatchAll(route) {
|
||||
return false
|
||||
}
|
||||
|
||||
extCommunities := self.GetGroupByKey(SEARCH_KEY_EXT_COMMUNITIES)
|
||||
if !extCommunities.MatchAll(route) {
|
||||
return false
|
||||
}
|
||||
|
||||
largeCommunities := self.GetGroupByKey(SEARCH_KEY_LARGE_COMMUNITIES)
|
||||
if !largeCommunities.MatchAll(route) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (self *SearchFilters) Sub(other *SearchFilters) *SearchFilters {
|
||||
result := make(SearchFilters, len(*self))
|
||||
|
||||
for id, group := range *self {
|
||||
otherGroup := (*other)[id]
|
||||
diff := &SearchFilterGroup{
|
||||
Key: group.Key,
|
||||
Filters: []*SearchFilter{},
|
||||
}
|
||||
|
||||
// Combine filters
|
||||
for _, f := range group.Filters {
|
||||
if otherGroup.Contains(f) {
|
||||
continue // Let's skip this
|
||||
}
|
||||
diff.Filters = append(diff.Filters, f)
|
||||
}
|
||||
|
||||
diff.rebuildIndex()
|
||||
result[id] = diff
|
||||
}
|
||||
|
||||
return &result
|
||||
}
|
||||
|
||||
func (self *SearchFilters) MergeProperties(other *SearchFilters) {
|
||||
for id, group := range *self {
|
||||
otherGroup := (*other)[id]
|
||||
for _, filter := range group.Filters {
|
||||
otherFilter := otherGroup.FindFilter(filter)
|
||||
if otherFilter == nil {
|
||||
// Filter not present on other side, ignore this.
|
||||
continue
|
||||
}
|
||||
filter.Name = otherFilter.Name
|
||||
filter.Cardinality = otherFilter.Cardinality
|
||||
}
|
||||
}
|
||||
}
|
@ -1,60 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
"github.com/julienschmidt/httprouter"
|
||||
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// Handle Status Endpoint, this is intended for
|
||||
// monitoring and service health checks
|
||||
func apiStatusShow(_req *http.Request, _params httprouter.Params) (api.Response, error) {
|
||||
status, err := NewAppStatus()
|
||||
return status, err
|
||||
}
|
||||
|
||||
// Handle status
|
||||
func apiStatus(_req *http.Request, params httprouter.Params) (api.Response, error) {
|
||||
rsId, err := validateSourceId(params.ByName("id"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
source := AliceConfig.SourceInstanceById(rsId)
|
||||
if source == nil {
|
||||
return nil, SOURCE_NOT_FOUND_ERROR
|
||||
}
|
||||
|
||||
result, err := source.Status()
|
||||
if err != nil {
|
||||
apiLogSourceError("status", rsId, err)
|
||||
}
|
||||
|
||||
return result, err
|
||||
}
|
||||
|
||||
// Handle Config Endpoint
|
||||
func apiConfigShow(_req *http.Request, _params httprouter.Params) (api.Response, error) {
|
||||
result := api.ConfigResponse{
|
||||
Asn: AliceConfig.Server.Asn,
|
||||
BgpCommunities: AliceConfig.Ui.BgpCommunities,
|
||||
RejectReasons: AliceConfig.Ui.RoutesRejections.Reasons,
|
||||
Noexport: api.Noexport{
|
||||
LoadOnDemand: AliceConfig.Ui.RoutesNoexports.LoadOnDemand,
|
||||
},
|
||||
NoexportReasons: AliceConfig.Ui.RoutesNoexports.Reasons,
|
||||
RejectCandidates: api.RejectCandidates{
|
||||
Communities: AliceConfig.Ui.RoutesRejectCandidates.Communities,
|
||||
},
|
||||
Rpki: api.Rpki(AliceConfig.Ui.Rpki),
|
||||
RoutesColumns: AliceConfig.Ui.RoutesColumns,
|
||||
RoutesColumnsOrder: AliceConfig.Ui.RoutesColumnsOrder,
|
||||
NeighboursColumns: AliceConfig.Ui.NeighboursColumns,
|
||||
NeighboursColumnsOrder: AliceConfig.Ui.NeighboursColumnsOrder,
|
||||
LookupColumns: AliceConfig.Ui.LookupColumns,
|
||||
LookupColumnsOrder: AliceConfig.Ui.LookupColumnsOrder,
|
||||
PrefixLookupEnabled: AliceConfig.Server.EnablePrefixLookup,
|
||||
}
|
||||
return result, nil
|
||||
}
|
@ -1,56 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
"github.com/julienschmidt/httprouter"
|
||||
|
||||
"net/http"
|
||||
"sort"
|
||||
)
|
||||
|
||||
// Handle get neighbors on routeserver
|
||||
func apiNeighborsList(_req *http.Request, params httprouter.Params) (api.Response, error) {
|
||||
rsId, err := validateSourceId(params.ByName("id"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var neighborsResponse *api.NeighboursResponse
|
||||
|
||||
// Try to fetch neighbors from store, only fall back
|
||||
// to RS query if store is not ready yet
|
||||
sourceStatus := AliceNeighboursStore.SourceStatus(rsId)
|
||||
if sourceStatus.State == STATE_READY {
|
||||
neighbors := AliceNeighboursStore.GetNeighborsAt(rsId)
|
||||
// Make response
|
||||
neighborsResponse = &api.NeighboursResponse{
|
||||
Api: api.ApiStatus{
|
||||
Version: version,
|
||||
CacheStatus: api.CacheStatus{
|
||||
OrigTtl: 0,
|
||||
CachedAt: sourceStatus.LastRefresh,
|
||||
},
|
||||
ResultFromCache: true, // you bet!
|
||||
Ttl: sourceStatus.LastRefresh.Add(
|
||||
AliceNeighboursStore.refreshInterval),
|
||||
},
|
||||
Neighbours: neighbors,
|
||||
}
|
||||
} else {
|
||||
source := AliceConfig.SourceInstanceById(rsId)
|
||||
if source == nil {
|
||||
return nil, SOURCE_NOT_FOUND_ERROR
|
||||
}
|
||||
|
||||
neighborsResponse, err = source.Neighbours()
|
||||
if err != nil {
|
||||
apiLogSourceError("neighbors", rsId, err)
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Sort result
|
||||
sort.Sort(&neighborsResponse.Neighbours)
|
||||
|
||||
return neighborsResponse, nil
|
||||
}
|
@ -1,36 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
"github.com/julienschmidt/httprouter"
|
||||
|
||||
"net/http"
|
||||
"sort"
|
||||
)
|
||||
|
||||
// Handle Routeservers List
|
||||
func apiRouteserversList(_req *http.Request, _params httprouter.Params) (api.Response, error) {
|
||||
// Get list of sources from config,
|
||||
routeservers := api.Routeservers{}
|
||||
|
||||
sources := AliceConfig.Sources
|
||||
for _, source := range sources {
|
||||
routeservers = append(routeservers, api.Routeserver{
|
||||
Id: source.Id,
|
||||
Name: source.Name,
|
||||
Group: source.Group,
|
||||
Blackholes: source.Blackholes,
|
||||
Order: source.Order,
|
||||
})
|
||||
}
|
||||
|
||||
// Assert routeserver ordering
|
||||
sort.Sort(routeservers)
|
||||
|
||||
// Make routeservers response
|
||||
response := api.RouteserversResponse{
|
||||
Routeservers: routeservers,
|
||||
}
|
||||
|
||||
return response, nil
|
||||
}
|
@ -1,136 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
"github.com/julienschmidt/httprouter"
|
||||
|
||||
"net/http"
|
||||
"sort"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Handle global lookup
|
||||
func apiLookupPrefixGlobal(
|
||||
req *http.Request,
|
||||
params httprouter.Params,
|
||||
) (api.Response, error) {
|
||||
// TODO: This function is too long
|
||||
|
||||
// Get prefix to query
|
||||
q, err := validateQueryString(req, "q")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
q, err = validatePrefixQuery(q)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Check what we want to query
|
||||
// Prefix -> fetch prefix
|
||||
// _ -> fetch neighbours and routes
|
||||
lookupPrefix := MaybePrefix(q)
|
||||
|
||||
// Measure response time
|
||||
t0 := time.Now()
|
||||
|
||||
// Get additional filter criteria
|
||||
filtersApplied, err := api.FiltersFromQuery(req.URL.Query())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Perform query
|
||||
var routes api.LookupRoutes
|
||||
if lookupPrefix {
|
||||
routes = AliceRoutesStore.LookupPrefix(q)
|
||||
|
||||
} else {
|
||||
neighbours := AliceNeighboursStore.LookupNeighbours(q)
|
||||
routes = AliceRoutesStore.LookupPrefixForNeighbours(neighbours)
|
||||
}
|
||||
|
||||
// Split routes
|
||||
// TODO: Refactor at neighbors store
|
||||
totalResults := len(routes)
|
||||
imported := make(api.LookupRoutes, 0, totalResults)
|
||||
filtered := make(api.LookupRoutes, 0, totalResults)
|
||||
|
||||
// Now that we have allocated the space, process routes by splitting,
// filtering and updating the available filters...
|
||||
filtersAvailable := api.NewSearchFilters()
|
||||
for _, r := range routes {
|
||||
|
||||
if !filtersApplied.MatchRoute(r) {
|
||||
continue // Exclude route from results set
|
||||
}
|
||||
|
||||
switch r.State {
|
||||
case "filtered":
|
||||
filtered = append(filtered, r)
|
||||
break
|
||||
case "imported":
|
||||
imported = append(imported, r)
|
||||
break
|
||||
}
|
||||
|
||||
filtersAvailable.UpdateFromLookupRoute(r)
|
||||
}
|
||||
|
||||
// Remove applied filters from available
|
||||
filtersApplied.MergeProperties(filtersAvailable)
|
||||
filtersAvailable = filtersAvailable.Sub(filtersApplied)
|
||||
|
||||
// Homogenize results
|
||||
sort.Sort(imported)
|
||||
sort.Sort(filtered)
|
||||
|
||||
// Paginate results
|
||||
pageImported := apiQueryMustInt(req, "page_imported", 0)
|
||||
pageSizeImported := AliceConfig.Ui.Pagination.RoutesAcceptedPageSize
|
||||
routesImported, paginationImported := apiPaginateLookupRoutes(
|
||||
imported, pageImported, pageSizeImported,
|
||||
)
|
||||
|
||||
pageFiltered := apiQueryMustInt(req, "page_filtered", 0)
|
||||
pageSizeFiltered := AliceConfig.Ui.Pagination.RoutesFilteredPageSize
|
||||
routesFiltered, paginationFiltered := apiPaginateLookupRoutes(
|
||||
filtered, pageFiltered, pageSizeFiltered,
|
||||
)
|
||||
|
||||
// Calculate query duration
|
||||
queryDuration := time.Since(t0)
|
||||
|
||||
// Make response
|
||||
response := api.PaginatedRoutesLookupResponse{
|
||||
Api: api.ApiStatus{
|
||||
CacheStatus: api.CacheStatus{
|
||||
CachedAt: AliceRoutesStore.CachedAt(),
|
||||
},
|
||||
ResultFromCache: true, // Well.
|
||||
Ttl: AliceRoutesStore.CacheTtl(),
|
||||
},
|
||||
TimedResponse: api.TimedResponse{
|
||||
RequestDuration: DurationMs(queryDuration),
|
||||
},
|
||||
Imported: &api.LookupRoutesResponse{
|
||||
Routes: routesImported,
|
||||
PaginatedResponse: &api.PaginatedResponse{
|
||||
Pagination: paginationImported,
|
||||
},
|
||||
},
|
||||
Filtered: &api.LookupRoutesResponse{
|
||||
Routes: routesFiltered,
|
||||
PaginatedResponse: &api.PaginatedResponse{
|
||||
Pagination: paginationFiltered,
|
||||
},
|
||||
},
|
||||
FilterableResponse: api.FilterableResponse{
|
||||
FiltersAvailable: filtersAvailable,
|
||||
FiltersApplied: filtersApplied,
|
||||
},
|
||||
}
|
||||
|
||||
return response, nil
|
||||
}
|
@ -1,72 +0,0 @@
|
||||
package main
|
||||
|
||||
// Improve error handling
|
||||
// Create api.ErrorResponses based on errors returned from server.
|
||||
// Strip out potentially sensitive information, e.g. connection errors
|
||||
// to internal IP addresses.
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
)
|
||||
|
||||
type ResourceNotFoundError struct{}
|
||||
|
||||
func (self *ResourceNotFoundError) Error() string {
|
||||
return "resource not found"
|
||||
}
|
||||
|
||||
var SOURCE_NOT_FOUND_ERROR = &ResourceNotFoundError{}
|
||||
|
||||
const (
|
||||
GENERIC_ERROR_TAG = "GENERIC_ERROR"
|
||||
CONNECTION_REFUSED_TAG = "CONNECTION_REFUSED"
|
||||
CONNECTION_TIMEOUT_TAG = "CONNECTION_TIMEOUT"
|
||||
RESOURCE_NOT_FOUND_TAG = "NOT_FOUND"
|
||||
)
|
||||
|
||||
const (
|
||||
GENERIC_ERROR_CODE = 42
|
||||
CONNECTION_REFUSED_CODE = 100
|
||||
CONNECTION_TIMEOUT_CODE = 101
|
||||
RESOURCE_NOT_FOUND_CODE = 404
|
||||
)
|
||||
|
||||
const (
|
||||
ERROR_STATUS = http.StatusInternalServerError
|
||||
RESOURCE_NOT_FOUND_STATUS = http.StatusNotFound
|
||||
)
|
||||
|
||||
func apiErrorResponse(routeserverId string, err error) (api.ErrorResponse, int) {
|
||||
code := GENERIC_ERROR_CODE
|
||||
message := err.Error()
|
||||
tag := GENERIC_ERROR_TAG
|
||||
status := ERROR_STATUS
|
||||
|
||||
switch e := err.(type) {
|
||||
case *ResourceNotFoundError:
|
||||
tag = RESOURCE_NOT_FOUND_TAG
|
||||
code = RESOURCE_NOT_FOUND_CODE
|
||||
status = RESOURCE_NOT_FOUND_STATUS
|
||||
case *url.Error:
|
||||
if strings.Contains(message, "connection refused") {
|
||||
tag = CONNECTION_REFUSED_TAG
|
||||
code = CONNECTION_REFUSED_CODE
|
||||
message = "Connection refused while dialing the API"
|
||||
} else if e.Timeout() {
|
||||
tag = CONNECTION_TIMEOUT_TAG
|
||||
code = CONNECTION_TIMEOUT_CODE
|
||||
message = "Connection timed out when connecting to the backend API"
|
||||
}
|
||||
}
|
||||
|
||||
return api.ErrorResponse{
|
||||
Code: code,
|
||||
Tag: tag,
|
||||
Message: message,
|
||||
RouteserverId: routeserverId,
|
||||
}, status
|
||||
}
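A short sketch of how the mapping above is expected to behave for the predefined not-found error; the status and tag follow directly from the constants in this file.

// error_response_example.go (illustrative sketch only)
package main

import "fmt"

func exampleErrorResponse() {
	response, status := apiErrorResponse("rs1", SOURCE_NOT_FOUND_ERROR)
	fmt.Println(status)       // 404 (http.StatusNotFound)
	fmt.Println(response.Tag) // "NOT_FOUND"
}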
|
@ -1,25 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestApiLogSourceError(t *testing.T) {
|
||||
err := fmt.Errorf("an unexpected error occurred")
|
||||
|
||||
conf := &Config{
|
||||
Sources: []*SourceConfig{
|
||||
&SourceConfig{
|
||||
Id: "rs1v4",
|
||||
Name: "rs1.example.net (IPv4)",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
AliceConfig = conf
|
||||
|
||||
apiLogSourceError("foo.bar", "rs1v4", 23, "Test")
|
||||
apiLogSourceError("foo.bam", "rs1v4", err)
|
||||
apiLogSourceError("foo.baz", "rs1v4", 23, 42, "foo", err)
|
||||
}
|
@ -1,81 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// Helper: Validate source Id
|
||||
func validateSourceId(id string) (string, error) {
|
||||
if len(id) > 42 {
|
||||
return "unknown", fmt.Errorf("Source ID too long with length: %d", len(id))
|
||||
}
|
||||
return id, nil
|
||||
}
|
||||
|
||||
// Helper: Validate query string
|
||||
func validateQueryString(req *http.Request, key string) (string, error) {
|
||||
query := req.URL.Query()
|
||||
values, ok := query[key]
|
||||
if !ok {
|
||||
return "", fmt.Errorf("Query param %s is missing.", key)
|
||||
}
|
||||
|
||||
if len(values) != 1 {
|
||||
return "", fmt.Errorf("Query param %s is ambigous.", key)
|
||||
}
|
||||
|
||||
value := values[0]
|
||||
if value == "" {
|
||||
return "", fmt.Errorf("Query param %s may not be empty.", key)
|
||||
}
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// Helper: Validate prefix query
|
||||
func validatePrefixQuery(value string) (string, error) {
|
||||
|
||||
// We should at least provide 2 chars
|
||||
if len(value) < 2 {
|
||||
return "", fmt.Errorf("Query too short")
|
||||
}
|
||||
|
||||
// Query constraints: Should at least include a dot or colon
|
||||
/* let's try without this :)
|
||||
|
||||
if strings.Index(value, ".") == -1 &&
|
||||
strings.Index(value, ":") == -1 {
|
||||
return "", fmt.Errorf("Query needs at least a ':' or '.'")
|
||||
}
|
||||
*/
|
||||
|
||||
return value, nil
|
||||
}
|
||||
|
||||
// Get pagination parameters: limit and offset
|
||||
// Refer to defaults if none are given.
|
||||
func validatePaginationParams(req *http.Request, limit, offset int) (int, int, error) {
|
||||
query := req.URL.Query()
|
||||
queryLimit, ok := query["limit"]
|
||||
if ok {
|
||||
limit, _ = strconv.Atoi(queryLimit[0])
|
||||
}
|
||||
|
||||
queryOffset, ok := query["offset"]
|
||||
if ok {
|
||||
offset, _ = strconv.Atoi(queryOffset[0])
|
||||
}
|
||||
|
||||
// Cap limit to [1, 500]
|
||||
if limit < 1 {
|
||||
limit = 1
|
||||
}
|
||||
if limit > 500 {
|
||||
limit = 500
|
||||
}
|
||||
|
||||
return limit, offset, nil
|
||||
}
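A small sketch of the clamping behaviour described above, using net/http/httptest to build a request; the defaults passed in (50 and 0) are arbitrary example values.

// pagination_example.go (illustrative sketch only)
package main

import (
	"fmt"
	"net/http/httptest"
)

func examplePagination() {
	req := httptest.NewRequest("GET", "/api/lookup?limit=9000", nil)
	limit, offset, _ := validatePaginationParams(req, 50, 0)
	fmt.Println(limit, offset) // 500 0: limit is clamped to 500, offset keeps its default
}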
|
@ -1,53 +0,0 @@
|
||||
package caches
|
||||
|
||||
import (
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
)
|
||||
|
||||
/*
|
||||
The birdwatcher already caches the responses from
|
||||
bird and provides the API consumers with information
|
||||
on how long the information is valid.
|
||||
|
||||
However, to avoid unnecessary network requests to the
|
||||
birdwatcher, we keep a local cache. (This comes in handy
|
||||
when we are paginating the results for better client performance.)
|
||||
*/
|
||||
|
||||
type NeighborsCache struct {
|
||||
response *api.NeighboursResponse
|
||||
disabled bool
|
||||
}
|
||||
|
||||
func NewNeighborsCache(disabled bool) *NeighborsCache {
|
||||
cache := &NeighborsCache{
|
||||
response: nil,
|
||||
disabled: disabled,
|
||||
}
|
||||
|
||||
return cache
|
||||
}
|
||||
|
||||
func (self *NeighborsCache) Get() *api.NeighboursResponse {
|
||||
if self.disabled {
|
||||
return nil
|
||||
}
|
||||
|
||||
if self.response == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if self.response.CacheTtl() < 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
return self.response
|
||||
}
|
||||
|
||||
func (self *NeighborsCache) Set(response *api.NeighboursResponse) {
|
||||
if self.disabled {
|
||||
return
|
||||
}
|
||||
|
||||
self.response = response
|
||||
}
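A minimal usage sketch for the neighbors cache, assuming api.NeighboursResponse exposes an ApiStatus with a Ttl timestamp just like the routes response shown earlier in this diff; Get returns nil once that TTL has passed or when the cache is disabled.

// neighbors_cache_example.go (illustrative sketch only)
package caches

import (
	"fmt"
	"time"

	"github.com/alice-lg/alice-lg/backend/api"
)

func exampleNeighborsCache() {
	cache := NewNeighborsCache(false) // enabled

	// A response that stays valid for another five minutes.
	response := &api.NeighboursResponse{
		Api: api.ApiStatus{
			Ttl: time.Now().UTC().Add(5 * time.Minute),
		},
	}
	cache.Set(response)

	fmt.Println(cache.Get() != nil) // true while the TTL has not expired
}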
|
@ -1,94 +0,0 @@
|
||||
package caches
|
||||
|
||||
import (
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
/*
|
||||
Routes Cache:
|
||||
Keep a kv map with neighborId <-> api.RoutesResponse
|
||||
TTL is derived from the api.RoutesResponse.
|
||||
|
||||
To avoid memory issues, we only keep N responses (MRU) (per RS).
|
||||
*/
|
||||
type RoutesCache struct {
|
||||
responses map[string]*api.RoutesResponse
|
||||
accessedAt LRUMap
|
||||
|
||||
disabled bool
|
||||
size int
|
||||
|
||||
sync.Mutex
|
||||
}
|
||||
|
||||
func NewRoutesCache(disabled bool, size int) *RoutesCache {
|
||||
cache := &RoutesCache{
|
||||
responses: make(map[string]*api.RoutesResponse),
|
||||
accessedAt: make(map[string]time.Time),
|
||||
disabled: disabled,
|
||||
size: size,
|
||||
}
|
||||
|
||||
return cache
|
||||
}
|
||||
|
||||
func (self *RoutesCache) Get(neighborId string) *api.RoutesResponse {
|
||||
if self.disabled {
|
||||
return nil
|
||||
}
|
||||
|
||||
self.Lock()
|
||||
defer self.Unlock()
|
||||
|
||||
response, ok := self.responses[neighborId]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
if response.CacheTtl() < 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
self.accessedAt[neighborId] = time.Now()
|
||||
|
||||
return response
|
||||
}
|
||||
|
||||
func (self *RoutesCache) Set(neighborId string, response *api.RoutesResponse) {
|
||||
if self.disabled {
|
||||
return
|
||||
}
|
||||
|
||||
self.Lock()
|
||||
defer self.Unlock()
|
||||
|
||||
if len(self.responses) > self.size {
|
||||
// delete LRU
|
||||
lru := self.accessedAt.LRU()
|
||||
delete(self.accessedAt, lru)
|
||||
delete(self.responses, lru)
|
||||
}
|
||||
|
||||
self.accessedAt[neighborId] = time.Now()
|
||||
self.responses[neighborId] = response
|
||||
}
|
||||
|
||||
func (self *RoutesCache) Expire() int {
|
||||
self.Lock()
|
||||
defer self.Unlock()
|
||||
|
||||
expiredKeys := []string{}
|
||||
for key, response := range self.responses {
|
||||
if response.CacheTtl() < 0 {
|
||||
expiredKeys = append(expiredKeys, key)
|
||||
}
|
||||
}
|
||||
|
||||
for _, key := range expiredKeys {
|
||||
delete(self.responses, key)
|
||||
}
|
||||
|
||||
return len(expiredKeys)
|
||||
}
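A usage sketch for the LRU-bounded routes cache. LRUMap is defined elsewhere in this package and is assumed here to return the key with the oldest access time. Note that eviction only happens once the map already holds more than `size` responses, so with size 2 the fourth Set evicts the least recently accessed entry.

// routes_cache_example.go (illustrative sketch only)
package caches

import (
	"fmt"
	"time"

	"github.com/alice-lg/alice-lg/backend/api"
)

func exampleRoutesCache() {
	cache := NewRoutesCache(false, 2)

	freshResponse := func() *api.RoutesResponse {
		return &api.RoutesResponse{
			Api: api.ApiStatus{Ttl: time.Now().UTC().Add(5 * time.Minute)},
		}
	}

	cache.Set("neighbor-1", freshResponse())
	cache.Set("neighbor-2", freshResponse())
	cache.Set("neighbor-3", freshResponse())
	// The map now holds 3 > 2 entries, so this Set evicts the least
	// recently accessed response (neighbor-1).
	cache.Set("neighbor-4", freshResponse())

	fmt.Println(cache.Get("neighbor-1") == nil) // true: evicted
	fmt.Println(cache.Get("neighbor-4") != nil) // true: still cached
}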
|
@ -1,753 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/alice-lg/alice-lg/backend/sources"
|
||||
"github.com/alice-lg/alice-lg/backend/sources/birdwatcher"
|
||||
"github.com/alice-lg/alice-lg/backend/sources/gobgp"
|
||||
|
||||
"github.com/go-ini/ini"
|
||||
)
|
||||
|
||||
const SOURCE_UNKNOWN = 0
|
||||
const SOURCE_BIRDWATCHER = 1
|
||||
const SOURCE_GOBGP = 2
|
||||
|
||||
type ServerConfig struct {
|
||||
Listen string `ini:"listen_http"`
|
||||
EnablePrefixLookup bool `ini:"enable_prefix_lookup"`
|
||||
NeighboursStoreRefreshInterval int `ini:"neighbours_store_refresh_interval"`
|
||||
RoutesStoreRefreshInterval int `ini:"routes_store_refresh_interval"`
|
||||
Asn int `ini:"asn"`
|
||||
EnableNeighborsStatusRefresh bool `ini:"enable_neighbors_status_refresh"`
|
||||
}
|
||||
|
||||
type HousekeepingConfig struct {
|
||||
Interval int `ini:"interval"`
|
||||
ForceReleaseMemory bool `ini:"force_release_memory"`
|
||||
}
|
||||
|
||||
type RejectionsConfig struct {
|
||||
Reasons BgpCommunities
|
||||
}
|
||||
|
||||
type NoexportsConfig struct {
|
||||
Reasons BgpCommunities
|
||||
LoadOnDemand bool `ini:"load_on_demand"`
|
||||
}
|
||||
|
||||
type RejectCandidatesConfig struct {
|
||||
Communities BgpCommunities
|
||||
}
|
||||
|
||||
type RpkiConfig struct {
|
||||
// Define communities
|
||||
Enabled bool `ini:"enabled"`
|
||||
Valid []string `ini:"valid"`
|
||||
Unknown []string `ini:"unknown"`
|
||||
NotChecked []string `ini:"not_checked"`
|
||||
Invalid []string `ini:"invalid"`
|
||||
}
|
||||
|
||||
type UiConfig struct {
|
||||
RoutesColumns map[string]string
|
||||
RoutesColumnsOrder []string
|
||||
|
||||
NeighboursColumns map[string]string
|
||||
NeighboursColumnsOrder []string
|
||||
|
||||
LookupColumns map[string]string
|
||||
LookupColumnsOrder []string
|
||||
|
||||
RoutesRejections RejectionsConfig
|
||||
RoutesNoexports NoexportsConfig
|
||||
RoutesRejectCandidates RejectCandidatesConfig
|
||||
|
||||
BgpCommunities BgpCommunities
|
||||
Rpki RpkiConfig
|
||||
|
||||
Theme ThemeConfig
|
||||
|
||||
Pagination PaginationConfig
|
||||
}
|
||||
|
||||
type ThemeConfig struct {
|
||||
Path string `ini:"path"`
|
||||
BasePath string `ini:"url_base"` // Optional, default: /theme
|
||||
}
|
||||
|
||||
type PaginationConfig struct {
|
||||
RoutesFilteredPageSize int `ini:"routes_filtered_page_size"`
|
||||
RoutesAcceptedPageSize int `ini:"routes_accepted_page_size"`
|
||||
RoutesNotExportedPageSize int `ini:"routes_not_exported_page_size"`
|
||||
}
|
||||
|
||||
type SourceConfig struct {
|
||||
Id string
|
||||
Order int
|
||||
Name string
|
||||
Group string
|
||||
|
||||
// Blackhole IPs
|
||||
Blackholes []string
|
||||
|
||||
// Source configurations
|
||||
Type int
|
||||
Birdwatcher birdwatcher.Config
|
||||
GoBGP gobgp.Config
|
||||
|
||||
// Source instance
|
||||
instance sources.Source
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
Server ServerConfig
|
||||
Housekeeping HousekeepingConfig
|
||||
Ui UiConfig
|
||||
Sources []*SourceConfig
|
||||
File string
|
||||
}
|
||||
|
||||
// Get source by id
|
||||
func (self *Config) SourceById(sourceId string) *SourceConfig {
|
||||
for _, sourceConfig := range self.Sources {
|
||||
if sourceConfig.Id == sourceId {
|
||||
return sourceConfig
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Get instance by id
|
||||
func (self *Config) SourceInstanceById(sourceId string) sources.Source {
|
||||
sourceConfig := self.SourceById(sourceId)
|
||||
if sourceConfig == nil {
|
||||
return nil // Nothing to do here.
|
||||
}
|
||||
|
||||
// Get instance from config
|
||||
return sourceConfig.getInstance()
|
||||
}
|
||||
|
||||
// Get sources keys from ini
|
||||
func getSourcesKeys(config *ini.File) []string {
|
||||
sources := []string{}
|
||||
sections := config.SectionStrings()
|
||||
for _, section := range sections {
|
||||
if strings.HasPrefix(section, "source") {
|
||||
sources = append(sources, section)
|
||||
}
|
||||
}
|
||||
return sources
|
||||
}
|
||||
|
||||
func isSourceBase(section *ini.Section) bool {
|
||||
return len(strings.Split(section.Name(), ".")) == 2
|
||||
}
|
||||
|
||||
// Get backend configuration type
|
||||
func getBackendType(section *ini.Section) int {
|
||||
name := section.Name()
|
||||
if strings.HasSuffix(name, "birdwatcher") {
|
||||
return SOURCE_BIRDWATCHER
|
||||
} else if strings.HasSuffix(name, "gobgp") {
|
||||
return SOURCE_GOBGP
|
||||
}
|
||||
|
||||
return SOURCE_UNKNOWN
|
||||
}
|
||||
|
||||
// Get UI config: Routes Columns Default
|
||||
func getRoutesColumnsDefaults() (map[string]string, []string, error) {
|
||||
columns := map[string]string{
|
||||
"network": "Network",
|
||||
"bgp.as_path": "AS Path",
|
||||
"gateway": "Gateway",
|
||||
"interface": "Interface",
|
||||
}
|
||||
|
||||
order := []string{"network", "bgp.as_path", "gateway", "interface"}
|
||||
|
||||
return columns, order, nil
|
||||
}
|
||||
|
||||
// Get UI config: Routes Columns
|
||||
// The columns displayed in the frontend.
|
||||
// The columns are ordered as in the config file.
|
||||
//
|
||||
// In case the configuration is empty, fall back to
|
||||
// the defaults as defined in getRoutesColumnsDefaults()
|
||||
//
|
||||
func getRoutesColumns(config *ini.File) (map[string]string, []string, error) {
|
||||
columns := make(map[string]string)
|
||||
order := []string{}
|
||||
|
||||
section := config.Section("routes_columns")
|
||||
keys := section.Keys()
|
||||
|
||||
if len(keys) == 0 {
|
||||
return getRoutesColumnsDefaults()
|
||||
}
|
||||
|
||||
for _, key := range keys {
|
||||
columns[key.Name()] = section.Key(key.Name()).MustString("")
|
||||
order = append(order, key.Name())
|
||||
}
|
||||
|
||||
return columns, order, nil
|
||||
}
|
||||
|
||||
// Get UI config: Get Neighbours Columns Defaults
|
||||
func getNeighboursColumnsDefaults() (map[string]string, []string, error) {
|
||||
columns := map[string]string{
|
||||
"address": "Neighbour",
|
||||
"asn": "ASN",
|
||||
"state": "State",
|
||||
"Uptime": "Uptime",
|
||||
"Description": "Description",
|
||||
"routes_received": "Routes Recv.",
|
||||
"routes_filtered": "Routes Filtered",
|
||||
}
|
||||
|
||||
order := []string{
|
||||
"address", "asn", "state",
|
||||
"Uptime", "Description", "routes_received", "routes_filtered",
|
||||
}
|
||||
|
||||
return columns, order, nil
|
||||
}
|
||||
|
||||
// Get UI config: Get Neighbours Columns
|
||||
// basically the same as with the routes columns.
|
||||
func getNeighboursColumns(config *ini.File) (
|
||||
map[string]string,
|
||||
[]string,
|
||||
error,
|
||||
) {
|
||||
columns := make(map[string]string)
|
||||
order := []string{}
|
||||
|
||||
section := config.Section("neighbours_columns")
|
||||
keys := section.Keys()
|
||||
|
||||
if len(keys) == 0 {
|
||||
return getNeighboursColumnsDefaults()
|
||||
}
|
||||
|
||||
for _, key := range keys {
|
||||
columns[key.Name()] = section.Key(key.Name()).MustString("")
|
||||
order = append(order, key.Name())
|
||||
}
|
||||
|
||||
return columns, order, nil
|
||||
}
|
||||
|
||||
// Get UI config: Get Prefix search / Routes lookup columns
|
||||
// As these differ slightly from our routes in the response
|
||||
// (e.g. the neighbor and source rs are referenced as nested objects)
|
||||
// we provide an additional configuration for this
|
||||
func getLookupColumnsDefaults() (map[string]string, []string, error) {
|
||||
columns := map[string]string{
|
||||
"network": "Network",
|
||||
"gateway": "Gateway",
|
||||
"neighbour.asn": "ASN",
|
||||
"neighbour.description": "Neighbor",
|
||||
"bgp.as_path": "AS Path",
|
||||
"routeserver.name": "RS",
|
||||
}
|
||||
|
||||
order := []string{
|
||||
"network",
|
||||
"gateway",
|
||||
"bgp.as_path",
|
||||
"neighbour.asn",
|
||||
"neighbour.description",
|
||||
"routeserver.name",
|
||||
}
|
||||
|
||||
return columns, order, nil
|
||||
}
|
||||
|
||||
func getLookupColumns(config *ini.File) (
|
||||
map[string]string,
|
||||
[]string,
|
||||
error,
|
||||
) {
|
||||
columns := make(map[string]string)
|
||||
order := []string{}
|
||||
|
||||
section := config.Section("lookup_columns")
|
||||
keys := section.Keys()
|
||||
|
||||
if len(keys) == 0 {
|
||||
return getLookupColumnsDefaults()
|
||||
}
|
||||
|
||||
for _, key := range keys {
|
||||
columns[key.Name()] = section.Key(key.Name()).MustString("")
|
||||
order = append(order, key.Name())
|
||||
}
|
||||
|
||||
return columns, order, nil
|
||||
}
|
||||
|
||||
// Helper parse communities from a section body
|
||||
func parseAndMergeCommunities(
|
||||
communities BgpCommunities, body string,
|
||||
) BgpCommunities {
|
||||
|
||||
// Parse and merge communities
|
||||
lines := strings.Split(body, "\n")
|
||||
for _, line := range lines {
|
||||
kv := strings.SplitN(line, "=", 2)
|
||||
if len(kv) != 2 {
|
||||
log.Println("Skipping malformed BGP community:", line)
|
||||
continue
|
||||
}
|
||||
|
||||
community := strings.TrimSpace(kv[0])
|
||||
label := strings.TrimSpace(kv[1])
|
||||
communities.Set(community, label)
|
||||
}
|
||||
|
||||
return communities
|
||||
}
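A sketch of the body format this helper expects: one `community = label` pair per line, as it appears in the unparseable [bgp_communities] (and reason) sections. BgpCommunities and its Set method are defined elsewhere in the package, so the exact printed output is not guaranteed; the community values are just examples.

// communities_parse_example.go (illustrative sketch only)
package main

import "fmt"

func exampleParseCommunities() {
	body := "1:23 = some tag\n65535:666 = blackhole\nmalformed line without equals sign"

	communities := parseAndMergeCommunities(make(BgpCommunities), body)
	fmt.Println(communities) // the malformed line is logged and skipped
}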
|
||||
|
||||
// Get UI config: Bgp Communities
|
||||
func getBgpCommunities(config *ini.File) BgpCommunities {
|
||||
// Load defaults
|
||||
communities := MakeWellKnownBgpCommunities()
|
||||
communitiesConfig := config.Section("bgp_communities")
|
||||
if communitiesConfig == nil {
|
||||
return communities // nothing else to do here, go with the default
|
||||
}
|
||||
|
||||
return parseAndMergeCommunities(communities, communitiesConfig.Body())
|
||||
}
|
||||
|
||||
// Get UI config: Get rejections
|
||||
func getRoutesRejections(config *ini.File) (RejectionsConfig, error) {
|
||||
reasonsConfig := config.Section("rejection_reasons")
|
||||
if reasonsConfig == nil {
|
||||
return RejectionsConfig{}, nil
|
||||
}
|
||||
|
||||
reasons := parseAndMergeCommunities(
|
||||
make(BgpCommunities),
|
||||
reasonsConfig.Body())
|
||||
|
||||
rejectionsConfig := RejectionsConfig{
|
||||
Reasons: reasons,
|
||||
}
|
||||
|
||||
return rejectionsConfig, nil
|
||||
}
|
||||
|
||||
// Get UI config: Get no export config
|
||||
func getRoutesNoexports(config *ini.File) (NoexportsConfig, error) {
|
||||
baseConfig := config.Section("noexport")
|
||||
reasonsConfig := config.Section("noexport_reasons")
|
||||
|
||||
// Map base configuration
|
||||
noexportsConfig := NoexportsConfig{}
|
||||
baseConfig.MapTo(&noexportsConfig)
|
||||
|
||||
reasons := parseAndMergeCommunities(
|
||||
make(BgpCommunities),
|
||||
reasonsConfig.Body())
|
||||
|
||||
noexportsConfig.Reasons = reasons
|
||||
|
||||
return noexportsConfig, nil
|
||||
}
|
||||
|
||||
// Get UI config: Reject candidates
|
||||
func getRejectCandidatesConfig(config *ini.File) (RejectCandidatesConfig, error) {
|
||||
candidateCommunities := config.Section(
|
||||
"rejection_candidates").Key("communities").String()
|
||||
|
||||
if candidateCommunities == "" {
|
||||
return RejectCandidatesConfig{}, nil
|
||||
}
|
||||
|
||||
communities := BgpCommunities{}
|
||||
for i, c := range strings.Split(candidateCommunities, ",") {
|
||||
communities.Set(c, fmt.Sprintf("reject-candidate-%d", i+1))
|
||||
}
|
||||
|
||||
conf := RejectCandidatesConfig{
|
||||
Communities: communities,
|
||||
}
|
||||
|
||||
return conf, nil
|
||||
}
|
||||
|
||||
// Get UI config: RPKI configuration
|
||||
func getRpkiConfig(config *ini.File) (RpkiConfig, error) {
|
||||
var rpki RpkiConfig
|
||||
// Defaults taken from:
|
||||
// https://www.euro-ix.net/en/forixps/large-bgp-communities/
|
||||
section := config.Section("rpki")
|
||||
section.MapTo(&rpki)
|
||||
|
||||
fallbackAsn, err := getOwnASN(config)
|
||||
if err != nil {
|
||||
log.Println(
|
||||
"Own ASN is not configured.",
|
||||
"This might lead to unexpected behaviour with BGP large communities",
|
||||
)
|
||||
}
|
||||
ownAsn := fmt.Sprintf("%d", fallbackAsn)
|
||||
|
||||
// Fill in defaults or postprocess config value
|
||||
if len(rpki.Valid) == 0 {
|
||||
rpki.Valid = []string{ownAsn, "1000", "1"}
|
||||
} else {
|
||||
rpki.Valid = strings.SplitN(rpki.Valid[0], ":", 3)
|
||||
}
|
||||
|
||||
if len(rpki.Unknown) == 0 {
|
||||
rpki.Unknown = []string{ownAsn, "1000", "2"}
|
||||
} else {
|
||||
rpki.Unknown = strings.SplitN(rpki.Unknown[0], ":", 3)
|
||||
}
|
||||
|
||||
if len(rpki.NotChecked) == 0 {
|
||||
rpki.NotChecked = []string{ownAsn, "1000", "3"}
|
||||
} else {
|
||||
rpki.NotChecked = strings.SplitN(rpki.NotChecked[0], ":", 3)
|
||||
}
|
||||
|
||||
// As the euro-ix document states, this can be a range.
|
||||
if len(rpki.Invalid) == 0 {
|
||||
rpki.Invalid = []string{ownAsn, "1000", "4", "*"}
|
||||
} else {
|
||||
// Preprocess
|
||||
rpki.Invalid = strings.SplitN(rpki.Invalid[0], ":", 3)
|
||||
tokens := []string{}
|
||||
if len(rpki.Invalid) != 3 {
|
||||
// This is wrong, we should have three parts (RS):1000:[range]
|
||||
return rpki, fmt.Errorf("Unexpected rpki.Invalid configuration: %v", rpki.Invalid)
|
||||
} else {
|
||||
tokens = strings.Split(rpki.Invalid[2], "-")
|
||||
}
|
||||
|
||||
rpki.Invalid = append([]string{rpki.Invalid[0], rpki.Invalid[1]}, tokens...)
|
||||
}
|
||||
|
||||
return rpki, nil
|
||||
}
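For illustration, the defaults filled in above follow the euro-ix large community scheme based on the server's own ASN. With `asn = 9033` (a hypothetical example value) in [server] and an empty [rpki] section, the resulting configuration would look roughly like this sketch:

// rpki_defaults_example.go (illustrative sketch only)
package main

// Expected result of getRpkiConfig for own ASN 9033 and no [rpki] overrides.
var exampleRpkiDefaults = RpkiConfig{
	Valid:      []string{"9033", "1000", "1"},
	Unknown:    []string{"9033", "1000", "2"},
	NotChecked: []string{"9033", "1000", "3"},
	Invalid:    []string{"9033", "1000", "4", "*"},
}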
|
||||
|
||||
// Helper: Get own ASN from ini
|
||||
// This is now easy, since we enforce an ASN in
|
||||
// the [server] section.
|
||||
func getOwnASN(config *ini.File) (int, error) {
|
||||
server := config.Section("server")
|
||||
asn := server.Key("asn").MustInt(-1)
|
||||
|
||||
if asn == -1 {
|
||||
return 0, fmt.Errorf("Could not get own ASN from config")
|
||||
}
|
||||
|
||||
return asn, nil
|
||||
}
|
||||
|
||||
// Get UI config: Theme settings
|
||||
func getThemeConfig(config *ini.File) ThemeConfig {
|
||||
baseConfig := config.Section("theme")
|
||||
|
||||
themeConfig := ThemeConfig{}
|
||||
baseConfig.MapTo(&themeConfig)
|
||||
|
||||
if themeConfig.BasePath == "" {
|
||||
themeConfig.BasePath = "/theme"
|
||||
}
|
||||
|
||||
return themeConfig
|
||||
}
|
||||
|
||||
// Get UI config: Pagination settings
|
||||
func getPaginationConfig(config *ini.File) PaginationConfig {
|
||||
baseConfig := config.Section("pagination")
|
||||
|
||||
paginationConfig := PaginationConfig{}
|
||||
baseConfig.MapTo(&paginationConfig)
|
||||
|
||||
return paginationConfig
|
||||
}
|
||||
|
||||
// Get the UI configuration from the config file
|
||||
func getUiConfig(config *ini.File) (UiConfig, error) {
|
||||
uiConfig := UiConfig{}
|
||||
|
||||
// Get route columns
|
||||
routesColumns, routesColumnsOrder, err := getRoutesColumns(config)
|
||||
if err != nil {
|
||||
return uiConfig, err
|
||||
}
|
||||
|
||||
// Get neighbours table columns
|
||||
neighboursColumns,
|
||||
neighboursColumnsOrder,
|
||||
err := getNeighboursColumns(config)
|
||||
if err != nil {
|
||||
return uiConfig, err
|
||||
}
|
||||
|
||||
// Lookup table columns
|
||||
lookupColumns, lookupColumnsOrder, err := getLookupColumns(config)
|
||||
if err != nil {
|
||||
return uiConfig, err
|
||||
}
|
||||
|
||||
// Get rejections and reasons
|
||||
rejections, err := getRoutesRejections(config)
|
||||
if err != nil {
|
||||
return uiConfig, err
|
||||
}
|
||||
|
||||
noexports, err := getRoutesNoexports(config)
|
||||
if err != nil {
|
||||
return uiConfig, err
|
||||
}
|
||||
|
||||
// Get reject candidates
|
||||
rejectCandidates, _ := getRejectCandidatesConfig(config)
|
||||
|
||||
// RPKI filter config
|
||||
rpki, err := getRpkiConfig(config)
|
||||
if err != nil {
|
||||
return uiConfig, err
|
||||
}
|
||||
|
||||
// Theme configuration: Theming is optional, if no settings
|
||||
// are found, it will be ignored
|
||||
themeConfig := getThemeConfig(config)
|
||||
|
||||
// Pagination
|
||||
paginationConfig := getPaginationConfig(config)
|
||||
|
||||
// Make config
|
||||
uiConfig = UiConfig{
|
||||
RoutesColumns: routesColumns,
|
||||
RoutesColumnsOrder: routesColumnsOrder,
|
||||
|
||||
NeighboursColumns: neighboursColumns,
|
||||
NeighboursColumnsOrder: neighboursColumnsOrder,
|
||||
|
||||
LookupColumns: lookupColumns,
|
||||
LookupColumnsOrder: lookupColumnsOrder,
|
||||
|
||||
RoutesRejections: rejections,
|
||||
RoutesNoexports: noexports,
|
||||
RoutesRejectCandidates: rejectCandidates,
|
||||
|
||||
BgpCommunities: getBgpCommunities(config),
|
||||
Rpki: rpki,
|
||||
|
||||
Theme: themeConfig,
|
||||
|
||||
Pagination: paginationConfig,
|
||||
}
|
||||
|
||||
return uiConfig, nil
|
||||
}
|
||||
|
||||
func getSources(config *ini.File) ([]*SourceConfig, error) {
|
||||
sources := []*SourceConfig{}
|
||||
|
||||
order := 0
|
||||
sourceSections := config.ChildSections("source")
|
||||
for _, section := range sourceSections {
|
||||
if !isSourceBase(section) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Derive source-id from name
|
||||
sourceId := section.Name()[len("source:"):]
|
||||
|
||||
// Try to get child configs and determine
|
||||
// Source type
|
||||
sourceConfigSections := section.ChildSections()
|
||||
if len(sourceConfigSections) == 0 {
|
||||
// This source has no configured backend
|
||||
return sources, fmt.Errorf("%s has no backend configuration", section.Name())
|
||||
}
|
||||
|
||||
if len(sourceConfigSections) > 1 {
|
||||
// The source is ambiguous
|
||||
return sources, fmt.Errorf("%s has ambigous backends", section.Name())
|
||||
}
|
||||
|
||||
// Configure backend
|
||||
backendConfig := sourceConfigSections[0]
|
||||
backendType := getBackendType(backendConfig)
|
||||
|
||||
if backendType == SOURCE_UNKNOWN {
|
||||
return sources, fmt.Errorf("%s has an unsupported backend", section.Name())
|
||||
}
|
||||
|
||||
// Make config
|
||||
sourceName := section.Key("name").MustString("Unknown Source")
|
||||
sourceGroup := section.Key("group").MustString("")
|
||||
sourceBlackholes := TrimmedStringList(
|
||||
section.Key("blackholes").MustString(""))
|
||||
|
||||
config := &SourceConfig{
|
||||
Id: sourceId,
|
||||
Order: order,
|
||||
Name: sourceName,
|
||||
Group: sourceGroup,
|
||||
Blackholes: sourceBlackholes,
|
||||
Type: backendType,
|
||||
}
|
||||
|
||||
// Set backend
|
||||
switch backendType {
|
||||
case SOURCE_BIRDWATCHER:
|
||||
sourceType := backendConfig.Key("type").MustString("")
|
||||
peerTablePrefix := backendConfig.Key("peer_table_prefix").MustString("T")
|
||||
pipeProtocolPrefix := backendConfig.Key("pipe_protocol_prefix").MustString("M")
|
||||
|
||||
if sourceType != "single_table" &&
|
||||
sourceType != "multi_table" {
|
||||
log.Fatal("Configuration error (birdwatcher source) unknown birdwatcher type:", sourceType)
|
||||
}
|
||||
|
||||
log.Println("Adding birdwatcher source of type", sourceType,
|
||||
"with peer_table_prefix", peerTablePrefix,
|
||||
"and pipe_protocol_prefix", pipeProtocolPrefix)
|
||||
|
||||
c := birdwatcher.Config{
|
||||
Id: config.Id,
|
||||
Name: config.Name,
|
||||
|
||||
Timezone: "UTC",
|
||||
ServerTime: "2006-01-02T15:04:05.999999999Z07:00",
|
||||
ServerTimeShort: "2006-01-02",
|
||||
ServerTimeExt: "Mon, 02 Jan 2006 15:04:05 -0700",
|
||||
|
||||
Type: sourceType,
|
||||
PeerTablePrefix: peerTablePrefix,
|
||||
PipeProtocolPrefix: pipeProtocolPrefix,
|
||||
}
|
||||
|
||||
backendConfig.MapTo(&c)
|
||||
config.Birdwatcher = c
|
||||
|
||||
case SOURCE_GOBGP:
|
||||
c := gobgp.Config{
|
||||
Id: config.Id,
|
||||
Name: config.Name,
|
||||
}
|
||||
|
||||
backendConfig.MapTo(&c)
|
||||
config.GoBGP = c
|
||||
}
|
||||
|
||||
// Add to list of sources
|
||||
sources = append(sources, config)
|
||||
order++
|
||||
}
|
||||
|
||||
return sources, nil
|
||||
}
|
||||
|
||||
// Try to load configfiles as specified in the files
|
||||
// list. For example:
|
||||
//
|
||||
// ./etc/alice-lg/alice.conf
|
||||
// /etc/alice-lg/alice.conf
|
||||
// ./etc/alice-lg/alice.local.conf
|
||||
//
|
||||
func loadConfig(file string) (*Config, error) {
|
||||
|
||||
// Try to get config file, fallback to alternatives
|
||||
file, err := getConfigFile(file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Load configuration, but handle bgp communities section
|
||||
// with our own parser
|
||||
parsedConfig, err := ini.LoadSources(ini.LoadOptions{
|
||||
UnparseableSections: []string{
|
||||
"bgp_communities",
|
||||
"rejection_reasons",
|
||||
"noexport_reasons",
|
||||
},
|
||||
}, file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Map sections
|
||||
server := ServerConfig{}
|
||||
parsedConfig.Section("server").MapTo(&server)
|
||||
|
||||
housekeeping := HousekeepingConfig{}
|
||||
parsedConfig.Section("housekeeping").MapTo(&housekeeping)
|
||||
|
||||
// Get all sources
|
||||
sources, err := getSources(parsedConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Get UI configurations
|
||||
ui, err := getUiConfig(parsedConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
config := &Config{
|
||||
Server: server,
|
||||
Housekeeping: housekeeping,
|
||||
Ui: ui,
|
||||
Sources: sources,
|
||||
File: file,
|
||||
}
|
||||
|
||||
return config, nil
|
||||
}
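A hedged usage sketch, not part of the diff above: how the loaded configuration and the per-source backend instantiation (see getInstance below) might be driven. The configuration path is only an illustrative assumption.

```go
cfg, err := loadConfig("/etc/alice-lg/alice.conf") // example path, assumption
if err != nil {
	log.Fatal(err)
}
for _, src := range cfg.Sources {
	// getInstance() lazily constructs the configured backend (birdwatcher or gobgp)
	log.Println("Configured source:", src.Name, "backend type:", src.Type)
	_ = src.getInstance()
}
```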
|
||||
|
||||
// Get source instance from config
|
||||
func (self *SourceConfig) getInstance() sources.Source {
|
||||
if self.instance != nil {
|
||||
return self.instance
|
||||
}
|
||||
|
||||
var instance sources.Source
|
||||
switch self.Type {
|
||||
case SOURCE_BIRDWATCHER:
|
||||
instance = birdwatcher.NewBirdwatcher(self.Birdwatcher)
|
||||
case SOURCE_GOBGP:
|
||||
instance = gobgp.NewGoBGP(self.GoBGP)
|
||||
}
|
||||
|
||||
self.instance = instance
|
||||
return instance
|
||||
}
|
||||
|
||||
// Get configuration file with fallbacks
|
||||
func getConfigFile(filename string) (string, error) {
|
||||
// Check if requested file is present
|
||||
if _, err := os.Stat(filename); os.IsNotExist(err) {
|
||||
// Fall back to local filename
|
||||
filename = ".." + filename
|
||||
}
|
||||
|
||||
if _, err := os.Stat(filename); os.IsNotExist(err) {
|
||||
filename = strings.Replace(filename, ".conf", ".local.conf", 1)
|
||||
}
|
||||
|
||||
if _, err := os.Stat(filename); os.IsNotExist(err) {
|
||||
return "not_found", fmt.Errorf("could not find any configuration file")
|
||||
}
|
||||
|
||||
return filename, nil
|
||||
}
|
@ -1,176 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
// Test configuration loading and parsing
|
||||
// using the default config
|
||||
|
||||
func TestLoadConfigs(t *testing.T) {
|
||||
|
||||
config, err := loadConfig("../etc/alice-lg/alice.example.conf")
|
||||
if err != nil {
|
||||
t.Error("Could not load test config:", err)
|
||||
}
|
||||
|
||||
if config.Server.Listen == "" {
|
||||
t.Error("Listen string not present.")
|
||||
}
|
||||
|
||||
if len(config.Ui.RoutesColumns) == 0 {
|
||||
t.Error("Route columns settings missing")
|
||||
}
|
||||
|
||||
if len(config.Ui.RoutesRejections.Reasons) == 0 {
|
||||
t.Error("Rejection reasons missing")
|
||||
}
|
||||
|
||||
// Check communities
|
||||
label, err := config.Ui.BgpCommunities.Lookup("1:23")
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
if label != "some tag" {
|
||||
t.Error("expcted to find example community 1:23 with 'some tag'",
|
||||
"but got:", label)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSourceConfigDefaultsOverride(t *testing.T) {
|
||||
|
||||
config, err := loadConfig("../etc/alice-lg/alice.example.conf")
|
||||
if err != nil {
|
||||
t.Error("Could not load test config:", err)
|
||||
}
|
||||
|
||||
// Get sources
|
||||
rs1 := config.Sources[0]
|
||||
rs2 := config.Sources[1]
|
||||
|
||||
// Source 1 should be on default time
|
||||
// Source 2 should have an override
|
||||
// For now it should be sufficient to test if
|
||||
// the serverTime(rs1) != serverTime(rs2)
|
||||
if rs1.Birdwatcher.ServerTime == rs2.Birdwatcher.ServerTime {
|
||||
t.Error("Server times should be different between",
|
||||
"source 1 and 2 in example configuration",
|
||||
"(alice.example.conf)")
|
||||
}
|
||||
|
||||
// Check presence of timezone, default: UTC (rs1)
|
||||
// override: Europe/Brussels (rs2)
|
||||
if rs1.Birdwatcher.Timezone != "UTC" {
|
||||
t.Error("Expected RS1 Timezone to be default: UTC")
|
||||
}
|
||||
|
||||
if rs2.Birdwatcher.Timezone != "Europe/Brussels" {
|
||||
t.Error("Expected 'Europe/Brussels', got", rs2.Birdwatcher.Timezone)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRejectAndNoexportReasons(t *testing.T) {
|
||||
config, err := loadConfig("../etc/alice-lg/alice.example.conf")
|
||||
if err != nil {
|
||||
t.Error("Could not load test config:", err)
|
||||
}
|
||||
|
||||
// Rejection reasons
|
||||
description, err := config.Ui.RoutesRejections.Reasons.Lookup("23:42:1")
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
if description != "Some made up reason" {
|
||||
t.Error("Unexpected reason for 23:42:1 -", description)
|
||||
}
|
||||
|
||||
// Noexport reasons
|
||||
description, err = config.Ui.RoutesNoexports.Reasons.Lookup("23:46:1")
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
if description != "Some other made up reason" {
|
||||
t.Error("Unexpected reason for 23:46:1 -", description)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBlackholeParsing(t *testing.T) {
|
||||
config, err := loadConfig("../etc/alice-lg/alice.example.conf")
|
||||
if err != nil {
|
||||
t.Error("Could not load test config:", err)
|
||||
}
|
||||
|
||||
// Get first source
|
||||
rs1 := config.Sources[0]
|
||||
|
||||
if len(rs1.Blackholes) != 2 {
|
||||
t.Error("Rs1 should have configured 2 blackholes. Got:", rs1.Blackholes)
|
||||
return
|
||||
}
|
||||
|
||||
if rs1.Blackholes[0] != "10.23.6.666" {
|
||||
t.Error("Unexpected blackhole, got:", rs1.Blackholes[0])
|
||||
}
|
||||
}
|
||||
|
||||
func TestOwnASN(t *testing.T) {
|
||||
config, err := loadConfig("../etc/alice-lg/alice.example.conf")
|
||||
if err != nil {
|
||||
t.Error("Could not load test config:", err)
|
||||
}
|
||||
|
||||
if config.Server.Asn != 9033 {
|
||||
t.Error("Expected a set server asn")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRpkiConfig(t *testing.T) {
|
||||
config, err := loadConfig("../etc/alice-lg/alice.example.conf")
|
||||
if err != nil {
|
||||
t.Error("Could not load test config:", err)
|
||||
}
|
||||
|
||||
if len(config.Ui.Rpki.Valid) != 3 {
|
||||
t.Error("Unexpected RPKI:VALID,", config.Ui.Rpki.Valid)
|
||||
}
|
||||
if len(config.Ui.Rpki.Invalid) != 4 {
|
||||
t.Error("Unexpected RPKI:INVALID,", config.Ui.Rpki.Invalid)
|
||||
return // We would fail hard later
|
||||
}
|
||||
|
||||
// Check fallback
|
||||
if config.Ui.Rpki.NotChecked[0] != "9033" {
|
||||
t.Error(
|
||||
"Expected NotChecked to fall back to defaults, got:",
|
||||
config.Ui.Rpki.NotChecked,
|
||||
)
|
||||
}
|
||||
|
||||
// Check range postprocessing
|
||||
if config.Ui.Rpki.Invalid[3] != "*" {
|
||||
t.Error("Missing wildcard from config")
|
||||
}
|
||||
|
||||
t.Log(config.Ui.Rpki)
|
||||
}
|
||||
|
||||
func TestRejectCandidatesConfig(t *testing.T) {
|
||||
config, err := loadConfig("../etc/alice-lg/alice.example.conf")
|
||||
if err != nil {
|
||||
t.Error("Could not load test config:", err)
|
||||
return
|
||||
}
|
||||
|
||||
t.Log(config.Ui.RoutesRejectCandidates.Communities)
|
||||
|
||||
description, err := config.Ui.RoutesRejectCandidates.Communities.Lookup("23:42:46")
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
|
||||
if description != "reject-candidate-3" {
|
||||
t.Error("expected 23:42:46 to be a 'reject-candidate'")
|
||||
}
|
||||
}
|
@ -1,34 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"time"
|
||||
"runtime/debug"
|
||||
)
|
||||
|
||||
func Housekeeping(config *Config) {
|
||||
for {
|
||||
if config.Housekeeping.Interval > 0 {
|
||||
time.Sleep(time.Duration(config.Housekeeping.Interval) * time.Minute)
|
||||
} else {
|
||||
time.Sleep(5 * time.Minute)
|
||||
}
|
||||
|
||||
log.Println("Housekeeping started")
|
||||
|
||||
// Expire the caches
|
||||
log.Println("Expiring caches")
|
||||
for _, source := range config.Sources {
|
||||
count := source.getInstance().ExpireCaches()
|
||||
log.Println("Expired", count, "entries for source", source.Name)
|
||||
}
|
||||
|
||||
if config.Housekeeping.ForceReleaseMemory {
|
||||
// Trigger a GC and scavenger (SCVG) run
|
||||
log.Println("Freeing memory")
|
||||
debug.FreeOSMemory()
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
@ -1,69 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"log"
|
||||
"net/http"
|
||||
|
||||
"github.com/julienschmidt/httprouter"
|
||||
)
|
||||
|
||||
var AliceConfig *Config
|
||||
var AliceRoutesStore *RoutesStore
|
||||
var AliceNeighboursStore *NeighboursStore
|
||||
|
||||
func main() {
|
||||
var err error
|
||||
|
||||
// Handle commandline parameters
|
||||
configFilenameFlag := flag.String(
|
||||
"config", "/etc/alice-lg/alice.conf",
|
||||
"Alice looking glass configuration file",
|
||||
)
|
||||
|
||||
flag.Parse()
|
||||
|
||||
// Load configuration
|
||||
AliceConfig, err = loadConfig(*configFilenameFlag)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Say hi
|
||||
printBanner()
|
||||
|
||||
log.Println("Using configuration:", AliceConfig.File)
|
||||
|
||||
// Setup local routes store
|
||||
AliceRoutesStore = NewRoutesStore(AliceConfig)
|
||||
|
||||
if AliceConfig.Server.EnablePrefixLookup == true {
|
||||
AliceRoutesStore.Start()
|
||||
}
|
||||
|
||||
// Setup local neighbours store
|
||||
AliceNeighboursStore = NewNeighboursStore(AliceConfig)
|
||||
if AliceConfig.Server.EnablePrefixLookup == true {
|
||||
AliceNeighboursStore.Start()
|
||||
}
|
||||
|
||||
// Start the Housekeeping
|
||||
go Housekeeping(AliceConfig)
|
||||
|
||||
// Setup request routing
|
||||
router := httprouter.New()
|
||||
|
||||
// Serve static content
|
||||
err = webRegisterAssets(AliceConfig.Ui, router)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
err = apiRegisterEndpoints(router)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Start http server
|
||||
log.Fatal(http.ListenAndServe(AliceConfig.Server.Listen, router))
|
||||
}
|
@ -1,284 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
)
|
||||
|
||||
var REGEX_MATCH_ASLOOKUP = regexp.MustCompile(`(?i)^AS(\d+)`)
|
||||
|
||||
type NeighboursIndex map[string]*api.Neighbour
|
||||
|
||||
type NeighboursStore struct {
|
||||
neighboursMap map[string]NeighboursIndex
|
||||
configMap map[string]*SourceConfig
|
||||
statusMap map[string]StoreStatus
|
||||
refreshInterval time.Duration
|
||||
refreshNeighborStatus bool
|
||||
|
||||
sync.RWMutex
|
||||
}
|
||||
|
||||
func NewNeighboursStore(config *Config) *NeighboursStore {
|
||||
|
||||
// Build source mapping
|
||||
neighboursMap := make(map[string]NeighboursIndex)
|
||||
configMap := make(map[string]*SourceConfig)
|
||||
statusMap := make(map[string]StoreStatus)
|
||||
|
||||
for _, source := range config.Sources {
|
||||
sourceId := source.Id
|
||||
configMap[sourceId] = source
|
||||
statusMap[sourceId] = StoreStatus{
|
||||
State: STATE_INIT,
|
||||
}
|
||||
|
||||
neighboursMap[sourceId] = make(NeighboursIndex)
|
||||
}
|
||||
|
||||
// Set refresh interval, default to 5 minutes when
|
||||
// interval is set to 0
|
||||
refreshInterval := time.Duration(
|
||||
config.Server.NeighboursStoreRefreshInterval) * time.Minute
|
||||
if refreshInterval == 0 {
|
||||
refreshInterval = time.Duration(5) * time.Minute
|
||||
}
|
||||
|
||||
refreshNeighborStatus := config.Server.EnableNeighborsStatusRefresh
|
||||
|
||||
store := &NeighboursStore{
|
||||
neighboursMap: neighboursMap,
|
||||
statusMap: statusMap,
|
||||
configMap: configMap,
|
||||
refreshInterval: refreshInterval,
|
||||
refreshNeighborStatus: refreshNeighborStatus,
|
||||
}
|
||||
return store
|
||||
}
|
||||
|
||||
func (self *NeighboursStore) Start() {
|
||||
log.Println("Starting local neighbours store")
|
||||
log.Println("Neighbours Store refresh interval set to:", self.refreshInterval)
|
||||
go self.init()
|
||||
}
|
||||
|
||||
func (self *NeighboursStore) init() {
|
||||
// Perform initial update
|
||||
self.update()
|
||||
|
||||
// Initial logging
|
||||
self.Stats().Log()
|
||||
|
||||
// Periodically update store
|
||||
for {
|
||||
time.Sleep(self.refreshInterval)
|
||||
self.update()
|
||||
}
|
||||
}
|
||||
|
||||
func (self *NeighboursStore) SourceStatus(sourceId string) StoreStatus {
|
||||
self.RLock()
|
||||
status := self.statusMap[sourceId]
|
||||
self.RUnlock()
|
||||
|
||||
return status
|
||||
}
|
||||
|
||||
// Get state by source Id
|
||||
func (self *NeighboursStore) SourceState(sourceId string) int {
|
||||
status := self.SourceStatus(sourceId)
|
||||
return status.State
|
||||
}
|
||||
|
||||
// Update all neighbors
|
||||
func (self *NeighboursStore) update() {
|
||||
successCount := 0
|
||||
errorCount := 0
|
||||
t0 := time.Now()
|
||||
for sourceId, _ := range self.neighboursMap {
|
||||
// Get current state
|
||||
if self.statusMap[sourceId].State == STATE_UPDATING {
|
||||
continue // nothing to do here. really.
|
||||
}
|
||||
|
||||
// Start updating
|
||||
self.Lock()
|
||||
self.statusMap[sourceId] = StoreStatus{
|
||||
State: STATE_UPDATING,
|
||||
}
|
||||
self.Unlock()
|
||||
|
||||
sourceConfig := self.configMap[sourceId]
|
||||
source := sourceConfig.getInstance()
|
||||
|
||||
neighboursRes, err := source.Neighbours()
|
||||
if err != nil {
|
||||
log.Println(
|
||||
"Refreshing the neighbors store failed for:",
|
||||
sourceConfig.Name, "(", sourceConfig.Id, ")",
|
||||
"with:", err,
|
||||
"- NEXT STATE: ERROR",
|
||||
)
|
||||
// That's sad.
|
||||
self.Lock()
|
||||
self.statusMap[sourceId] = StoreStatus{
|
||||
State: STATE_ERROR,
|
||||
LastError: err,
|
||||
LastRefresh: time.Now(),
|
||||
}
|
||||
self.Unlock()
|
||||
|
||||
errorCount++
|
||||
continue
|
||||
}
|
||||
|
||||
neighbours := neighboursRes.Neighbours
|
||||
|
||||
// Update data
|
||||
// Make neighbours index
|
||||
index := make(NeighboursIndex)
|
||||
for _, neighbour := range neighbours {
|
||||
index[neighbour.Id] = neighbour
|
||||
}
|
||||
|
||||
self.Lock()
|
||||
self.neighboursMap[sourceId] = index
|
||||
// Update state
|
||||
self.statusMap[sourceId] = StoreStatus{
|
||||
LastRefresh: time.Now(),
|
||||
State: STATE_READY,
|
||||
}
|
||||
self.Unlock()
|
||||
successCount++
|
||||
}
|
||||
|
||||
refreshDuration := time.Since(t0)
|
||||
log.Println(
|
||||
"Refreshed neighbors store for", successCount, "of", successCount+errorCount,
|
||||
"sources with", errorCount, "error(s) in", refreshDuration,
|
||||
)
|
||||
}
|
||||
|
||||
func (self *NeighboursStore) GetNeighborsAt(sourceId string) api.Neighbours {
|
||||
self.RLock()
|
||||
neighborsIdx := self.neighboursMap[sourceId]
|
||||
self.RUnlock()
|
||||
|
||||
var neighborsStatus map[string]api.NeighbourStatus
|
||||
if self.refreshNeighborStatus {
|
||||
sourceConfig := self.configMap[sourceId]
|
||||
source := sourceConfig.getInstance()
|
||||
|
||||
neighborsStatusData, err := source.NeighboursStatus()
|
||||
if err == nil {
|
||||
neighborsStatus = make(map[string]api.NeighbourStatus, len(neighborsStatusData.Neighbours))
|
||||
|
||||
for _, neighbor := range neighborsStatusData.Neighbours {
|
||||
neighborsStatus[neighbor.Id] = *neighbor
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
neighbors := make(api.Neighbours, 0, len(neighborsIdx))
|
||||
|
||||
for _, neighbor := range neighborsIdx {
|
||||
if self.refreshNeighborStatus {
|
||||
if _, ok := neighborsStatus[neighbor.Id]; ok {
|
||||
self.Lock()
|
||||
neighbor.State = neighborsStatus[neighbor.Id].State
|
||||
self.Unlock()
|
||||
}
|
||||
}
|
||||
|
||||
neighbors = append(neighbors, neighbor)
|
||||
}
|
||||
|
||||
return neighbors
|
||||
}
|
||||
|
||||
func (self *NeighboursStore) GetNeighbourAt(
|
||||
sourceId string,
|
||||
id string,
|
||||
) *api.Neighbour {
|
||||
// Lookup neighbour on RS
|
||||
self.RLock()
|
||||
neighborsIdx := self.neighboursMap[sourceId]
|
||||
self.RUnlock()
|
||||
|
||||
return neighborsIdx[id]
|
||||
}
|
||||
|
||||
func (self *NeighboursStore) LookupNeighboursAt(
|
||||
sourceId string,
|
||||
query string,
|
||||
) api.Neighbours {
|
||||
results := api.Neighbours{}
|
||||
|
||||
self.RLock()
|
||||
neighbours := self.neighboursMap[sourceId]
|
||||
self.RUnlock()
|
||||
|
||||
asn := -1
|
||||
if REGEX_MATCH_ASLOOKUP.MatchString(query) {
|
||||
groups := REGEX_MATCH_ASLOOKUP.FindStringSubmatch(query)
|
||||
if a, err := strconv.Atoi(groups[1]); err == nil {
|
||||
asn = a
|
||||
}
|
||||
}
|
||||
|
||||
for _, neighbour := range neighbours {
|
||||
if asn >= 0 && neighbour.Asn == asn { // only executed if valid AS query is detected
|
||||
results = append(results, neighbour)
|
||||
} else if ContainsCi(neighbour.Description, query) {
|
||||
results = append(results, neighbour)
|
||||
} else {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
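A hedged illustration of the lookup behaviour implemented above, assuming `store` is a *NeighboursStore and using example values: a query matching the AS-lookup regex is compared against neighbour ASNs, while any other query is matched case-insensitively against the description.

```go
// Matches neighbours of AS2342 on source "rs1" (ASN lookup path)
byAsn := store.LookupNeighboursAt("rs1", "AS2342")

// Matches neighbours whose description contains "customer peer" (case-insensitive path)
byDescription := store.LookupNeighboursAt("rs1", "customer peer")

log.Println(len(byAsn), "matched by ASN,", len(byDescription), "matched by description")
```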
|
||||
|
||||
func (self *NeighboursStore) LookupNeighbours(
|
||||
query string,
|
||||
) api.NeighboursLookupResults {
|
||||
// Create empty result set
|
||||
results := make(api.NeighboursLookupResults)
|
||||
|
||||
for sourceId, _ := range self.neighboursMap {
|
||||
results[sourceId] = self.LookupNeighboursAt(sourceId, query)
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
// Build some stats for monitoring
|
||||
func (self *NeighboursStore) Stats() NeighboursStoreStats {
|
||||
totalNeighbours := 0
|
||||
rsStats := []RouteServerNeighboursStats{}
|
||||
|
||||
self.RLock()
|
||||
for sourceId, neighbours := range self.neighboursMap {
|
||||
status := self.statusMap[sourceId]
|
||||
totalNeighbours += len(neighbours)
|
||||
serverStats := RouteServerNeighboursStats{
|
||||
Name: self.configMap[sourceId].Name,
|
||||
State: stateToString(status.State),
|
||||
Neighbours: len(neighbours),
|
||||
UpdatedAt: status.LastRefresh,
|
||||
}
|
||||
rsStats = append(rsStats, serverStats)
|
||||
}
|
||||
self.RUnlock()
|
||||
|
||||
storeStats := NeighboursStoreStats{
|
||||
TotalNeighbours: totalNeighbours,
|
||||
RouteServers: rsStats,
|
||||
}
|
||||
return storeStats
|
||||
}
|
@ -1,160 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
|
||||
"sort"
|
||||
"testing"
|
||||
)
|
||||
|
||||
/*
|
||||
Start the global neighbours store,
|
||||
because the routes store in the tests has
|
||||
this as a dependency.
|
||||
*/
|
||||
func startTestNeighboursStore() {
|
||||
store := makeTestNeighboursStore()
|
||||
AliceNeighboursStore = store
|
||||
}
|
||||
|
||||
/*
|
||||
Make a store and populate it with data
|
||||
*/
|
||||
func makeTestNeighboursStore() *NeighboursStore {
|
||||
|
||||
// Populate neighbours
|
||||
rs1 := NeighboursIndex{
|
||||
"ID2233_AS2342": &api.Neighbour{
|
||||
Id: "ID2233_AS2342",
|
||||
Description: "PEER AS2342 192.9.23.42 Customer Peer 1",
|
||||
},
|
||||
"ID2233_AS2343": &api.Neighbour{
|
||||
Id: "ID2233_AS2343",
|
||||
Description: "PEER AS2343 192.9.23.43 Different Peer 1",
|
||||
},
|
||||
"ID2233_AS2344": &api.Neighbour{
|
||||
Id: "ID2233_AS2344",
|
||||
Description: "PEER AS2344 192.9.23.44 3rd Peer from the sun",
|
||||
},
|
||||
}
|
||||
|
||||
rs2 := NeighboursIndex{
|
||||
"ID2233_AS2342": &api.Neighbour{
|
||||
Id: "ID2233_AS2342",
|
||||
Description: "PEER AS2342 192.9.23.42 Customer Peer 1",
|
||||
},
|
||||
"ID2233_AS4223": &api.Neighbour{
|
||||
Id: "ID2233_AS4223",
|
||||
Description: "PEER AS4223 192.9.42.23 Cloudfoo Inc.",
|
||||
},
|
||||
}
|
||||
|
||||
// Create store
|
||||
store := &NeighboursStore{
|
||||
neighboursMap: map[string]NeighboursIndex{
|
||||
"rs1": rs1,
|
||||
"rs2": rs2,
|
||||
},
|
||||
statusMap: map[string]StoreStatus{
|
||||
"rs1": StoreStatus{
|
||||
State: STATE_READY,
|
||||
},
|
||||
"rs2": StoreStatus{
|
||||
State: STATE_INIT,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
return store
|
||||
}
|
||||
|
||||
func TestGetSourceState(t *testing.T) {
|
||||
store := makeTestNeighboursStore()
|
||||
|
||||
if store.SourceState("rs1") != STATE_READY {
|
||||
t.Error("Expected Source(1) to be STATE_READY")
|
||||
}
|
||||
|
||||
if store.SourceState("rs2") == STATE_READY {
|
||||
t.Error("Expected Source(2) to be NOT STATE_READY")
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetNeighbourAt(t *testing.T) {
|
||||
store := makeTestNeighboursStore()
|
||||
|
||||
neighbour := store.GetNeighbourAt("rs1", "ID2233_AS2343")
|
||||
if neighbour.Id != "ID2233_AS2343" {
|
||||
t.Error("Expected another peer in GetNeighbourAt")
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetNeighbors(t *testing.T) {
|
||||
store := makeTestNeighboursStore()
|
||||
neighbors := store.GetNeighborsAt("rs2")
|
||||
|
||||
if len(neighbors) != 2 {
|
||||
t.Error("Expected 2 neighbors, got:", len(neighbors))
|
||||
}
|
||||
|
||||
sort.Sort(neighbors)
|
||||
|
||||
if neighbors[0].Id != "ID2233_AS2342" {
|
||||
t.Error("Expected neighbor: ID2233_AS2342, got:",
|
||||
neighbors[0])
|
||||
}
|
||||
|
||||
neighbors = store.GetNeighborsAt("rs3")
|
||||
if len(neighbors) != 0 {
|
||||
t.Error("Unknown source should have yielded zero results")
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestNeighbourLookupAt(t *testing.T) {
|
||||
store := makeTestNeighboursStore()
|
||||
|
||||
expected := []string{
|
||||
"ID2233_AS2342",
|
||||
"ID2233_AS2343",
|
||||
}
|
||||
|
||||
neighbours := store.LookupNeighboursAt("rs1", "peer 1")
|
||||
|
||||
// Make index
|
||||
index := NeighboursIndex{}
|
||||
for _, n := range neighbours {
|
||||
index[n.Id] = n
|
||||
}
|
||||
|
||||
for _, id := range expected {
|
||||
_, ok := index[id]
|
||||
if !ok {
|
||||
t.Error("Expected", id, "to be in result set")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestNeighbourLookup(t *testing.T) {
|
||||
store := makeTestNeighboursStore()
|
||||
|
||||
// First result set: "Peer 1"
|
||||
_ = store
|
||||
|
||||
results := store.LookupNeighbours("Cloudfoo")
|
||||
|
||||
// Peer should be present at RS2
|
||||
neighbours, ok := results["rs2"]
|
||||
if !ok {
|
||||
t.Error("Lookup on rs2 unsuccessful.")
|
||||
}
|
||||
|
||||
if len(neighbours) > 1 {
|
||||
t.Error("Lookup should match exact 1 peer.")
|
||||
}
|
||||
|
||||
n := neighbours[0]
|
||||
if n.Id != "ID2233_AS4223" {
|
||||
t.Error("Wrong peer in lookup response")
|
||||
}
|
||||
}
|
@ -1,384 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
)
|
||||
|
||||
type RoutesStore struct {
|
||||
routesMap map[string]*api.RoutesResponse
|
||||
statusMap map[string]StoreStatus
|
||||
configMap map[string]*SourceConfig
|
||||
|
||||
refreshInterval time.Duration
|
||||
lastRefresh time.Time
|
||||
|
||||
sync.RWMutex
|
||||
}
|
||||
|
||||
func NewRoutesStore(config *Config) *RoutesStore {
|
||||
|
||||
// Build mapping based on source instances
|
||||
routesMap := make(map[string]*api.RoutesResponse)
|
||||
statusMap := make(map[string]StoreStatus)
|
||||
configMap := make(map[string]*SourceConfig)
|
||||
|
||||
for _, source := range config.Sources {
|
||||
id := source.Id
|
||||
|
||||
configMap[id] = source
|
||||
routesMap[id] = &api.RoutesResponse{}
|
||||
statusMap[id] = StoreStatus{
|
||||
State: STATE_INIT,
|
||||
}
|
||||
}
|
||||
|
||||
// Set refresh interval as duration, fall back to
|
||||
// five minutes if no interval is set.
|
||||
refreshInterval := time.Duration(
|
||||
config.Server.RoutesStoreRefreshInterval) * time.Minute
|
||||
if refreshInterval == 0 {
|
||||
refreshInterval = time.Duration(5) * time.Minute
|
||||
}
|
||||
|
||||
store := &RoutesStore{
|
||||
routesMap: routesMap,
|
||||
statusMap: statusMap,
|
||||
configMap: configMap,
|
||||
refreshInterval: refreshInterval,
|
||||
}
|
||||
return store
|
||||
}
|
||||
|
||||
func (self *RoutesStore) Start() {
|
||||
log.Println("Starting local routes store")
|
||||
log.Println("Routes Store refresh interval set to:", self.refreshInterval)
|
||||
go self.init()
|
||||
}
|
||||
|
||||
// Service initialization
|
||||
func (self *RoutesStore) init() {
|
||||
// Initial refresh
|
||||
self.update()
|
||||
|
||||
// Initial stats
|
||||
self.Stats().Log()
|
||||
|
||||
// Periodically update store
|
||||
for {
|
||||
time.Sleep(self.refreshInterval)
|
||||
self.update()
|
||||
}
|
||||
}
|
||||
|
||||
// Update all routes
|
||||
func (self *RoutesStore) update() {
|
||||
successCount := 0
|
||||
errorCount := 0
|
||||
t0 := time.Now()
|
||||
|
||||
for sourceId, _ := range self.routesMap {
|
||||
sourceConfig := self.configMap[sourceId]
|
||||
source := sourceConfig.getInstance()
|
||||
|
||||
// Get current update state
|
||||
if self.statusMap[sourceId].State == STATE_UPDATING {
|
||||
continue // nothing to do here
|
||||
}
|
||||
|
||||
// Set update state
|
||||
self.Lock()
|
||||
self.statusMap[sourceId] = StoreStatus{
|
||||
State: STATE_UPDATING,
|
||||
}
|
||||
self.Unlock()
|
||||
|
||||
routes, err := source.AllRoutes()
|
||||
if err != nil {
|
||||
log.Println(
|
||||
"Refreshing the routes store failed for:", sourceConfig.Name,
|
||||
"(", sourceConfig.Id, ")",
|
||||
"with:", err,
|
||||
"- NEXT STATE: ERROR",
|
||||
)
|
||||
|
||||
self.Lock()
|
||||
self.statusMap[sourceId] = StoreStatus{
|
||||
State: STATE_ERROR,
|
||||
LastError: err,
|
||||
LastRefresh: time.Now(),
|
||||
}
|
||||
self.Unlock()
|
||||
|
||||
errorCount++
|
||||
continue
|
||||
}
|
||||
|
||||
self.Lock()
|
||||
// Update data
|
||||
self.routesMap[sourceId] = routes
|
||||
// Update state
|
||||
self.statusMap[sourceId] = StoreStatus{
|
||||
LastRefresh: time.Now(),
|
||||
State: STATE_READY,
|
||||
}
|
||||
self.lastRefresh = time.Now().UTC()
|
||||
self.Unlock()
|
||||
|
||||
successCount++
|
||||
}
|
||||
|
||||
refreshDuration := time.Since(t0)
|
||||
log.Println(
|
||||
"Refreshed routes store for", successCount, "of", successCount+errorCount,
|
||||
"sources with", errorCount, "error(s) in", refreshDuration,
|
||||
)
|
||||
|
||||
}
|
||||
|
||||
// Calculate store insights
|
||||
func (self *RoutesStore) Stats() RoutesStoreStats {
|
||||
totalImported := 0
|
||||
totalFiltered := 0
|
||||
|
||||
rsStats := []RouteServerRoutesStats{}
|
||||
|
||||
self.RLock()
|
||||
for sourceId, routes := range self.routesMap {
|
||||
status := self.statusMap[sourceId]
|
||||
|
||||
totalImported += len(routes.Imported)
|
||||
totalFiltered += len(routes.Filtered)
|
||||
|
||||
serverStats := RouteServerRoutesStats{
|
||||
Name: self.configMap[sourceId].Name,
|
||||
|
||||
Routes: RoutesStats{
|
||||
Filtered: len(routes.Filtered),
|
||||
Imported: len(routes.Imported),
|
||||
},
|
||||
|
||||
State: stateToString(status.State),
|
||||
UpdatedAt: status.LastRefresh,
|
||||
}
|
||||
|
||||
rsStats = append(rsStats, serverStats)
|
||||
}
|
||||
self.RUnlock()
|
||||
|
||||
// Make stats
|
||||
storeStats := RoutesStoreStats{
|
||||
TotalRoutes: RoutesStats{
|
||||
Imported: totalImported,
|
||||
Filtered: totalFiltered,
|
||||
},
|
||||
RouteServers: rsStats,
|
||||
}
|
||||
return storeStats
|
||||
}
|
||||
|
||||
// Provide cache status
|
||||
func (self *RoutesStore) CachedAt() time.Time {
|
||||
return self.lastRefresh
|
||||
}
|
||||
|
||||
func (self *RoutesStore) CacheTtl() time.Time {
|
||||
return self.lastRefresh.Add(self.refreshInterval)
|
||||
}
|
||||
|
||||
// Lookup routes transform
|
||||
func routeToLookupRoute(
|
||||
source *SourceConfig,
|
||||
state string,
|
||||
route *api.Route,
|
||||
) *api.LookupRoute {
|
||||
|
||||
// Get neighbour
|
||||
neighbour := AliceNeighboursStore.GetNeighbourAt(source.Id, route.NeighbourId)
|
||||
|
||||
// Make route
|
||||
lookup := &api.LookupRoute{
|
||||
Id: route.Id,
|
||||
|
||||
NeighbourId: route.NeighbourId,
|
||||
Neighbour: neighbour,
|
||||
|
||||
Routeserver: api.Routeserver{
|
||||
Id: source.Id,
|
||||
Name: source.Name,
|
||||
},
|
||||
|
||||
State: state,
|
||||
|
||||
Network: route.Network,
|
||||
Interface: route.Interface,
|
||||
Gateway: route.Gateway,
|
||||
Metric: route.Metric,
|
||||
Bgp: route.Bgp,
|
||||
Age: route.Age,
|
||||
Type: route.Type,
|
||||
Primary: route.Primary,
|
||||
}
|
||||
|
||||
return lookup
|
||||
}
|
||||
|
||||
// Routes filter
|
||||
func filterRoutesByPrefix(
|
||||
source *SourceConfig,
|
||||
routes api.Routes,
|
||||
prefix string,
|
||||
state string,
|
||||
) api.LookupRoutes {
|
||||
results := api.LookupRoutes{}
|
||||
for _, route := range routes {
|
||||
// Naive filtering:
|
||||
if strings.HasPrefix(strings.ToLower(route.Network), prefix) {
|
||||
lookup := routeToLookupRoute(source, state, route)
|
||||
results = append(results, lookup)
|
||||
}
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
||||
func filterRoutesByNeighbourIds(
|
||||
source *SourceConfig,
|
||||
routes api.Routes,
|
||||
neighbourIds []string,
|
||||
state string,
|
||||
) api.LookupRoutes {
|
||||
|
||||
results := api.LookupRoutes{}
|
||||
for _, route := range routes {
|
||||
// Filtering:
|
||||
if MemberOf(neighbourIds, route.NeighbourId) == true {
|
||||
lookup := routeToLookupRoute(source, state, route)
|
||||
results = append(results, lookup)
|
||||
}
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
||||
// Single RS lookup by neighbour id
|
||||
func (self *RoutesStore) LookupNeighboursPrefixesAt(
|
||||
sourceId string,
|
||||
neighbourIds []string,
|
||||
) chan api.LookupRoutes {
|
||||
response := make(chan api.LookupRoutes)
|
||||
|
||||
go func() {
|
||||
self.RLock()
|
||||
source := self.configMap[sourceId]
|
||||
routes := self.routesMap[sourceId]
|
||||
self.RUnlock()
|
||||
|
||||
filtered := filterRoutesByNeighbourIds(
|
||||
source,
|
||||
routes.Filtered,
|
||||
neighbourIds,
|
||||
"filtered")
|
||||
imported := filterRoutesByNeighbourIds(
|
||||
source,
|
||||
routes.Imported,
|
||||
neighbourIds,
|
||||
"imported")
|
||||
|
||||
var result api.LookupRoutes
|
||||
result = append(filtered, imported...)
|
||||
|
||||
response <- result
|
||||
}()
|
||||
|
||||
return response
|
||||
}
|
||||
|
||||
// Single RS lookup
|
||||
func (self *RoutesStore) LookupPrefixAt(
|
||||
sourceId string,
|
||||
prefix string,
|
||||
) chan api.LookupRoutes {
|
||||
|
||||
response := make(chan api.LookupRoutes)
|
||||
|
||||
go func() {
|
||||
self.RLock()
|
||||
config := self.configMap[sourceId]
|
||||
routes := self.routesMap[sourceId]
|
||||
self.RUnlock()
|
||||
|
||||
filtered := filterRoutesByPrefix(
|
||||
config,
|
||||
routes.Filtered,
|
||||
prefix,
|
||||
"filtered")
|
||||
imported := filterRoutesByPrefix(
|
||||
config,
|
||||
routes.Imported,
|
||||
prefix,
|
||||
"imported")
|
||||
|
||||
var result api.LookupRoutes
|
||||
result = append(filtered, imported...)
|
||||
|
||||
response <- result
|
||||
}()
|
||||
|
||||
return response
|
||||
}
|
||||
|
||||
func (self *RoutesStore) LookupPrefix(prefix string) api.LookupRoutes {
|
||||
result := api.LookupRoutes{}
|
||||
responses := []chan api.LookupRoutes{}
|
||||
|
||||
// Normalize prefix to lower case
|
||||
prefix = strings.ToLower(prefix)
|
||||
|
||||
// Dispatch
|
||||
self.RLock()
|
||||
for sourceId, _ := range self.routesMap {
|
||||
res := self.LookupPrefixAt(sourceId, prefix)
|
||||
responses = append(responses, res)
|
||||
}
|
||||
self.RUnlock()
|
||||
|
||||
// Collect
|
||||
for _, response := range responses {
|
||||
routes := <-response
|
||||
result = append(result, routes...)
|
||||
close(response)
|
||||
}
|
||||
|
||||
return result
|
||||
}
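A hedged usage sketch of the fan-out pattern above: LookupPrefixAt is dispatched once per source and the per-source channel results are collected into a single list. The prefix value is taken from the tests; the global AliceRoutesStore is defined in main.go.

```go
// Query every configured route server for networks starting with the prefix
routes := AliceRoutesStore.LookupPrefix("193.200.")
log.Println("Found", len(routes), "matching routes across all sources")
```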
|
||||
|
||||
func (self *RoutesStore) LookupPrefixForNeighbours(
|
||||
neighbours api.NeighboursLookupResults,
|
||||
) api.LookupRoutes {
|
||||
|
||||
result := api.LookupRoutes{}
|
||||
responses := []chan api.LookupRoutes{}
|
||||
|
||||
// Dispatch
|
||||
for sourceId, locals := range neighbours {
|
||||
lookupNeighbourIds := []string{}
|
||||
for _, n := range locals {
|
||||
lookupNeighbourIds = append(lookupNeighbourIds, n.Id)
|
||||
}
|
||||
|
||||
res := self.LookupNeighboursPrefixesAt(sourceId, lookupNeighbourIds)
|
||||
responses = append(responses, res)
|
||||
}
|
||||
|
||||
// Collect
|
||||
for _, response := range responses {
|
||||
routes := <-response
|
||||
result = append(result, routes...)
|
||||
close(response)
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
@ -1,224 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"encoding/json"
|
||||
"io/ioutil"
|
||||
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
"github.com/alice-lg/alice-lg/backend/sources/birdwatcher"
|
||||
)
|
||||
|
||||
//
|
||||
// API Test Helpers
|
||||
//
|
||||
func loadTestRoutesResponse() *api.RoutesResponse {
|
||||
file, err := os.Open("testdata/api/routes_response.json")
|
||||
if err != nil {
|
||||
log.Panic("could not load test data:", err)
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
data, err := ioutil.ReadAll(file)
|
||||
if err != nil {
|
||||
log.Panic("could not read test data:", err)
|
||||
}
|
||||
|
||||
response := &api.RoutesResponse{}
|
||||
err = json.Unmarshal(data, &response)
|
||||
if err != nil {
|
||||
log.Panic("could not unmarshal response test data:", err)
|
||||
}
|
||||
|
||||
return response
|
||||
}
|
||||
|
||||
/*
|
||||
Check for presence of network in result set
|
||||
*/
|
||||
func testCheckPrefixesPresence(prefixes, resultset []string, t *testing.T) {
|
||||
// Check prefixes
|
||||
presence := map[string]bool{}
|
||||
for _, prefix := range prefixes {
|
||||
presence[prefix] = false
|
||||
}
|
||||
|
||||
for _, prefix := range resultset {
|
||||
// Check if prefixes are all accounted for
|
||||
for net, _ := range presence {
|
||||
if prefix == net {
|
||||
presence[net] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for net, present := range presence {
|
||||
if present == false {
|
||||
t.Error(net, "not found in result set")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// Route Store Tests
|
||||
//
|
||||
|
||||
func makeTestRoutesStore() *RoutesStore {
|
||||
rs1RoutesResponse := loadTestRoutesResponse()
|
||||
|
||||
// Build mapping based on source instances:
|
||||
// rs : <response>
|
||||
statusMap := make(map[string]StoreStatus)
|
||||
routesMap := map[string]*api.RoutesResponse{
|
||||
"rs1": rs1RoutesResponse,
|
||||
}
|
||||
|
||||
configMap := map[string]*SourceConfig{
|
||||
"rs1": &SourceConfig{
|
||||
Id: "rs1",
|
||||
Name: "rs1.test",
|
||||
Type: SOURCE_BIRDWATCHER,
|
||||
|
||||
Birdwatcher: birdwatcher.Config{
|
||||
Api: "http://localhost:2342",
|
||||
Timezone: "UTC",
|
||||
ServerTime: "2006-01-02T15:04:05",
|
||||
ServerTimeShort: "2006-01-02",
|
||||
ServerTimeExt: "Mon, 02 Jan 2006 15:04:05 -0700",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
store := &RoutesStore{
|
||||
routesMap: routesMap,
|
||||
statusMap: statusMap,
|
||||
configMap: configMap,
|
||||
}
|
||||
|
||||
return store
|
||||
}
|
||||
|
||||
func TestRoutesStoreStats(t *testing.T) {
|
||||
|
||||
store := makeTestRoutesStore()
|
||||
stats := store.Stats()
|
||||
|
||||
// Check total routes
|
||||
// There should be 8 imported, and 1 filtered route
|
||||
if stats.TotalRoutes.Imported != 8 {
|
||||
t.Error(
|
||||
"expected 8 imported routes, got:",
|
||||
stats.TotalRoutes.Imported,
|
||||
)
|
||||
}
|
||||
|
||||
if stats.TotalRoutes.Filtered != 1 {
|
||||
t.Error(
|
||||
"expected 1 filtered route, got:",
|
||||
stats.TotalRoutes.Filtered,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLookupPrefixAt(t *testing.T) {
|
||||
startTestNeighboursStore()
|
||||
store := makeTestRoutesStore()
|
||||
|
||||
query := "193.200."
|
||||
results := store.LookupPrefixAt("rs1", query)
|
||||
|
||||
prefixes := <-results
|
||||
|
||||
// Check results
|
||||
for _, prefix := range prefixes {
|
||||
if strings.HasPrefix(prefix.Network, query) == false {
|
||||
t.Error(
|
||||
"All network addresses should start with the",
|
||||
"queried prefix",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestLookupPrefix(t *testing.T) {
|
||||
startTestNeighboursStore()
|
||||
store := makeTestRoutesStore()
|
||||
query := "193.200."
|
||||
|
||||
results := store.LookupPrefix(query)
|
||||
|
||||
if len(results) == 0 {
|
||||
t.Error("Expected lookup results. None present.")
|
||||
return
|
||||
}
|
||||
|
||||
// Check results
|
||||
for _, prefix := range results {
|
||||
if strings.HasPrefix(prefix.Network, query) == false {
|
||||
t.Error(
|
||||
"All network addresses should start with the",
|
||||
"queried prefix",
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestLookupNeighboursPrefixesAt(t *testing.T) {
|
||||
startTestNeighboursStore()
|
||||
store := makeTestRoutesStore()
|
||||
|
||||
// Query
|
||||
results := store.LookupNeighboursPrefixesAt("rs1", []string{
|
||||
"ID163_AS31078",
|
||||
})
|
||||
|
||||
// Check prefixes
|
||||
presence := []string{
|
||||
"193.200.230.0/24", "193.34.24.0/22", "31.220.136.0/21",
|
||||
}
|
||||
|
||||
resultset := []string{}
|
||||
for _, prefix := range <-results {
|
||||
resultset = append(resultset, prefix.Network)
|
||||
}
|
||||
|
||||
testCheckPrefixesPresence(presence, resultset, t)
|
||||
}
|
||||
|
||||
func TestLookupPrefixForNeighbours(t *testing.T) {
|
||||
// Construct a neighbours lookup result
|
||||
neighbours := api.NeighboursLookupResults{
|
||||
"rs1": api.Neighbours{
|
||||
&api.Neighbour{
|
||||
Id: "ID163_AS31078",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
startTestNeighboursStore()
|
||||
store := makeTestRoutesStore()
|
||||
|
||||
// Query
|
||||
results := store.LookupPrefixForNeighbours(neighbours)
|
||||
|
||||
// We should have retrieved 8 prefixes,
|
||||
if len(results) != 8 {
|
||||
t.Error("Expected result lenght: 8, got:", len(results))
|
||||
}
|
||||
|
||||
presence := []string{
|
||||
"193.200.230.0/24", "193.34.24.0/22", "31.220.136.0/21",
|
||||
}
|
||||
|
||||
resultset := []string{}
|
||||
for _, prefix := range results {
|
||||
resultset = append(resultset, prefix.Network)
|
||||
}
|
||||
|
||||
testCheckPrefixesPresence(presence, resultset, t)
|
||||
}
|
@ -1,63 +0,0 @@
|
||||
package birdwatcher
|
||||
|
||||
// Http Birdwatcher Client
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
type ClientResponse map[string]interface{}
|
||||
|
||||
type Client struct {
|
||||
Api string
|
||||
}
|
||||
|
||||
func NewClient(api string) *Client {
|
||||
client := &Client{
|
||||
Api: api,
|
||||
}
|
||||
return client
|
||||
}
|
||||
|
||||
// Make API request, parse response and return map or error
|
||||
func (self *Client) Get(client *http.Client, url string) (ClientResponse, error) {
|
||||
res, err := client.Get(url)
|
||||
if err != nil {
|
||||
return ClientResponse{}, err
|
||||
}
|
||||
|
||||
// Read body
|
||||
defer res.Body.Close()
|
||||
payload, err := ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return ClientResponse{}, err
|
||||
}
|
||||
|
||||
// Decode json payload
|
||||
result := make(ClientResponse)
|
||||
err = json.Unmarshal(payload, &result)
|
||||
if err != nil {
|
||||
return ClientResponse{}, err
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// Make API request, parse response and return map or error
|
||||
func (self *Client) GetJson(endpoint string) (ClientResponse, error) {
|
||||
client := &http.Client{}
|
||||
|
||||
return self.Get(client, self.Api + endpoint)
|
||||
}
|
||||
|
||||
// Make API request, parse response and return map or error
|
||||
func (self *Client) GetJsonTimeout(timeout time.Duration, endpoint string) (ClientResponse, error) {
|
||||
client := &http.Client{
|
||||
Timeout: timeout,
|
||||
}
|
||||
|
||||
return self.Get(client, self.Api + endpoint)
|
||||
}
|
@ -1,378 +0,0 @@
|
||||
package birdwatcher
|
||||
|
||||
// Parsers and helpers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"sort"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
)
|
||||
|
||||
// Convert server time string to time
|
||||
func parseServerTime(value interface{}, layout, timezone string) (time.Time, error) {
|
||||
svalue, ok := value.(string)
|
||||
if !ok {
|
||||
return time.Time{}, nil
|
||||
}
|
||||
|
||||
loc, err := time.LoadLocation(timezone)
|
||||
if err != nil {
|
||||
return time.Time{}, err
|
||||
}
|
||||
|
||||
t, err := time.ParseInLocation(layout, svalue, loc)
|
||||
if err != nil {
|
||||
return time.Time{}, err
|
||||
}
|
||||
|
||||
return t.UTC(), nil
|
||||
}
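A hedged example of how parseServerTime is used with the layouts configured for a birdwatcher source; the sample timestamp is made up, and the layout is the ServerTime default from the config handling above.

```go
// Parse a server-supplied timestamp using the default ServerTime layout and timezone
t, err := parseServerTime(
	"2018-06-23T14:42:11Z",                // sample value (assumption)
	"2006-01-02T15:04:05.999999999Z07:00", // ServerTime layout from the config defaults
	"UTC",
)
if err != nil {
	log.Println("could not parse server time:", err)
}
log.Println("server time (UTC):", t)
```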
|
||||
|
||||
// Make api status from response:
|
||||
// The api status is always included in a birdwatcher response
|
||||
func parseApiStatus(bird ClientResponse, config Config) (api.ApiStatus, error) {
|
||||
birdApi, ok := bird["api"].(map[string]interface{})
|
||||
if !ok {
|
||||
// Define error status
|
||||
status := api.ApiStatus{
|
||||
Version: "unknown / error",
|
||||
ResultFromCache: false,
|
||||
Ttl: time.Now(),
|
||||
}
|
||||
|
||||
// Try to retrieve the real error from server
|
||||
birdErr, ok := bird["error"].(string)
|
||||
if !ok {
|
||||
// Unknown error
|
||||
return status, fmt.Errorf("Invalid API response received from server")
|
||||
}
|
||||
|
||||
return status, fmt.Errorf(birdErr)
|
||||
}
|
||||
|
||||
// Parse TTL
|
||||
ttl, err := parseServerTime(
|
||||
bird["ttl"],
|
||||
config.ServerTime,
|
||||
config.Timezone,
|
||||
)
|
||||
if err != nil {
|
||||
return api.ApiStatus{}, err
|
||||
}
|
||||
|
||||
// Parse Cache Status
|
||||
cacheStatus, _ := parseCacheStatus(birdApi, config)
|
||||
|
||||
status := api.ApiStatus{
|
||||
Version: birdApi["Version"].(string),
|
||||
ResultFromCache: birdApi["result_from_cache"].(bool),
|
||||
Ttl: ttl,
|
||||
CacheStatus: cacheStatus,
|
||||
}
|
||||
|
||||
return status, nil
|
||||
}
|
||||
|
||||
// Parse cache status from api response
|
||||
func parseCacheStatus(cacheStatus map[string]interface{}, config Config) (api.CacheStatus, error) {
|
||||
cache, ok := cacheStatus["cache_status"].(map[string]interface{})
|
||||
if !ok {
|
||||
return api.CacheStatus{}, fmt.Errorf("Invalid Cache Status")
|
||||
}
|
||||
|
||||
cachedAt, ok := cache["cached_at"].(map[string]interface{})
|
||||
if !ok {
|
||||
return api.CacheStatus{}, fmt.Errorf("Invalid Cache Status")
|
||||
}
|
||||
|
||||
cachedAtTime, err := parseServerTime(cachedAt["date"], config.ServerTime, config.Timezone)
|
||||
if err != nil {
|
||||
return api.CacheStatus{}, err
|
||||
}
|
||||
|
||||
status := api.CacheStatus{
|
||||
CachedAt: cachedAtTime,
|
||||
// We omit OrigTTL for now...
|
||||
}
|
||||
|
||||
return status, nil
|
||||
}
|
||||
|
||||
// Parse birdwatcher status
|
||||
func parseBirdwatcherStatus(bird ClientResponse, config Config) (api.Status, error) {
|
||||
birdStatus := bird["status"].(map[string]interface{})
|
||||
|
||||
// Get special fields
|
||||
serverTime, _ := parseServerTime(
|
||||
birdStatus["current_server"],
|
||||
config.ServerTimeShort,
|
||||
config.Timezone,
|
||||
)
|
||||
|
||||
lastReboot, _ := parseServerTime(
|
||||
birdStatus["last_reboot"],
|
||||
config.ServerTimeShort,
|
||||
config.Timezone,
|
||||
)
|
||||
|
||||
if config.ShowLastReboot == false {
|
||||
lastReboot = time.Time{}
|
||||
}
|
||||
|
||||
lastReconfig, _ := parseServerTime(
|
||||
birdStatus["last_reconfig"],
|
||||
config.ServerTimeExt,
|
||||
config.Timezone,
|
||||
)
|
||||
|
||||
// Make status response
|
||||
status := api.Status{
|
||||
ServerTime: serverTime,
|
||||
LastReboot: lastReboot,
|
||||
LastReconfig: lastReconfig,
|
||||
Backend: "bird",
|
||||
Version: mustString(birdStatus["version"], "unknown"),
|
||||
Message: mustString(birdStatus["message"], "unknown"),
|
||||
RouterId: mustString(birdStatus["router_id"], "unknown"),
|
||||
}
|
||||
|
||||
return status, nil
|
||||
}
|
||||
|
||||
// Parse neighbour uptime
|
||||
func parseRelativeServerTime(uptime interface{}, config Config) time.Duration {
|
||||
serverTime, _ := parseServerTime(uptime, config.ServerTimeShort, config.Timezone)
|
||||
return time.Since(serverTime)
|
||||
}
|
||||
|
||||
// Parse neighbours response
|
||||
func parseNeighbours(bird ClientResponse, config Config) (api.Neighbours, error) {
|
||||
neighbours := api.Neighbours{}
|
||||
protocols := bird["protocols"].(map[string]interface{})
|
||||
|
||||
// Iterate over protocols map:
|
||||
for protocolId, proto := range protocols {
|
||||
protocol := proto.(map[string]interface{})
|
||||
routes := protocol["routes"].(map[string]interface{})
|
||||
|
||||
uptime := parseRelativeServerTime(protocol["state_changed"], config)
|
||||
lastError := mustString(protocol["last_error"], "")
|
||||
|
||||
routesReceived := float64(0)
|
||||
if routes != nil {
|
||||
if _, ok := routes["imported"]; ok {
|
||||
routesReceived = routesReceived + routes["imported"].(float64)
|
||||
}
|
||||
if _, ok := routes["filtered"]; ok {
|
||||
routesReceived = routesReceived + routes["filtered"].(float64)
|
||||
}
|
||||
}
|
||||
|
||||
neighbour := &api.Neighbour{
|
||||
Id: protocolId,
|
||||
|
||||
Address: mustString(protocol["neighbor_address"], "error"),
|
||||
Asn: mustInt(protocol["neighbor_as"], 0),
|
||||
State: mustString(protocol["state"], "unknown"),
|
||||
Description: mustString(protocol["description"], "no description"),
|
||||
//TODO make these changes configurable
|
||||
RoutesReceived: mustInt(routesReceived, 0),
|
||||
RoutesAccepted: mustInt(routes["imported"], 0),
|
||||
RoutesFiltered: mustInt(routes["filtered"], 0),
|
||||
RoutesExported: mustInt(routes["exported"], 0), //TODO protocol_exported?
|
||||
RoutesPreferred: mustInt(routes["preferred"], 0),
|
||||
|
||||
Uptime: uptime,
|
||||
LastError: lastError,
|
||||
|
||||
Details: protocol,
|
||||
}
|
||||
|
||||
neighbours = append(neighbours, neighbour)
|
||||
}
|
||||
|
||||
sort.Sort(neighbours)
|
||||
|
||||
return neighbours, nil
|
||||
}
|
||||
|
||||
// Parse neighbours response
|
||||
func parseNeighboursShort(bird ClientResponse, config Config) (api.NeighboursStatus, error) {
|
||||
neighbours := api.NeighboursStatus{}
|
||||
protocols := bird["protocols"].(map[string]interface{})
|
||||
|
||||
// Iterate over protocols map:
|
||||
for protocolId, proto := range protocols {
|
||||
protocol := proto.(map[string]interface{})
|
||||
|
||||
uptime := parseRelativeServerTime(protocol["since"], config)
|
||||
|
||||
neighbour := &api.NeighbourStatus{
|
||||
Id: protocolId,
|
||||
State: mustString(protocol["state"], "unknown"),
|
||||
Since: uptime,
|
||||
}
|
||||
|
||||
neighbours = append(neighbours, neighbour)
|
||||
}
|
||||
|
||||
sort.Sort(neighbours)
|
||||
|
||||
return neighbours, nil
|
||||
}
|
||||
|
||||
// Parse route bgp info
|
||||
func parseRouteBgpInfo(data interface{}) api.BgpInfo {
|
||||
bgpData, ok := data.(map[string]interface{})
|
||||
if !ok {
|
||||
// Info is missing
|
||||
return api.BgpInfo{}
|
||||
}
|
||||
|
||||
asPath := mustIntList(bgpData["as_path"])
|
||||
communities := parseBgpCommunities(bgpData["communities"])
|
||||
largeCommunities := parseBgpCommunities(bgpData["large_communities"])
|
||||
extCommunities := parseExtBgpCommunities(bgpData["ext_communities"])
|
||||
|
||||
localPref, _ := strconv.Atoi(mustString(bgpData["local_pref"], "0"))
|
||||
med, _ := strconv.Atoi(mustString(bgpData["med"], "0"))
|
||||
|
||||
bgp := api.BgpInfo{
|
||||
Origin: mustString(bgpData["origin"], "unknown"),
|
||||
AsPath: asPath,
|
||||
NextHop: mustString(bgpData["next_hop"], "unknown"),
|
||||
LocalPref: localPref,
|
||||
Med: med,
|
||||
Communities: communities,
|
||||
ExtCommunities: extCommunities,
|
||||
LargeCommunities: largeCommunities,
|
||||
}
|
||||
return bgp
|
||||
}
|
||||
|
||||
// Extract bgp communities from response
|
||||
func parseBgpCommunities(data interface{}) []api.Community {
|
||||
communities := []api.Community{}
|
||||
|
||||
ldata, ok := data.([]interface{})
|
||||
if !ok { // We don't have any
|
||||
return []api.Community{}
|
||||
}
|
||||
|
||||
for _, c := range ldata {
|
||||
cdata := c.([]interface{})
|
||||
community := api.Community{}
|
||||
for _, cinfo := range cdata {
|
||||
community = append(community, int(cinfo.(float64)))
|
||||
}
|
||||
communities = append(communities, community)
|
||||
}
|
||||
|
||||
return communities
|
||||
}
|
||||
|
||||
// Extract extended communities
|
||||
func parseExtBgpCommunities(data interface{}) []api.ExtCommunity {
|
||||
communities := []api.ExtCommunity{}
|
||||
ldata, ok := data.([]interface{})
|
||||
if !ok { // We don't have any
|
||||
return communities
|
||||
}
|
||||
|
||||
for _, c := range ldata {
|
||||
cdata := c.([]interface{})
|
||||
if len(cdata) != 3 {
|
||||
log.Println("Ignoring malformed ext community:", cdata)
|
||||
continue
|
||||
}
|
||||
communities = append(communities, api.ExtCommunity{
|
||||
cdata[0],
|
||||
cdata[1],
|
||||
cdata[2],
|
||||
})
|
||||
}
|
||||
|
||||
return communities
|
||||
}
|
||||
|
||||
// Parse partial routes response
|
||||
func parseRoutesData(birdRoutes []interface{}, config Config) api.Routes {
|
||||
routes := api.Routes{}
|
||||
|
||||
for _, data := range birdRoutes {
|
||||
rdata := data.(map[string]interface{})
|
||||
|
||||
age := parseRelativeServerTime(rdata["age"], config)
|
||||
rtype := mustStringList(rdata["type"])
|
||||
bgpInfo := parseRouteBgpInfo(rdata["bgp"])
|
||||
|
||||
route := &api.Route{
|
||||
Id: mustString(rdata["network"], "unknown"),
|
||||
NeighbourId: mustString(rdata["from_protocol"], "unknown neighbour"),
|
||||
|
||||
Network: mustString(rdata["network"], "unknown net"),
|
||||
Interface: mustString(rdata["interface"], "unknown interface"),
|
||||
Gateway: mustString(rdata["gateway"], "unknown gateway"),
|
||||
Metric: mustInt(rdata["metric"], -1),
|
||||
Primary: mustBool(rdata["primary"], false),
|
||||
Age: age,
|
||||
Type: rtype,
|
||||
Bgp: bgpInfo,
|
||||
|
||||
Details: rdata,
|
||||
}
|
||||
|
||||
routes = append(routes, route)
|
||||
}
|
||||
return routes
|
||||
}
|
||||
|
||||
// Parse routes response
|
||||
func parseRoutes(bird ClientResponse, config Config) (api.Routes, error) {
|
||||
birdRoutes, ok := bird["routes"].([]interface{})
|
||||
if !ok {
|
||||
return api.Routes{}, fmt.Errorf("Routes response missing")
|
||||
}
|
||||
|
||||
routes := parseRoutesData(birdRoutes, config)
|
||||
|
||||
// Sort routes
|
||||
sort.Sort(routes)
|
||||
return routes, nil
|
||||
}
|
||||
|
||||
func parseRoutesDump(bird ClientResponse, config Config) (*api.RoutesResponse, error) {
|
||||
result := &api.RoutesResponse{}
|
||||
|
||||
apiStatus, err := parseApiStatus(bird, config)
|
||||
if err != nil {
|
||||
return result, err
|
||||
}
|
||||
result.Api = apiStatus
|
||||
|
||||
// Fetch imported routes
|
||||
importedRoutes, ok := bird["imported"].([]interface{})
|
||||
if !ok {
|
||||
return result, fmt.Errorf("Imported routes missing")
|
||||
}
|
||||
|
||||
// Sort routes by network for faster querying
|
||||
imported := parseRoutesData(importedRoutes, config)
|
||||
sort.Sort(imported)
|
||||
result.Imported = imported
|
||||
|
||||
// Fetch filtered routes
|
||||
filteredRoutes, ok := bird["filtered"].([]interface{})
|
||||
if !ok {
|
||||
return result, fmt.Errorf("Filtered routes missing")
|
||||
}
|
||||
filtered := parseRoutesData(filteredRoutes, config)
|
||||
sort.Sort(filtered)
|
||||
result.Filtered = filtered
|
||||
|
||||
return result, nil
|
||||
}
|
@ -1,301 +0,0 @@
|
||||
package birdwatcher
|
||||
|
||||
import (
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
"github.com/alice-lg/alice-lg/backend/caches"
|
||||
"github.com/alice-lg/alice-lg/backend/sources"
|
||||
|
||||
"fmt"
|
||||
"sort"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Birdwatcher interface {
|
||||
sources.Source
|
||||
}
|
||||
|
||||
type GenericBirdwatcher struct {
|
||||
config Config
|
||||
client *Client
|
||||
|
||||
// Caches: Neighbors
|
||||
neighborsCache *caches.NeighborsCache
|
||||
|
||||
// Caches: Routes
|
||||
routesRequiredCache *caches.RoutesCache
|
||||
routesNotExportedCache *caches.RoutesCache
|
||||
|
||||
// Mutexes:
|
||||
routesFetchMutex *LockMap
|
||||
}
|
||||
|
||||
func NewBirdwatcher(config Config) Birdwatcher {
|
||||
client := NewClient(config.Api)
|
||||
|
||||
// Cache settings:
|
||||
// TODO: Maybe read from config file
|
||||
neighborsCacheDisable := false
|
||||
|
||||
routesCacheDisabled := false
|
||||
routesCacheMaxSize := 128
|
||||
|
||||
// Initialize caches
|
||||
neighborsCache := caches.NewNeighborsCache(neighborsCacheDisable)
|
||||
routesRequiredCache := caches.NewRoutesCache(
|
||||
routesCacheDisabled, routesCacheMaxSize)
|
||||
routesNotExportedCache := caches.NewRoutesCache(
|
||||
routesCacheDisabled, routesCacheMaxSize)
|
||||
|
||||
var birdwatcher Birdwatcher
|
||||
|
||||
if config.Type == "single_table" {
|
||||
singleTableBirdwatcher := new(SingleTableBirdwatcher)
|
||||
|
||||
singleTableBirdwatcher.config = config
|
||||
singleTableBirdwatcher.client = client
|
||||
|
||||
singleTableBirdwatcher.neighborsCache = neighborsCache
|
||||
|
||||
singleTableBirdwatcher.routesRequiredCache = routesRequiredCache
|
||||
singleTableBirdwatcher.routesNotExportedCache = routesNotExportedCache
|
||||
|
||||
singleTableBirdwatcher.routesFetchMutex = NewLockMap()
|
||||
|
||||
birdwatcher = singleTableBirdwatcher
|
||||
} else if config.Type == "multi_table" {
|
||||
multiTableBirdwatcher := new(MultiTableBirdwatcher)
|
||||
|
||||
multiTableBirdwatcher.config = config
|
||||
multiTableBirdwatcher.client = client
|
||||
|
||||
multiTableBirdwatcher.neighborsCache = neighborsCache
|
||||
|
||||
multiTableBirdwatcher.routesRequiredCache = routesRequiredCache
|
||||
multiTableBirdwatcher.routesNotExportedCache = routesNotExportedCache
|
||||
|
||||
multiTableBirdwatcher.routesFetchMutex = NewLockMap()
|
||||
|
||||
birdwatcher = multiTableBirdwatcher
|
||||
}
|
||||
|
||||
return birdwatcher
|
||||
}
|
||||
|
||||
func (self *GenericBirdwatcher) filterProtocols(protocols map[string]interface{}, protocol string) map[string]interface{} {
|
||||
response := make(map[string]interface{})
|
||||
response["protocols"] = make(map[string]interface{})
|
||||
|
||||
for protocolId, protocolData := range protocols {
|
||||
if protocolData.(map[string]interface{})["bird_protocol"] == protocol {
|
||||
response["protocols"].(map[string]interface{})[protocolId] = protocolData
|
||||
}
|
||||
}
|
||||
|
||||
return response
|
||||
}
|
||||
|
||||
func (self *GenericBirdwatcher) filterProtocolsBgp(bird ClientResponse) map[string]interface{} {
|
||||
return self.filterProtocols(bird["protocols"].(map[string]interface{}), "BGP")
|
||||
}
|
||||
|
||||
func (self *GenericBirdwatcher) filterProtocolsPipe(bird ClientResponse) map[string]interface{} {
|
||||
return self.filterProtocols(bird["protocols"].(map[string]interface{}), "Pipe")
|
||||
}
|
||||
|
||||
func (self *GenericBirdwatcher) filterRoutesByPeerOrLearntFrom(routes api.Routes, peer string, learntFrom string) api.Routes {
|
||||
result_routes := make(api.Routes, 0, len(routes))
|
||||
|
||||
// Choose routes with next_hop == gateway of this neighbour
|
||||
for _, route := range routes {
|
||||
if (route.Gateway == peer) ||
|
||||
(route.Gateway == learntFrom) ||
|
||||
(route.Details["learnt_from"] == peer) {
|
||||
result_routes = append(result_routes, route)
|
||||
}
|
||||
}
|
||||
|
||||
// Sort routes for deterministic ordering
|
||||
sort.Sort(result_routes)
|
||||
routes = result_routes
|
||||
|
||||
return routes
|
||||
}
|
||||
|
||||
func (self *GenericBirdwatcher) filterRoutesByDuplicates(routes api.Routes, filterRoutes api.Routes) api.Routes {
|
||||
result_routes := make(api.Routes, 0, len(routes))
|
||||
|
||||
routesMap := make(map[string]*api.Route) // for O(1) access
|
||||
for _, route := range routes {
|
||||
routesMap[route.Id] = route
|
||||
}
|
||||
|
||||
// Remove routes from "routes" that are contained within filterRoutes
|
||||
for _, filterRoute := range filterRoutes {
|
||||
if _, ok := routesMap[filterRoute.Id]; ok {
|
||||
delete(routesMap, filterRoute.Id)
|
||||
}
|
||||
}
|
||||
|
||||
for _, route := range routesMap {
|
||||
result_routes = append(result_routes, route)
|
||||
}
|
||||
|
||||
// Sort routes for deterministic ordering
|
||||
sort.Sort(result_routes)
|
||||
routes = result_routes
|
||||
|
||||
return routes
|
||||
}
|
||||
|
||||
func (self *GenericBirdwatcher) filterRoutesByNeighborId(routes api.Routes, neighborId string) api.Routes {
|
||||
result_routes := make(api.Routes, 0, len(routes))
|
||||
|
||||
// Choose routes with next_hop == gateway of this neighbour
|
||||
for _, route := range routes {
|
||||
if route.Details["from_protocol"] == neighborId {
|
||||
result_routes = append(result_routes, route)
|
||||
}
|
||||
}
|
||||
|
||||
// Sort routes for deterministic ordering
|
||||
sort.Sort(result_routes)
|
||||
routes = result_routes
|
||||
|
||||
return routes
|
||||
}
|
||||
|
||||
func (self *GenericBirdwatcher) fetchProtocolsShort() (*api.ApiStatus, map[string]interface{}, error) {
|
||||
// Query birdwatcher
|
||||
timeout := 2 * time.Second
|
||||
if self.config.NeighborsRefreshTimeout > 0 {
|
||||
timeout = time.Duration(self.config.NeighborsRefreshTimeout) * time.Second
|
||||
}
|
||||
bird, err := self.client.GetJsonTimeout(timeout, "/protocols/short?uncached=true")
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Use api status from first request
|
||||
apiStatus, err := parseApiStatus(bird, self.config)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
if _, ok := bird["protocols"]; !ok {
|
||||
return nil, nil, fmt.Errorf("Failed to fetch protocols")
|
||||
}
|
||||
|
||||
return &apiStatus, bird, nil
|
||||
}
|
||||
|
||||
func (self *GenericBirdwatcher) ExpireCaches() int {
|
||||
count := self.routesRequiredCache.Expire()
|
||||
count += self.routesNotExportedCache.Expire()
|
||||
|
||||
return count
|
||||
}
|
||||
|
||||
func (self *GenericBirdwatcher) Status() (*api.StatusResponse, error) {
|
||||
// Query birdwatcher
|
||||
bird, err := self.client.GetJson("/status")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Use api status from first request
|
||||
apiStatus, err := parseApiStatus(bird, self.config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Parse the status
|
||||
birdStatus, err := parseBirdwatcherStatus(bird, self.config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response := &api.StatusResponse{
|
||||
Api: apiStatus,
|
||||
Status: birdStatus,
|
||||
}
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Get live neighbor status
|
||||
func (self *GenericBirdwatcher) NeighboursStatus() (*api.NeighboursStatusResponse, error) {
|
||||
// Query birdwatcher
|
||||
apiStatus, birdProtocols, err := self.fetchProtocolsShort()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Parse the neighbors short
|
||||
neighbours, err := parseNeighboursShort(birdProtocols, self.config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response := &api.NeighboursStatusResponse{
|
||||
Api: *apiStatus,
|
||||
Neighbours: neighbours,
|
||||
}
|
||||
|
||||
return response, nil // dereference for now
|
||||
}
|
||||
|
||||
// Make routes lookup
|
||||
func (self *GenericBirdwatcher) LookupPrefix(prefix string) (*api.RoutesLookupResponse, error) {
|
||||
// Get RS info
|
||||
rs := api.Routeserver{
|
||||
Id: self.config.Id,
|
||||
Name: self.config.Name,
|
||||
}
|
||||
|
||||
// Query prefix on RS
|
||||
bird, err := self.client.GetJson("/routes/prefix?prefix=" + prefix)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Parse API status
|
||||
apiStatus, err := parseApiStatus(bird, self.config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Parse routes
|
||||
routes, err := parseRoutes(bird, self.config)
if err != nil {
return nil, err
}
|
||||
|
||||
// Add corresponding neighbour and source rs to result
|
||||
results := api.LookupRoutes{}
|
||||
for _, src := range routes {
|
||||
// Okay. This is actually really hacky.
|
||||
// A less bruteforce approach would be highly appreciated
|
||||
route := &api.LookupRoute{
|
||||
Id: src.Id,
|
||||
|
||||
Routeserver: rs,
|
||||
|
||||
NeighbourId: src.NeighbourId,
|
||||
|
||||
Network: src.Network,
|
||||
Interface: src.Interface,
|
||||
Gateway: src.Gateway,
|
||||
Metric: src.Metric,
|
||||
Bgp: src.Bgp,
|
||||
Age: src.Age,
|
||||
Type: src.Type,
|
||||
|
||||
Details: src.Details,
|
||||
}
|
||||
results = append(results, route)
|
||||
}
|
||||
|
||||
// Make result
|
||||
response := &api.RoutesLookupResponse{
|
||||
Api: apiStatus,
|
||||
Routes: results,
|
||||
}
|
||||
return response, nil
|
||||
}
|
@ -1,527 +0,0 @@
|
||||
package birdwatcher
|
||||
|
||||
import (
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
|
||||
"strings"
|
||||
|
||||
"fmt"
|
||||
"sort"
|
||||
"log"
|
||||
)
|
||||
|
||||
|
||||
type MultiTableBirdwatcher struct {
|
||||
GenericBirdwatcher
|
||||
}
|
||||
|
||||
|
||||
func (self *MultiTableBirdwatcher) getMasterPipeName(table string) string {
|
||||
if strings.HasPrefix(table, self.config.PeerTablePrefix) {
|
||||
return self.config.PipeProtocolPrefix + table[1:]
|
||||
} else {
|
||||
return ""
|
||||
}
|
||||
}
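// Illustrative note (not part of the original source): the peer table name is
// turned into its master pipe name by replacing the first character with the
// pipe protocol prefix, which implicitly assumes a one-character
// PeerTablePrefix. With hypothetical config values PeerTablePrefix = "T" and
// PipeProtocolPrefix = "M":
//
//	getMasterPipeName("T1234")  // -> "M1234"
//	getMasterPipeName("master") // -> ""  (not a peer table)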
|
||||
|
||||
func (self *MultiTableBirdwatcher) parseProtocolToTableTree(bird ClientResponse) map[string]interface{} {
|
||||
protocols := bird["protocols"].(map[string]interface{})
|
||||
|
||||
response := make(map[string]interface{})
|
||||
|
||||
for _, protocolData := range protocols {
|
||||
protocol := protocolData.(map[string]interface{})
|
||||
|
||||
if protocol["bird_protocol"] == "BGP" {
|
||||
table := protocol["table"].(string)
|
||||
neighborAddress := protocol["neighbor_address"].(string)
|
||||
|
||||
if _, ok := response[table]; !ok {
|
||||
response[table] = make(map[string]interface{})
|
||||
}
|
||||
|
||||
if _, ok := response[table].(map[string]interface{})[neighborAddress]; !ok {
|
||||
response[table].(map[string]interface{})[neighborAddress] = make(map[string]interface{})
|
||||
}
|
||||
|
||||
response[table].(map[string]interface{})[neighborAddress] = protocol
|
||||
}
|
||||
}
|
||||
|
||||
return response
|
||||
}
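// Illustrative note (not part of the original source): the resulting tree is
// keyed by table and neighbor address, roughly like this (values abridged,
// names hypothetical):
//
//	{
//	  "T1234": {"203.0.113.1": {"bird_protocol": "BGP", "table": "T1234", ...}},
//	  "T5678": {"203.0.113.2": {...}},
//	}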
|
||||
|
||||
|
||||
func (self *MultiTableBirdwatcher) fetchProtocols() (*api.ApiStatus, map[string]interface{}, error) {
|
||||
// Query birdwatcher
|
||||
bird, err := self.client.GetJson("/protocols")
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Use api status from first request
|
||||
apiStatus, err := parseApiStatus(bird, self.config)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
if _, ok := bird["protocols"]; !ok {
|
||||
return nil, nil, fmt.Errorf("Failed to fetch protocols")
|
||||
}
|
||||
|
||||
return &apiStatus, bird, nil
|
||||
}
|
||||
|
||||
func (self *MultiTableBirdwatcher) fetchReceivedRoutes(neighborId string) (*api.ApiStatus, api.Routes, error) {
|
||||
// Query birdwatcher
|
||||
_, birdProtocols, err := self.fetchProtocols()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
protocols := birdProtocols["protocols"].(map[string]interface{})
|
||||
|
||||
if _, ok := protocols[neighborId]; !ok {
|
||||
return nil, nil, fmt.Errorf("Invalid Neighbor")
|
||||
}
|
||||
|
||||
peer := protocols[neighborId].(map[string]interface{})["neighbor_address"].(string)
|
||||
|
||||
// Query birdwatcher
|
||||
bird, err := self.client.GetJson("/routes/peer/" + peer)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Use api status from first request
|
||||
apiStatus, err := parseApiStatus(bird, self.config)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Parse the routes
|
||||
received, err := parseRoutes(bird, self.config)
|
||||
if err != nil {
|
||||
log.Println("WARNING Could not retrieve received routes:", err)
|
||||
log.Println("Is the 'routes_peer' module active in birdwatcher?")
|
||||
return &apiStatus, nil, err
|
||||
}
|
||||
|
||||
return &apiStatus, received, nil
|
||||
}
|
||||
|
||||
func (self *MultiTableBirdwatcher) fetchFilteredRoutes(neighborId string) (*api.ApiStatus, api.Routes, error) {
|
||||
// Query birdwatcher
|
||||
_, birdProtocols, err := self.fetchProtocols()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
protocols := birdProtocols["protocols"].(map[string]interface{})
|
||||
|
||||
if _, ok := protocols[neighborId]; !ok {
|
||||
return nil, nil, fmt.Errorf("Invalid Neighbor")
|
||||
}
|
||||
|
||||
// Stage 1 filters
|
||||
birdFiltered, err := self.client.GetJson("/routes/filtered/" + neighborId)
|
||||
if err != nil {
|
||||
log.Println("WARNING Could not retrieve filtered routes:", err)
|
||||
log.Println("Is the 'routes_filtered' module active in birdwatcher?")
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Use api status from first request
|
||||
apiStatus, err := parseApiStatus(birdFiltered, self.config)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Parse the routes
|
||||
filtered := parseRoutesData(birdFiltered["routes"].([]interface{}), self.config)
|
||||
|
||||
// Stage 2 filters
|
||||
table := protocols[neighborId].(map[string]interface{})["table"].(string)
|
||||
pipeName := self.getMasterPipeName(table)
|
||||
|
||||
// If there is no pipe to master, there is nothing left to do
|
||||
if pipeName == "" {
|
||||
return &apiStatus, filtered, nil
|
||||
}
|
||||
|
||||
// Query birdwatcher
|
||||
birdPipeFiltered, err := self.client.GetJson("/routes/pipe/filtered/?table=" + table + "&pipe=" + pipeName)
|
||||
if err != nil {
|
||||
log.Println("WARNING Could not retrieve filtered routes:", err)
|
||||
log.Println("Is the 'pipe_filtered' module active in birdwatcher?")
|
||||
return &apiStatus, nil, err
|
||||
}
|
||||
|
||||
// Parse the routes
|
||||
pipeFiltered := parseRoutesData(birdPipeFiltered["routes"].([]interface{}), self.config)
|
||||
|
||||
// Sort routes for deterministic ordering
|
||||
filtered = append(filtered, pipeFiltered...)
|
||||
sort.Sort(filtered)
|
||||
|
||||
return &apiStatus, filtered, nil
|
||||
}
|
||||
|
||||
func (self *MultiTableBirdwatcher) fetchNotExportedRoutes(neighborId string) (*api.ApiStatus, api.Routes, error) {
|
||||
// Query birdwatcher
|
||||
_, birdProtocols, err := self.fetchProtocols()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
protocols := birdProtocols["protocols"].(map[string]interface{})
|
||||
|
||||
if _, ok := protocols[neighborId]; !ok {
|
||||
return nil, nil, fmt.Errorf("Invalid Neighbor")
|
||||
}
|
||||
|
||||
table := protocols[neighborId].(map[string]interface{})["table"].(string)
|
||||
pipeName := self.getMasterPipeName(table)
|
||||
|
||||
// Query birdwatcher
|
||||
bird, err := self.client.GetJson("/routes/noexport/" + pipeName)
|
||||
|
||||
// Use api status from first request
|
||||
apiStatus, err := parseApiStatus(bird, self.config)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
notExported, err := parseRoutes(bird, self.config)
|
||||
if err != nil {
|
||||
log.Println("WARNING Could not retrieve routes not exported:", err)
|
||||
log.Println("Is the 'routes_noexport' module active in birdwatcher?")
|
||||
}
|
||||
|
||||
return &apiStatus, notExported, nil
|
||||
}
|
||||
|
||||
/*
|
||||
RoutesRequired is a specialized request to fetch:
|
||||
|
||||
- RoutesExported and
|
||||
- RoutesFiltered
|
||||
|
||||
from Birdwatcher. As the not exported routes can be very many
|
||||
these are optional and can be loaded on demand using the
|
||||
RoutesNotExported() API.
|
||||
|
||||
A route deduplication is applied.
|
||||
*/
|
||||
func (self *MultiTableBirdwatcher) fetchRequiredRoutes(neighborId string) (*api.RoutesResponse, error) {
|
||||
// Allow only one concurrent request for this neighbor
|
||||
// to our backend server.
|
||||
self.routesFetchMutex.Lock(neighborId)
|
||||
defer self.routesFetchMutex.Unlock(neighborId)
|
||||
|
||||
// Check if we have a cache hit
|
||||
response := self.routesRequiredCache.Get(neighborId)
|
||||
if response != nil {
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// First: get routes received
|
||||
apiStatus, receivedRoutes, err := self.fetchReceivedRoutes(neighborId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Second: get routes filtered
|
||||
_, filteredRoutes, err := self.fetchFilteredRoutes(neighborId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Perform route deduplication
|
||||
importedRoutes := api.Routes{}
|
||||
if len(receivedRoutes) > 0 {
|
||||
peer := receivedRoutes[0].Gateway
|
||||
learntFrom := mustString(receivedRoutes[0].Details["learnt_from"], peer)
|
||||
|
||||
filteredRoutes = self.filterRoutesByPeerOrLearntFrom(filteredRoutes, peer, learntFrom)
|
||||
importedRoutes = self.filterRoutesByDuplicates(receivedRoutes, filteredRoutes)
|
||||
}
|
||||
|
||||
response = &api.RoutesResponse{
|
||||
Api: *apiStatus,
|
||||
Imported: importedRoutes,
|
||||
Filtered: filteredRoutes,
|
||||
}
|
||||
|
||||
// Cache result
|
||||
self.routesRequiredCache.Set(neighborId, response)
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
|
||||
// Get neighbors from protocols
|
||||
func (self *MultiTableBirdwatcher) Neighbours() (*api.NeighboursResponse, error) {
|
||||
// Check if we hit the cache
|
||||
response := self.neighborsCache.Get()
|
||||
if response != nil {
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Query birdwatcher
|
||||
apiStatus, birdProtocols, err := self.fetchProtocols()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Parse the neighbors
|
||||
neighbours, err := parseNeighbours(self.filterProtocolsBgp(birdProtocols), self.config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
pipes := self.filterProtocolsPipe(birdProtocols)["protocols"].(map[string]interface{})
|
||||
tree := self.parseProtocolToTableTree(birdProtocols)
|
||||
|
||||
// Now determine the session count for each neighbor and check if the pipe
|
||||
// did filter anything
|
||||
filtered := make(map[string]int)
|
||||
for table := range tree {
|
||||
allRoutesImported := int64(0)
|
||||
pipeRoutesImported := int64(0)
|
||||
|
||||
// Sum up all routes from all peers for a table
|
||||
for _, protocol := range tree[table].(map[string]interface{}) {
|
||||
// Skip peers that are not up (start/down)
|
||||
if protocol.(map[string]interface{})["state"].(string) != "up" {
|
||||
continue
|
||||
}
|
||||
allRoutesImported += int64(protocol.(map[string]interface{})["routes"].(map[string]interface{})["imported"].(float64))
|
||||
|
||||
pipeName := self.getMasterPipeName(table)
|
||||
|
||||
if _, ok := pipes[pipeName]; ok {
|
||||
if _, ok := pipes[pipeName].(map[string]interface{})["routes"].(map[string]interface{})["imported"]; ok {
|
||||
pipeRoutesImported = int64(pipes[pipeName].(map[string]interface{})["routes"].(map[string]interface{})["imported"].(float64))
|
||||
} else {
|
||||
continue
|
||||
}
|
||||
} else {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// If no routes were imported, there is nothing left to filter
|
||||
if allRoutesImported == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
// If the pipe did not filter anything, there is nothing left to do
|
||||
if pipeRoutesImported == allRoutesImported {
|
||||
continue
|
||||
}
|
||||
|
||||
if len(tree[table].(map[string]interface{})) == 1 {
|
||||
// Single router
|
||||
for _, protocol := range tree[table].(map[string]interface{}) {
|
||||
filtered[protocol.(map[string]interface{})["protocol"].(string)] = int(allRoutesImported-pipeRoutesImported)
|
||||
}
|
||||
} else {
|
||||
// Multiple routers
|
||||
if pipeRoutesImported == 0 {
|
||||
// 0 is a special condition, which means that the pipe did filter ALL routes of
|
||||
// all peers. Therefore we already know the amount of filtered routes and don't have
|
||||
// to query birdwatcher again.
|
||||
for _, protocol := range tree[table].(map[string]interface{}) {
|
||||
// Skip peers that are not up (start/down)
|
||||
if protocol.(map[string]interface{})["state"].(string) != "up" {
|
||||
continue
|
||||
}
|
||||
filtered[protocol.(map[string]interface{})["protocol"].(string)] = int(protocol.(map[string]interface{})["routes"].(map[string]interface{})["imported"].(float64))
|
||||
}
|
||||
} else {
|
||||
// Otherwise the pipe did import at least some routes which means that
|
||||
// we have to query birdwatcher to get the count for each peer.
|
||||
for neighborAddress, protocol := range tree[table].(map[string]interface{}) {
|
||||
table := protocol.(map[string]interface{})["table"].(string)
|
||||
pipe := self.getMasterPipeName(table)
|
||||
|
||||
count, err := self.client.GetJson("/routes/pipe/filtered/count?table=" + table + "&pipe=" + pipe + "&address=" + neighborAddress)
|
||||
if err != nil {
|
||||
log.Println("WARNING Could not retrieve filtered routes count:", err)
|
||||
log.Println("Is the 'pipe_filtered_count' module active in birdwatcher?")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if _, ok := count["routes"]; ok {
|
||||
filtered[protocol.(map[string]interface{})["protocol"].(string)] = int(count["routes"].(float64))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Update the results with the information about filtered routes from the pipe
|
||||
for _, neighbor := range neighbours {
|
||||
if pipeRoutesFiltered, ok := filtered[neighbor.Id]; ok {
|
||||
neighbor.RoutesAccepted -= pipeRoutesFiltered
|
||||
neighbor.RoutesFiltered += pipeRoutesFiltered
|
||||
}
|
||||
}
|
||||
|
||||
response = &api.NeighboursResponse{
|
||||
Api: *apiStatus,
|
||||
Neighbours: neighbours,
|
||||
}
|
||||
|
||||
// Cache result
|
||||
self.neighborsCache.Set(response)
|
||||
|
||||
return response, nil // dereference for now
|
||||
}
|
||||
|
||||
// Get filtered and exported routes
|
||||
func (self *MultiTableBirdwatcher) Routes(neighbourId string) (*api.RoutesResponse, error) {
|
||||
response := &api.RoutesResponse{}
|
||||
// Fetch required routes first (received and filtered)
|
||||
// However: Store in separate cache for faster access
|
||||
required, err := self.fetchRequiredRoutes(neighbourId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Optional: NoExport
|
||||
_, notExported, err := self.fetchNotExportedRoutes(neighbourId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response.Api = required.Api
|
||||
response.Imported = required.Imported
|
||||
response.Filtered = required.Filtered
|
||||
response.NotExported = notExported
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Get all received routes
|
||||
func (self *MultiTableBirdwatcher) RoutesReceived(neighborId string) (*api.RoutesResponse, error) {
|
||||
response := &api.RoutesResponse{}
|
||||
|
||||
// Check if we have a cache hit
|
||||
cachedRoutes := self.routesRequiredCache.Get(neighborId)
|
||||
if cachedRoutes != nil {
|
||||
response.Api = cachedRoutes.Api
|
||||
response.Imported = cachedRoutes.Imported
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Fetch required routes first (received and filtered)
|
||||
routes, err := self.fetchRequiredRoutes(neighborId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response.Api = routes.Api
|
||||
response.Imported = routes.Imported
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Get all filtered routes
|
||||
func (self *MultiTableBirdwatcher) RoutesFiltered(neighborId string) (*api.RoutesResponse, error) {
|
||||
response := &api.RoutesResponse{}
|
||||
|
||||
// Check if we have a cache hit
|
||||
cachedRoutes := self.routesRequiredCache.Get(neighborId)
|
||||
if cachedRoutes != nil {
|
||||
response.Api = cachedRoutes.Api
|
||||
response.Filtered = cachedRoutes.Filtered
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Fetch required routes first (received and filtered)
|
||||
routes, err := self.fetchRequiredRoutes(neighborId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response.Api = routes.Api
|
||||
response.Filtered = routes.Filtered
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Get all not exported routes
|
||||
func (self *MultiTableBirdwatcher) RoutesNotExported(neighborId string) (*api.RoutesResponse, error) {
|
||||
// Check if we have a cache hit
|
||||
response := self.routesNotExportedCache.Get(neighborId)
|
||||
if response != nil {
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Fetch not exported routes
|
||||
apiStatus, routes, err := self.fetchNotExportedRoutes(neighborId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response = &api.RoutesResponse{
|
||||
Api: *apiStatus,
|
||||
NotExported: routes,
|
||||
}
|
||||
|
||||
// Cache result
|
||||
self.routesNotExportedCache.Set(neighborId, response)
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
func (self *MultiTableBirdwatcher) AllRoutes() (*api.RoutesResponse, error) {
|
||||
// Query birdwatcher
|
||||
_, birdProtocols, err := self.fetchProtocols()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Fetch received routes first
|
||||
birdImported, err := self.client.GetJson("/routes/table/master")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Use api status from first request
|
||||
apiStatus, err := parseApiStatus(birdImported, self.config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response := &api.RoutesResponse{
|
||||
Api: apiStatus,
|
||||
}
|
||||
|
||||
// Parse the routes
|
||||
imported := parseRoutesData(birdImported["routes"].([]interface{}), self.config)
|
||||
// Sort routes for deterministic ordering
|
||||
sort.Sort(imported)
|
||||
response.Imported = imported
|
||||
|
||||
// Iterate over all the protocols and fetch the filtered routes for everyone
|
||||
protocolsBgp := self.filterProtocolsBgp(birdProtocols)
|
||||
for protocolId, protocolsData := range protocolsBgp["protocols"].(map[string]interface{}) {
|
||||
peer := protocolsData.(map[string]interface{})["neighbor_address"].(string)
|
||||
learntFrom := mustString(protocolsData.(map[string]interface{})["learnt_from"], peer)
|
||||
|
||||
// Fetch filtered routes
|
||||
_, filtered, err := self.fetchFilteredRoutes(protocolId)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// Perform route deduplication
|
||||
filtered = self.filterRoutesByPeerOrLearntFrom(filtered, peer, learntFrom)
|
||||
response.Filtered = append(response.Filtered, filtered...)
|
||||
}
|
||||
|
||||
return response, nil
|
||||
}
|
@ -1,315 +0,0 @@
|
||||
package birdwatcher
|
||||
|
||||
import (
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
|
||||
"log"
|
||||
"sort"
|
||||
)
|
||||
|
||||
|
||||
type SingleTableBirdwatcher struct {
|
||||
GenericBirdwatcher
|
||||
}
|
||||
|
||||
|
||||
func (self *SingleTableBirdwatcher) fetchReceivedRoutes(neighborId string) (*api.ApiStatus, api.Routes, error) {
|
||||
// Query birdwatcher
|
||||
bird, err := self.client.GetJson("/routes/protocol/" + neighborId)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Use api status from first request
|
||||
apiStatus, err := parseApiStatus(bird, self.config)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Parse the routes
|
||||
received, err := parseRoutes(bird, self.config)
|
||||
if err != nil {
|
||||
log.Println("WARNING Could not retrieve received routes:", err)
|
||||
log.Println("Is the 'routes_protocol' module active in birdwatcher?")
|
||||
return &apiStatus, nil, err
|
||||
}
|
||||
|
||||
return &apiStatus, received, nil
|
||||
}
|
||||
|
||||
func (self *SingleTableBirdwatcher) fetchFilteredRoutes(neighborId string) (*api.ApiStatus, api.Routes, error) {
|
||||
// Query birdwatcher
|
||||
bird, err := self.client.GetJson("/routes/filtered/" + neighborId)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Use api status from first request
|
||||
apiStatus, err := parseApiStatus(bird, self.config)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Parse the routes
|
||||
filtered, err := parseRoutes(bird, self.config)
|
||||
if err != nil {
|
||||
log.Println("WARNING Could not retrieve filtered routes:", err)
|
||||
log.Println("Is the 'routes_filtered' module active in birdwatcher?")
|
||||
return &apiStatus, nil, err
|
||||
}
|
||||
|
||||
return &apiStatus, filtered, nil
|
||||
}
|
||||
|
||||
func (self *SingleTableBirdwatcher) fetchNotExportedRoutes(neighborId string) (*api.ApiStatus, api.Routes, error) {
|
||||
// Query birdwatcher
|
||||
bird, err := self.client.GetJson("/routes/noexport/" + neighborId)
|
||||
|
||||
// Use api status from first request
|
||||
apiStatus, err := parseApiStatus(bird, self.config)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Parse the routes
|
||||
notExported, err := parseRoutes(bird, self.config)
|
||||
if err != nil {
|
||||
log.Println("WARNING Could not retrieve routes not exported:", err)
|
||||
log.Println("Is the 'routes_noexport' module active in birdwatcher?")
|
||||
}
|
||||
|
||||
return &apiStatus, notExported, nil
|
||||
}
|
||||
|
||||
/*
|
||||
RoutesRequired is a specialized request to fetch:
|
||||
|
||||
- RoutesExported and
|
||||
- RoutesFiltered
|
||||
|
||||
from Birdwatcher. As the not exported routes can be very many
|
||||
these are optional and can be loaded on demand using the
|
||||
RoutesNotExported() API.
|
||||
|
||||
A route deduplication is applied.
|
||||
*/
|
||||
func (self *SingleTableBirdwatcher) fetchRequiredRoutes(neighborId string) (*api.RoutesResponse, error) {
|
||||
// Allow only one concurrent request for this neighbor
|
||||
// to our backend server.
|
||||
self.routesFetchMutex.Lock(neighborId)
|
||||
defer self.routesFetchMutex.Unlock(neighborId)
|
||||
|
||||
// Check if we have a cache hit
|
||||
response := self.routesRequiredCache.Get(neighborId)
|
||||
if response != nil {
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// First: get routes received
|
||||
apiStatus, receivedRoutes, err := self.fetchReceivedRoutes(neighborId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Second: get routes filtered
|
||||
_, filteredRoutes, err := self.fetchFilteredRoutes(neighborId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Perform route deduplication
|
||||
importedRoutes := api.Routes{}
|
||||
if len(receivedRoutes) > 0 {
|
||||
peer := receivedRoutes[0].Gateway
|
||||
learntFrom := mustString(receivedRoutes[0].Details["learnt_from"], peer)
|
||||
|
||||
filteredRoutes = self.filterRoutesByPeerOrLearntFrom(filteredRoutes, peer, learntFrom)
|
||||
importedRoutes = self.filterRoutesByDuplicates(receivedRoutes, filteredRoutes)
|
||||
}
|
||||
|
||||
response = &api.RoutesResponse{
|
||||
Api: *apiStatus,
|
||||
Imported: importedRoutes,
|
||||
Filtered: filteredRoutes,
|
||||
}
|
||||
|
||||
// Cache result
|
||||
self.routesRequiredCache.Set(neighborId, response)
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
|
||||
// Get neighbors from protocols
|
||||
func (self *SingleTableBirdwatcher) Neighbours() (*api.NeighboursResponse, error) {
|
||||
// Check if we hit the cache
|
||||
response := self.neighborsCache.Get()
|
||||
if response != nil {
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Query birdwatcher
|
||||
bird, err := self.client.GetJson("/protocols/bgp")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Use api status from first request
|
||||
apiStatus, err := parseApiStatus(bird, self.config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Parse the neighbors
|
||||
neighbours, err := parseNeighbours(bird, self.config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response = &api.NeighboursResponse{
|
||||
Api: apiStatus,
|
||||
Neighbours: neighbours,
|
||||
}
|
||||
|
||||
// Cache result
|
||||
self.neighborsCache.Set(response)
|
||||
|
||||
return response, nil // dereference for now
|
||||
}
|
||||
|
||||
// Get filtered and exported routes
|
||||
func (self *SingleTableBirdwatcher) Routes(neighbourId string) (*api.RoutesResponse, error) {
|
||||
response := &api.RoutesResponse{}
|
||||
|
||||
// Fetch required routes first (received and filtered)
|
||||
required, err := self.fetchRequiredRoutes(neighbourId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Optional: NoExport
|
||||
_, notExported, err := self.fetchNotExportedRoutes(neighbourId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response.Api = required.Api
|
||||
response.Imported = required.Imported
|
||||
response.Filtered = required.Filtered
|
||||
response.NotExported = notExported
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Get all received routes
|
||||
func (self *SingleTableBirdwatcher) RoutesReceived(neighborId string) (*api.RoutesResponse, error) {
|
||||
response := &api.RoutesResponse{}
|
||||
|
||||
// Check if we hit the cache
|
||||
cachedRoutes := self.routesRequiredCache.Get(neighborId)
|
||||
if cachedRoutes != nil {
|
||||
response.Api = cachedRoutes.Api
|
||||
response.Imported = cachedRoutes.Imported
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Fetch required routes first (received and filtered)
|
||||
// However: Store in separate cache for faster access
|
||||
routes, err := self.fetchRequiredRoutes(neighborId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response.Api = routes.Api
|
||||
response.Imported = routes.Imported
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Get all filtered routes
|
||||
func (self *SingleTableBirdwatcher) RoutesFiltered(neighborId string) (*api.RoutesResponse, error) {
|
||||
response := &api.RoutesResponse{}
|
||||
|
||||
// Check if we hit the cache
|
||||
cachedRoutes := self.routesRequiredCache.Get(neighborId)
|
||||
if cachedRoutes != nil {
|
||||
response.Api = cachedRoutes.Api
|
||||
response.Filtered = cachedRoutes.Filtered
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Fetch required routes first (received and filtered)
|
||||
// However: Store in separate cache for faster access
|
||||
routes, err := self.fetchRequiredRoutes(neighborId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response.Api = routes.Api
|
||||
response.Filtered = routes.Filtered
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Get all not exported routes
|
||||
func (self *SingleTableBirdwatcher) RoutesNotExported(neighborId string) (*api.RoutesResponse, error) {
|
||||
// Check if we hit the cache
|
||||
response := self.routesNotExportedCache.Get(neighborId)
|
||||
if response != nil {
|
||||
return response, nil
|
||||
}
|
||||
|
||||
// Fetch not exported routes
|
||||
apiStatus, routes, err := self.fetchNotExportedRoutes(neighborId)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response = &api.RoutesResponse{
|
||||
Api: *apiStatus,
|
||||
NotExported: routes,
|
||||
}
|
||||
|
||||
// Cache result
|
||||
self.routesNotExportedCache.Set(neighborId, response)
|
||||
|
||||
return response, nil
|
||||
}
|
||||
|
||||
func (self *SingleTableBirdwatcher) AllRoutes() (*api.RoutesResponse, error) {
|
||||
// First fetch all routes from the master table
|
||||
birdImported, err := self.client.GetJson("/routes/table/master")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Then fetch all filtered routes from the master table
|
||||
birdFiltered, err := self.client.GetJson("/routes/table/master/filtered")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Use api status from second request
|
||||
apiStatus, err := parseApiStatus(birdFiltered, self.config)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
response := &api.RoutesResponse{
|
||||
Api: apiStatus,
|
||||
}
|
||||
|
||||
// Parse the routes
|
||||
imported := parseRoutesData(birdImported["routes"].([]interface{}), self.config)
|
||||
// Sort routes for deterministic ordering
|
||||
sort.Sort(imported)
|
||||
response.Imported = imported
|
||||
|
||||
// Parse the routes
|
||||
filtered := parseRoutesData(birdFiltered["routes"].([]interface{}), self.config)
|
||||
// Sort routes for deterministic ordering
|
||||
sort.Sort(filtered)
|
||||
response.Filtered = filtered
|
||||
|
||||
return response, nil
|
||||
}
|
@ -1,61 +0,0 @@
package birdwatcher

import (
	"strconv"
)

/*
 * Type helpers for the parser
 */

// Assert string, provide default
func mustString(value interface{}, fallback string) string {
	sval, ok := value.(string)
	if !ok {
		return fallback
	}
	return sval
}

// Assert list of strings
func mustStringList(data interface{}) []string {
	list := []string{}
	ldata, ok := data.([]interface{})
	if !ok {
		return []string{}
	}
	for _, e := range ldata {
		s, ok := e.(string)
		if ok {
			list = append(list, s)
		}
	}
	return list
}

// Convert a list of strings to a list of ints
func mustIntList(data interface{}) []int {
	list := []int{}
	sdata := mustStringList(data)
	for _, e := range sdata {
		val, _ := strconv.Atoi(e)
		list = append(list, val)
	}
	return list
}

func mustInt(value interface{}, fallback int) int {
	fval, ok := value.(float64)
	if !ok {
		return fallback
	}
	return int(fval)
}

func mustBool(value interface{}, fallback bool) bool {
	val, ok := value.(bool)
	if !ok {
		return fallback
	}
	return val
}
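// Example (illustrative sketch, not part of the original source): typical use
// of the must* helpers on loosely typed values decoded from JSON. The field
// names are hypothetical.
func exampleMustHelpers(data map[string]interface{}) (string, int, bool, []int) {
	name := mustString(data["name"], "unknown")
	count := mustInt(data["count"], 0)
	active := mustBool(data["active"], false)
	asns := mustIntList(data["asns"]) // expects a list of numeric strings
	return name, count, active, asns
}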
|
@ -1,53 +0,0 @@
package birdwatcher

import (
	"fmt"
	"sync"

	"github.com/alice-lg/alice-lg/backend/api"
)

/*
Helper functions for dealing with birdwatcher API data
*/

// Get neighbour by protocol id
func getNeighbourById(neighbours api.Neighbours, id string) (*api.Neighbour, error) {
	for _, n := range neighbours {
		if n.Id == id {
			return n, nil
		}
	}
	unknown := &api.Neighbour{
		Id:          "unknown",
		Description: "Unknown neighbour",
	}
	return unknown, fmt.Errorf("Neighbour not found")
}

/*
LockMap: Uses the sync.Map to manage locks, accessed by a key.
TODO: Maybe this would be a nice generic helper
*/
type LockMap struct {
	locks *sync.Map
}

func NewLockMap() *LockMap {
	return &LockMap{
		locks: &sync.Map{},
	}
}

func (self *LockMap) Lock(key string) {
	mutex, _ := self.locks.LoadOrStore(key, &sync.Mutex{})
	mutex.(*sync.Mutex).Lock()
}

func (self *LockMap) Unlock(key string) {
	mutex, ok := self.locks.Load(key)
	if !ok {
		return // Nothing to unlock
	}
	mutex.(*sync.Mutex).Unlock()
}
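// Example (illustrative sketch, not part of the original source): the LockMap
// serializes work per key, so concurrent requests for the same neighbor wait
// on each other while requests for different neighbors proceed in parallel.
func exampleLockMapUsage(locks *LockMap, neighborId string) {
	locks.Lock(neighborId)
	defer locks.Unlock(neighborId)
	// ... fetch and cache routes for neighborId ...
}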
|
@ -1,11 +0,0 @@
package gobgp

type Config struct {
	Id   string
	Name string

	Host          string `ini:"host"`
	Insecure      bool   `ini:"insecure"`
	TLSCert       string `ini:"tls_crt"`
	TLSCommonName string `ini:"tls_common_name"`
}
|
@ -1,194 +0,0 @@
|
||||
package gobgp
|
||||
|
||||
import (
|
||||
"github.com/alice-lg/alice-lg/backend/sources/gobgp/apiutil"
|
||||
"github.com/osrg/gobgp/pkg/packet/bgp"
|
||||
|
||||
"github.com/alice-lg/alice-lg/backend/api"
|
||||
gobgpapi "github.com/osrg/gobgp/api"
|
||||
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"time"
|
||||
)
|
||||
|
||||
var families []gobgpapi.Family = []gobgpapi.Family{gobgpapi.Family{
|
||||
Afi: gobgpapi.Family_AFI_IP,
|
||||
Safi: gobgpapi.Family_SAFI_UNICAST,
|
||||
}, gobgpapi.Family{
|
||||
Afi: gobgpapi.Family_AFI_IP6,
|
||||
Safi: gobgpapi.Family_SAFI_UNICAST,
|
||||
},
|
||||
}
|
||||
|
||||
func NewRoutesResponse() api.RoutesResponse {
|
||||
routes := api.RoutesResponse{}
|
||||
routes.Imported = make(api.Routes, 0)
|
||||
routes.Filtered = make(api.Routes, 0)
|
||||
routes.NotExported = make(api.Routes, 0)
|
||||
return routes
|
||||
}
|
||||
|
||||
func (gobgp *GoBGP) lookupNeighbour(neighborId string) (*gobgpapi.Peer, error) {
|
||||
|
||||
peers, err := gobgp.GetNeighbours()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, peer := range peers {
|
||||
peerId := PeerHash(peer)
|
||||
if neighborId == "" || peerId == neighborId {
|
||||
return peer, nil
|
||||
}
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("Could not lookup neighbour")
|
||||
}
|
||||
|
||||
func (gobgp *GoBGP) GetNeighbours() ([]*gobgpapi.Peer, error) {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), time.Second)
|
||||
defer cancel()
|
||||
|
||||
peerStream, err := gobgp.client.ListPeer(ctx, &gobgpapi.ListPeerRequest{EnableAdvertised: true})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
peers := make([]*gobgpapi.Peer, 0)
|
||||
|
||||
for {
|
||||
peer, err := peerStream.Recv()
|
||||
if err == io.EOF {
|
||||
break
|
||||
} else if err != nil {
return nil, err
}
|
||||
peers = append(peers, peer.Peer)
|
||||
}
|
||||
return peers, nil
|
||||
}
|
||||
|
||||
func (gobgp *GoBGP) parsePathIntoRoute(path *gobgpapi.Path, prefix string) (error, *api.Route) {
|
||||
|
||||
route := api.Route{}
|
||||
route.Id = fmt.Sprintf("%s_%s", path.SourceId, prefix)
|
||||
route.NeighbourId = PeerHashWithASAndAddress(path.SourceAsn, path.NeighborIp)
|
||||
route.Network = prefix
|
||||
route.Interface = "Unknown"
|
||||
route.Age = time.Now().Sub(time.Unix(path.Age.GetSeconds(), int64(path.Age.GetNanos())))
|
||||
route.Primary = path.Best
|
||||
|
||||
attrs, err := apiutil.GetNativePathAttributes(path)
|
||||
if err != nil {
|
||||
return err, nil
|
||||
}
|
||||
|
||||
route.Bgp.Communities = make(api.Communities, 0)
|
||||
route.Bgp.LargeCommunities = make(api.Communities, 0)
|
||||
route.Bgp.ExtCommunities = make(api.ExtCommunities, 0)
|
||||
|
||||
for _, attr := range attrs {
|
||||
switch attr.(type) {
|
||||
case *bgp.PathAttributeMultiExitDisc:
|
||||
med := attr.(*bgp.PathAttributeMultiExitDisc)
|
||||
route.Bgp.Med = int(med.Value)
|
||||
case *bgp.PathAttributeNextHop:
|
||||
nh := attr.(*bgp.PathAttributeNextHop)
|
||||
route.Gateway = nh.Value.String()
|
||||
route.Bgp.NextHop = nh.Value.String()
|
||||
case *bgp.PathAttributeLocalPref:
|
||||
lp := attr.(*bgp.PathAttributeLocalPref)
|
||||
route.Bgp.LocalPref = int(lp.Value)
|
||||
case *bgp.PathAttributeOrigin:
|
||||
origin := attr.(*bgp.PathAttributeOrigin)
|
||||
switch origin.Value {
|
||||
case bgp.BGP_ORIGIN_ATTR_TYPE_IGP:
|
||||
route.Bgp.Origin = "IGP"
|
||||
case bgp.BGP_ORIGIN_ATTR_TYPE_EGP:
|
||||
route.Bgp.Origin = "EGP"
|
||||
case bgp.BGP_ORIGIN_ATTR_TYPE_INCOMPLETE:
|
||||
route.Bgp.Origin = "Incomplete"
|
||||
}
|
||||
case *bgp.PathAttributeAsPath:
|
||||
aspath := attr.(*bgp.PathAttributeAsPath)
|
||||
for _, aspth := range aspath.Value {
|
||||
for _, as := range aspth.GetAS() {
|
||||
route.Bgp.AsPath = append(route.Bgp.AsPath, int(as))
|
||||
}
|
||||
}
|
||||
case *bgp.PathAttributeCommunities:
|
||||
communities := attr.(*bgp.PathAttributeCommunities)
|
||||
for _, community := range communities.Value {
|
||||
_community := api.Community{int((0xffff0000 & community) >> 16), int(0xffff & community)}
|
||||
route.Bgp.Communities = append(route.Bgp.Communities, _community)
|
||||
}
|
||||
|
||||
case *bgp.PathAttributeExtendedCommunities:
|
||||
communities := attr.(*bgp.PathAttributeExtendedCommunities)
|
||||
for _, community := range communities.Value {
|
||||
if _community, ok := community.(*bgp.TwoOctetAsSpecificExtended); ok {
|
||||
route.Bgp.ExtCommunities = append(route.Bgp.ExtCommunities, api.ExtCommunity{_community.AS, _community.LocalAdmin})
|
||||
}
|
||||
}
|
||||
case *bgp.PathAttributeLargeCommunities:
|
||||
communities := attr.(*bgp.PathAttributeLargeCommunities)
|
||||
for _, community := range communities.Values {
|
||||
route.Bgp.LargeCommunities = append(route.Bgp.LargeCommunities, api.Community{int(community.ASN), int(community.LocalData1), int(community.LocalData2)})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
route.Metric = (route.Bgp.LocalPref + route.Bgp.Med)
|
||||
|
||||
return nil, &route
|
||||
}
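// Illustrative note (not part of the original source): a standard BGP
// community arrives as one 32-bit value and is split above into the usual
// "ASN:value" pair, e.g. for community 0x0FDC0064 (266076260):
//
//	(0xffff0000 & community) >> 16  // -> 0x0FDC = 4060 (ASN part)
//	0xffff & community              // -> 0x0064 = 100  (value part)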
|
||||
|
||||
func (gobgp *GoBGP) GetRoutes(peer *gobgpapi.Peer, tableType gobgpapi.TableType, response *api.RoutesResponse) error {
|
||||
ctx, cancel := context.WithTimeout(context.Background(), time.Second)
|
||||
defer cancel()
|
||||
|
||||
for _, family := range families {
|
||||
|
||||
pathStream, err := gobgp.client.ListPath(ctx, &gobgpapi.ListPathRequest{
|
||||
Name: peer.State.NeighborAddress,
|
||||
TableType: tableType,
|
||||
Family: &family,
|
||||
EnableFiltered: true,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
log.Print(err)
|
||||
continue
|
||||
}
|
||||
|
||||
rib := make([]*gobgpapi.Destination, 0)
|
||||
for {
|
||||
_path, err := pathStream.Recv()
|
||||
if err == io.EOF {
|
||||
break
|
||||
} else if err != nil {
|
||||
log.Print(err)
|
||||
return err
|
||||
}
|
||||
rib = append(rib, _path.Destination)
|
||||
}
|
||||
|
||||
for _, destination := range rib {
|
||||
for _, path := range destination.Paths {
|
||||
err, route := gobgp.parsePathIntoRoute(path, destination.Prefix)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
continue
|
||||
}
|
||||
|
||||
if path.Filtered {
|
||||
response.Filtered = append(response.Filtered, route)
|
||||
} else {
|
||||
response.Imported = append(response.Imported, route)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
@ -1,17 +0,0 @@
package sources

import (
	"github.com/alice-lg/alice-lg/backend/api"
)

type Source interface {
	ExpireCaches() int
	Status() (*api.StatusResponse, error)
	Neighbours() (*api.NeighboursResponse, error)
	NeighboursStatus() (*api.NeighboursStatusResponse, error)
	Routes(neighbourId string) (*api.RoutesResponse, error)
	RoutesReceived(neighbourId string) (*api.RoutesResponse, error)
	RoutesFiltered(neighbourId string) (*api.RoutesResponse, error)
	RoutesNotExported(neighbourId string) (*api.RoutesResponse, error)
	AllRoutes() (*api.RoutesResponse, error)
}
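// Illustrative note (not part of the original source): concrete backends such
// as the birdwatcher and gobgp sources are expected to satisfy this
// interface. A compile-time check in a consuming package could look like:
//
//	var _ sources.Source = &birdwatcher.MultiTableBirdwatcher{}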
|
@ -1,31 +0,0 @@
package main

var version = "unknown"

// Gather application status information
type AppStatus struct {
	Version    string               `json:"version"`
	Routes     RoutesStoreStats     `json:"routes"`
	Neighbours NeighboursStoreStats `json:"neighbours"`
}

// Get application status, perform health checks
// on backends.
func NewAppStatus() (*AppStatus, error) {
	routesStatus := RoutesStoreStats{}
	if AliceRoutesStore != nil {
		routesStatus = AliceRoutesStore.Stats()
	}

	neighboursStatus := NeighboursStoreStats{}
	if AliceNeighboursStore != nil {
		neighboursStatus = AliceNeighboursStore.Stats()
	}

	status := &AppStatus{
		Version:    version,
		Routes:     routesStatus,
		Neighbours: neighboursStatus,
	}
	return status, nil
}
|
@ -1,33 +0,0 @@
package main

import (
	"time"
)

const (
	STATE_INIT = iota
	STATE_READY
	STATE_UPDATING
	STATE_ERROR
)

type StoreStatus struct {
	LastRefresh time.Time
	LastError   error
	State       int
}

// Helper: stateToString
func stateToString(state int) string {
	switch state {
	case STATE_INIT:
		return "INIT"
	case STATE_READY:
		return "READY"
	case STATE_UPDATING:
		return "UPDATING"
	case STATE_ERROR:
		return "ERROR"
	}
	return "INVALID"
}
|
@ -1,78 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Routes Store
|
||||
|
||||
type RoutesStats struct {
|
||||
Filtered int `json:"filtered"`
|
||||
Imported int `json:"imported"`
|
||||
}
|
||||
|
||||
type RouteServerRoutesStats struct {
|
||||
Name string `json:"name"`
|
||||
Routes RoutesStats `json:"routes"`
|
||||
|
||||
State string `json:"state"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
}
|
||||
|
||||
type RoutesStoreStats struct {
|
||||
TotalRoutes RoutesStats `json:"total_routes"`
|
||||
RouteServers []RouteServerRoutesStats `json:"route_servers"`
|
||||
}
|
||||
|
||||
// Write stats to the log
|
||||
func (stats RoutesStoreStats) Log() {
|
||||
log.Println("Routes store:")
|
||||
|
||||
log.Println(" Routes Imported:",
|
||||
stats.TotalRoutes.Imported,
|
||||
"Filtered:",
|
||||
stats.TotalRoutes.Filtered)
|
||||
log.Println(" Routeservers:")
|
||||
|
||||
for _, rs := range stats.RouteServers {
|
||||
log.Println(" -", rs.Name)
|
||||
log.Println(" State:", rs.State)
|
||||
log.Println(" UpdatedAt:", rs.UpdatedAt)
|
||||
log.Println(" Routes Imported:",
|
||||
rs.Routes.Imported,
|
||||
"Filtered:",
|
||||
rs.Routes.Filtered)
|
||||
}
|
||||
}
|
||||
|
||||
// Neighbours Store
|
||||
|
||||
type RouteServerNeighboursStats struct {
|
||||
Name string `json:"name"`
|
||||
State string `json:"state"`
|
||||
Neighbours int `json:"neighbours"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
}
|
||||
|
||||
type NeighboursStoreStats struct {
|
||||
TotalNeighbours int `json:"total_neighbours"`
|
||||
|
||||
RouteServers []RouteServerNeighboursStats `json:"route_servers"`
|
||||
}
|
||||
|
||||
// Print stats
|
||||
func (stats NeighboursStoreStats) Log() {
|
||||
log.Println("Neighbours store:")
|
||||
|
||||
log.Println(" Neighbours:",
|
||||
stats.TotalNeighbours)
|
||||
|
||||
for _, rs := range stats.RouteServers {
|
||||
log.Println(" -", rs.Name)
|
||||
log.Println(" State:", rs.State)
|
||||
log.Println(" UpdatedAt:", rs.UpdatedAt)
|
||||
log.Println(" Neighbours:",
|
||||
rs.Neighbours)
|
||||
}
|
||||
}
|
backend/theme.go
@ -1,191 +0,0 @@
|
||||
package main
|
||||
|
||||
/*
|
||||
The theme provides a method for adding customized CSS
|
||||
or Javascript to Alice:
|
||||
|
||||
A theme directory can be specified in the config.
|
||||
Stylesheets and Javascript residing in the theme root
|
||||
directory will be included in the frontend's HTML.
|
||||
|
||||
Additional files can be added in subdirectories.
|
||||
These are served as well and can be used for additional
|
||||
assets. (E.g. a logo)
|
||||
*/
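// Illustrative note (not part of the original source): a hypothetical theme
// directory could look like this, with the base path configured alongside it:
//
//	theme/
//	  extra.css     (picked up as a stylesheet include)
//	  extra.js      (picked up as a script include)
//	  img/logo.png  (served as an additional asset)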
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/julienschmidt/httprouter"
|
||||
)
|
||||
|
||||
type Theme struct {
|
||||
Config ThemeConfig
|
||||
}
|
||||
|
||||
func NewTheme(config ThemeConfig) *Theme {
|
||||
theme := &Theme{
|
||||
Config: config,
|
||||
}
|
||||
|
||||
return theme
|
||||
}
|
||||
|
||||
/*
|
||||
Get includable files from theme directory
|
||||
*/
|
||||
func (self *Theme) listIncludes(suffix string) []string {
|
||||
includes := []string{}
|
||||
|
||||
files, err := ioutil.ReadDir(self.Config.Path)
|
||||
if err != nil {
|
||||
return []string{}
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
if file.IsDir() {
|
||||
continue
|
||||
}
|
||||
|
||||
filename := file.Name()
|
||||
if strings.HasPrefix(filename, ".") {
|
||||
continue
|
||||
}
|
||||
|
||||
if strings.HasSuffix(filename, suffix) {
|
||||
includes = append(includes, filename)
|
||||
}
|
||||
}
|
||||
|
||||
return includes
|
||||
}
|
||||
|
||||
/*
|
||||
Calculate a hash value for an include file,
to help with cache invalidation when the file changes.

We use the file's last modification time as a Unix()
timestamp, encoded as hex.
|
||||
*/
|
||||
func (self *Theme) HashInclude(include string) string {
|
||||
path := filepath.Join(self.Config.Path, include)
|
||||
stat, err := os.Stat(path)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
modTime := stat.ModTime().UTC()
|
||||
timestamp := modTime.Unix()
|
||||
|
||||
return strconv.FormatInt(timestamp, 16)
|
||||
}
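// Illustrative note (not part of the original source): the include "hash" is
// just the modification time as a hex encoded Unix timestamp, e.g.
//
//	modTime.Unix() = 1500000000  ->  "59682f00"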
|
||||
|
||||
/*
|
||||
Retrieve a list of includable stylesheets; the cache busting
hash is provided by HashInclude
|
||||
*/
|
||||
func (self *Theme) Stylesheets() []string {
|
||||
return self.listIncludes(".css")
|
||||
}
|
||||
|
||||
/*
|
||||
Make include statement: stylesheet
|
||||
*/
|
||||
func (self *Theme) StylesheetIncludes() string {
|
||||
|
||||
includes := []string{}
|
||||
for _, stylesheet := range self.Stylesheets() {
|
||||
hash := self.HashInclude(stylesheet)
|
||||
include := fmt.Sprintf(
|
||||
"<link rel=\"stylesheet\" href=\"%s/%s?%s\" />",
|
||||
self.Config.BasePath, stylesheet, hash,
|
||||
)
|
||||
includes = append(includes, include)
|
||||
}
|
||||
|
||||
return strings.Join(includes, "\n")
|
||||
}
|
||||
|
||||
/*
|
||||
Retrieve a list of includable JavaScript files
|
||||
*/
|
||||
func (self *Theme) Scripts() []string {
|
||||
return self.listIncludes(".js")
|
||||
}
|
||||
|
||||
/*
|
||||
Make include statement: script
|
||||
*/
|
||||
func (self *Theme) ScriptIncludes() string {
|
||||
includes := []string{}
|
||||
for _, script := range self.Scripts() {
|
||||
hash := self.HashInclude(script)
|
||||
include := fmt.Sprintf(
|
||||
"<script type=\"text/javascript\" src=\"%s/%s?%s\"></script>",
|
||||
self.Config.BasePath, script, hash,
|
||||
)
|
||||
includes = append(includes, include)
|
||||
}
|
||||
|
||||
return strings.Join(includes, "\n")
|
||||
}
|
||||
|
||||
/*
|
||||
Theme HTTP Handler
|
||||
*/
|
||||
func (self *Theme) Handler() http.Handler {
|
||||
|
||||
// Serve the content using the file server
|
||||
path := self.Config.Path
|
||||
themeFilesHandler := http.StripPrefix(
|
||||
self.Config.BasePath, http.FileServer(http.Dir(path)))
|
||||
|
||||
return themeFilesHandler
|
||||
}
|
||||
|
||||
/*
|
||||
Register theme at path
|
||||
*/
|
||||
func (self *Theme) RegisterThemeAssets(router *httprouter.Router) error {
|
||||
fsPath := self.Config.Path
|
||||
if fsPath == "" {
|
||||
return nil // nothing to do here
|
||||
}
|
||||
|
||||
if _, err := os.Stat(fsPath); err != nil {
|
||||
return fmt.Errorf("Theme path '%s' could not be found!", fsPath)
|
||||
}
|
||||
|
||||
log.Println("Using theme at:", fsPath)
|
||||
|
||||
// We have a theme, install handler
|
||||
path := fmt.Sprintf("%s/*path", self.Config.BasePath)
|
||||
router.Handler("GET", path, self.Handler())
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
/*
|
||||
Prepare document, fill placeholder with scripts and stylesheet
|
||||
*/
|
||||
func (self *Theme) PrepareClientHtml(html string) string {
|
||||
stylesheets := self.StylesheetIncludes()
|
||||
scripts := self.ScriptIncludes()
|
||||
|
||||
html = strings.Replace(html,
|
||||
"<!-- ###THEME_STYLESHEETS### -->",
|
||||
stylesheets, 1)
|
||||
html = strings.Replace(html,
|
||||
"<!-- ###THEME_SCRIPTS### -->",
|
||||
scripts, 1)
|
||||
|
||||
return html
|
||||
}
|
@ -1,91 +0,0 @@
|
||||
package main
|
||||
|
||||
// Some helper functions
|
||||
import (
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var REGEX_MATCH_IP_PREFIX = regexp.MustCompile(`([a-f0-9/]+[\.:]*)+`)
|
||||
|
||||
/*
|
||||
Case Insensitive Contains
|
||||
*/
|
||||
func ContainsCi(s, substr string) bool {
|
||||
return strings.Contains(
|
||||
strings.ToLower(s),
|
||||
strings.ToLower(substr),
|
||||
)
|
||||
}
|
||||
|
||||
/*
|
||||
Check array membership
|
||||
*/
|
||||
func MemberOf(list []string, key string) bool {
|
||||
for _, v := range list {
|
||||
if v == key {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
/*
|
||||
Check if something could be a prefix
|
||||
*/
|
||||
func MaybePrefix(s string) bool {
|
||||
s = strings.ToLower(s)
|
||||
|
||||
// Rule out anything which can not be
|
||||
if strings.ContainsAny(s, "ghijklmnopqrstuvwxyz][;'_") {
|
||||
return false
|
||||
}
|
||||
|
||||
// Test using regex
|
||||
matches := REGEX_MATCH_IP_PREFIX.FindAllStringIndex(s, -1)
|
||||
if len(matches) == 1 {
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
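// Illustrative note (not part of the original source): the heuristic only
// rules out strings that cannot be prefixes; it does not validate them.
// For example:
//
//	MaybePrefix("2001:db8::/32")  // -> true
//	MaybePrefix("10.0.0.0/8")     // -> true
//	MaybePrefix("rs1.example")    // -> false (contains letters beyond a-f)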
|
||||
|
||||
/*
|
||||
Since having ints as keys in JSON is
actually undefined behaviour, we keep these internally
but provide a string as a key for serialization
|
||||
*/
|
||||
func SerializeReasons(reasons map[int]string) map[string]string {
|
||||
res := make(map[string]string)
|
||||
for id, reason := range reasons {
|
||||
res[strconv.Itoa(id)] = reason
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
/*
|
||||
Make trimmed list of CSV strings.
|
||||
Omits empty values.
|
||||
*/
|
||||
func TrimmedStringList(s string) []string {
|
||||
tokens := strings.Split(s, ",")
|
||||
list := []string{}
|
||||
for _, t := range tokens {
t = strings.TrimSpace(t)
if t == "" {
continue
}

list = append(list, t)
}
|
||||
return list
|
||||
}
|
||||
|
||||
/*
|
||||
Convert time.Duration to milliseconds
|
||||
*/
|
||||
|
||||
func DurationMs(d time.Duration) float64 {
|
||||
return float64(d) / 1000.0 / 1000.0 // nano -> micro -> milli
|
||||
}
|
@ -1 +0,0 @@
|
||||
node_modules/
|
@ -1,15 +0,0 @@
|
||||
|
||||
#
|
||||
# Client build Dockerfile
|
||||
# Use node:11 as base image
|
||||
#
|
||||
|
||||
FROM node:11
|
||||
|
||||
RUN npm install -g gulp@4.0.0
|
||||
RUN npm install -g gulp-cli
|
||||
|
||||
|
||||
WORKDIR /client
|
||||
VOLUME ["/client"]
|
||||
|
@ -1,53 +0,0 @@
|
||||
#
|
||||
# Build Helper
|
||||
# ------------
|
||||
#
|
||||
# Create a full build by just typing make.
|
||||
# This will automatically install all dependencies from NPM and
|
||||
# start the build process.
|
||||
#
|
||||
# While developing, you might want to use 'make watch'
|
||||
# which will automatically restart gulp in case something went
|
||||
# wrong.
|
||||
#
|
||||
|
||||
VERSION=$(shell cat ../VERSION)
|
||||
|
||||
DIST_BUILDS=../../birdseye-static/builds
|
||||
DIST=birdseye-ui-dist-$(VERSION).tar.gz
|
||||
|
||||
# == END CONFIGURATION ==
|
||||
|
||||
DIST_BUILD=$(addprefix $(DIST_BUILDS)/, $(DIST))
|
||||
|
||||
all: client
|
||||
|
||||
deps:
|
||||
@echo "Installing dependencies"
|
||||
yarn install
|
||||
|
||||
client: deps
|
||||
@echo "Building alice UI"
|
||||
gulp
|
||||
|
||||
client_prod: deps
|
||||
@echo "Building alice UI (production)"
|
||||
DISABLE_LOGGING=1 NODE_ENV=production gulp
|
||||
|
||||
watch:
|
||||
while true; do gulp watch; done
|
||||
|
||||
$(DIST_BUILD): deps client_prod
|
||||
@echo "Creating alice ui distribution"
|
||||
tar cvzf $(DIST) build/
|
||||
mv $(DIST) $(DIST_BUILDS)
|
||||
@echo ""
|
||||
@echo "Done. Don't forget to push the dist to github"
|
||||
|
||||
|
||||
clean:
|
||||
rm -rf build/
|
||||
|
||||
|
||||
dist: $(DIST_BUILD)
|
||||
echo $(DIST_BUILD)
|
@ -1,40 +0,0 @@
|
||||
|
||||
DOCKER_CONTAINER := alice-lg-node-build
|
||||
DOCKER_IMAGE := alice-lg-node:latest
|
||||
DOCKER_EXEC := docker run --rm -t -i \
|
||||
-a stdin -a stdout -a stderr \
|
||||
-v `pwd`:/client/ \
|
||||
--name $(DOCKER_CONTAINER) \
|
||||
$(DOCKER_IMAGE) /bin/bash -c
|
||||
|
||||
all: client
|
||||
@echo "Built alice-lg client"
|
||||
|
||||
image:
|
||||
docker build . -t $(DOCKER_IMAGE)
|
||||
|
||||
deps: image
|
||||
$(DOCKER_EXEC) "yarn install"
|
||||
|
||||
client: stop deps
|
||||
@echo "Building alice UI"
|
||||
$(DOCKER_EXEC) "./node_modules/.bin/gulp"
|
||||
|
||||
client_prod: stop deps
|
||||
@echo "Building alice UI (production)"
|
||||
$(DOCKER_EXEC) "DISABLE_LOGGING=1 NODE_ENV=production ./node_modules/.bin/gulp"
|
||||
|
||||
watch:
|
||||
$(DOCKER_EXEC) "while true; do ./node_modules/.bin/gulp watch; done"
|
||||
|
||||
stop:
|
||||
@echo "Stopping docker container: $(DOCKER_CONTAINER)"
|
||||
-docker stop $(DOCKER_CONTAINER)
|
||||
@sleep 1
|
||||
|
||||
kill:
|
||||
@echo "Killing docker container: $(DOCKER_CONTAINER)"
|
||||
-docker kill $(DOCKER_CONTAINER)
|
||||
@sleep 1
|
||||
|
||||
|
client/app.jsx
@ -1,118 +0,0 @@
/**
 * Alice (formerly known as Birdseye) v.2.0.0
 * ------------------------------------------
 *
 * @author Matthias Hannig <mha@ecix.net>
 */

import axios from 'axios'

import React from 'react'
import ReactDOM from 'react-dom'

import { Component } from 'react'

// Config
import { configureAxios } from './config'

// Content
import { contentUpdate } from './components/content/actions'

// Redux
import { createStore, applyMiddleware } from 'redux'
import { Provider } from 'react-redux'

// Router
import { createHistory } from 'history'
import { Router,
         Route,
         IndexRoute,
         IndexRedirect,
         useRouterHistory } from 'react-router'

import { syncHistoryWithStore } from 'react-router-redux'


// Components
import LayoutMain from 'layouts/main'


import WelcomePage
  from 'components/welcome'
import RouteserverPage
  from 'components/routeservers/page'
import RoutesPage
  from 'components/routeservers/routes/page'
import LookupPage
  from 'components/lookup/page'

// Middlewares
import thunkMiddleware from 'redux-thunk'
import createLogger from 'redux-logger'
import { routerMiddleware as createRouterMiddleware }
  from 'react-router-redux'

// Reducer
import combinedReducer from './reducer/app-reducer'

// Setup routing
const browserHistory = useRouterHistory(createHistory)({
  basename: '/'
});


// Setup application
let store;
const routerMiddleware = createRouterMiddleware(browserHistory);
if (window.NO_LOG) {
  store = createStore(combinedReducer, applyMiddleware(
    routerMiddleware,
    thunkMiddleware
  ));
} else {
  const loggerMiddleware = createLogger();
  store = createStore(combinedReducer, applyMiddleware(
    routerMiddleware,
    thunkMiddleware,
    loggerMiddleware
  ));
}


// Create extension endpoint:
window.Alice = {
  updateContent: (content) => {
    store.dispatch(contentUpdate(content));
  }
};

const history = syncHistoryWithStore(browserHistory, store);

// Setup axios
configureAxios(axios);

// Create App
class Birdseye extends Component {
  render() {
    return (
      <Provider store={store}>
        <Router history={history}>
          <Route path="/" component={LayoutMain}>
            <IndexRoute component={WelcomePage}/>
            <Route path="/search"
                   component={LookupPage} />
            <Route path="/routeservers">
              <Route path=":routeserverId" component={RouteserverPage} />
              <Route path=":routeserverId/protocols/:protocolId/routes" component={RoutesPage} />
            </Route>
          </Route>
        </Router>
      </Provider>
    );
  }
}

var mount = document.getElementById('app');
ReactDOM.render(<Birdseye />, mount);
@@ -1,8 +0,0 @@

.welcome-page {
  .jumbotron {
    padding: 20px;
  }
}
@@ -1,30 +0,0 @@

import React from 'react'
import {connect} from 'react-redux'

import {parseServerTime} from 'components/datetime/parse'

import moment from 'moment'

/*
 * Calculate age (generated_at), and set from_cache_status
 */
export const apiCacheStatus = function(apiStatus) {
  if (apiStatus == {}) {
    return null;
  }

  const cacheStatus = apiStatus["cache_status"] || {};
  const cachedAt = cacheStatus.cached_at;
  if (!cachedAt) {
    return null;
  }

  const fromCache = apiStatus.result_from_cache;
  const ttl = parseServerTime(apiStatus.ttl);
  const generatedAt = parseServerTime(cachedAt);
  const age = ttl.diff(generatedAt); // ms

  return {fromCache, age, generatedAt, ttl};
};
@@ -1,22 +0,0 @@
import axios from 'axios';
import {apiError} from 'components/errors/actions'

export const LOAD_CONFIG_SUCCESS = "@config/LOAD_CONFIG_SUCCESS";

function loadConfigSuccess(config) {
  return {
    type: LOAD_CONFIG_SUCCESS,
    payload: config
  };
}

export function loadConfig() {
  return (dispatch) => {
    axios.get(`/api/v1/config`)
      .then(
        ({data}) => {
          dispatch(loadConfigSuccess(data));
        },
        (error) => dispatch(apiError(error)));
  }
}
@@ -1,72 +0,0 @@
import {LOAD_CONFIG_SUCCESS} from './actions'
import {LOAD_ROUTESERVERS_SUCCESS} from 'components/routeservers/actions'

const initialState = {
  asn: 0, // Our own ASN (might be abstracted in the future)

  routes_columns: {},
  routes_columns_order: [],
  neighbours_columns: {},
  neighbours_columns_order: [],
  lookup_columns: {},
  lookup_columns_order: [],
  prefix_lookup_enabled: false,
  content: {},
  noexport_load_on_demand: true, // we have to assume this
                                 // otherwise fetch will start.
  rpki: {
    enabled: false,
  },

  bgp_communities: {},

  blackholes: {}, // Map blackholes to routeservers
  asns: {}, // Map ASNs to routeservers (for future use)
};

const _handleRouteserversConfig = function(state, payload) {
  let blackholes = {};
  let asns = {};
  let asn = 0;
  for (const rs of payload.routeservers) {
    blackholes[rs.id] = rs.blackholes;
    asns[rs.id] = rs.asn;
    if (!asn) {
      asn = rs.asn; // Just go with the first asn as our own
    }
  }

  return Object.assign({}, state, {
    asn: asn,
    blackholes: blackholes,
    asns: asns,
  });
}

export default function reducer(state = initialState, action) {
  switch(action.type) {
    case LOAD_CONFIG_SUCCESS:
      return Object.assign({}, state, {
        routes_columns: action.payload.routes_columns,
        routes_columns_order: action.payload.routes_columns_order,

        neighbours_columns: action.payload.neighbours_columns,
        neighbours_columns_order: action.payload.neighbours_columns_order,

        lookup_columns: action.payload.lookup_columns,
        lookup_columns_order: action.payload.lookup_columns_order,

        prefix_lookup_enabled: action.payload.prefix_lookup_enabled,

        rpki: action.payload.rpki,
        bgp_communities: action.payload.bgp_communities,

        noexport_load_on_demand: action.payload.noexport.load_on_demand
      });

    case LOAD_ROUTESERVERS_SUCCESS:
      return _handleRouteserversConfig(state, action.payload);
  }
  return state;
}
@@ -1,17 +0,0 @@
import React from 'react'
import { connect } from 'react-redux'

import { loadConfig } from 'components/config/actions'


class Config extends React.Component {
  componentDidMount() {
    this.props.dispatch(loadConfig());
  }

  render() {
    return null;
  }
}

export default connect()(Config);
@@ -1,10 +0,0 @@

export const CONTENT_UPDATE = "@content/CONTENT_UPDATE";

export function contentUpdate(content) {
  return {
    type: CONTENT_UPDATE,
    payload: content
  }
}
@@ -1,39 +0,0 @@

import React from 'react'
import {connect} from 'react-redux'

/*
 * Content Component
 */
function ContentComponent(props) {
  let key = props.id;
  let defaultValue = props.children;

  if (!key) {
    return <span>{defaultValue}</span>;
  }

  // Traverse content by key, if content is found
  // return content, otherwise fall back to the default
  let tokens = key.split(".");
  let resolved = props.content;
  for (let part of tokens) {
    resolved = resolved[part];
    if (!resolved) {
      break;
    }
  }

  if (!resolved) {
    resolved = defaultValue;
  }

  return (<span dangerouslySetInnerHTML={{__html: resolved}}></span>);
}

export default connect(
  (state) => ({
    content: state.content
  })
)(ContentComponent);
@@ -1,17 +0,0 @@
/*
 * Content reducer
 */

import {CONTENT_UPDATE} from './actions'

const initialState = {};

export default function reducer(state = initialState, action) {
  switch(action.type) {
    case CONTENT_UPDATE:
      return Object.assign({}, state, action.payload);
  }

  return state;
}
@@ -1,29 +0,0 @@

/**
 * Datetime Component
 *
 * @author Matthias Hannig <mha@ecix.net>
 */


import React from 'react'

import moment from 'moment'

import {parseServerTime} from './parse'


export default class Datetime extends React.Component {
  render() {
    let timefmt = this.props.format;
    if (!timefmt) {
      timefmt = 'LLLL';
    }

    let time = parseServerTime(this.props.value);
    return (
      <span>{time.format(timefmt)}</span>
    );
  }
}
@@ -1,16 +0,0 @@

/*
 * Some datetime parsing helper functions
 */

import moment from 'moment'


window.moment = moment;

export function parseServerTime(serverTime) {
  const fmt = "YYYY-MM-DDTHH:mm:ss.SSSSSSSSZ"; // S was 4 byte short
  return moment(serverTime, fmt);
}
@@ -1,16 +0,0 @@

import React from 'react'
import moment from 'moment'

export default class RelativeTimestamp extends React.Component {
  render() {
    const tsMs = this.props.value / 1000.0 / 1000.0; // nano -> micro -> milli
    const now = moment.utc()
    const rel = now.subtract(tsMs, 'ms');

    return (
      <span>{rel.fromNow(this.props.suffix)}</span>
    );
  }
}
@@ -1,80 +0,0 @@

import moment from 'moment'

import React from 'react'

export default class RelativeTime extends React.Component {

  // Local state updates, to trigger a rerender
  // every second for time updates.
  componentDidMount() {
    this.timer = setInterval(() => {
      this.setState({
        now: Date.now()
      })
    }, 1000);
  }

  // Stop timer
  componentWillUnmount() {
    clearInterval(this.timer);
  }

  // Helper: Assert time is an instance of moment
  getTime() {
    if (!this.props.value) {
      return false;
    }

    let time = false;
    if (this.props.value instanceof moment) {
      time = this.props.value;
    } else {
      time = moment.utc(this.props.value);
    }
    return time
  }


  // Time can be capped, if we are handling a past
  // or future event:
  capTime(time) {
    const now = moment.utc();
    if (this.props.pastEvent && time.isAfter(now)) {
      return now;
    }

    if (this.props.futureEvent && time.isBefore(now)) {
      return now;
    }

    return time;
  }


  render() {
    let time = this.getTime();
    if (!time) {
      return null; // Well, nothing to do here
    }

    time = this.capTime(time);

    // A few seconds ago / in a few seconds can be replaced
    // with 'just now'.
    // fuzzyNow can be set as a threshold of seconds
    if (this.props.fuzzyNow) {
      const now = moment.utc();
      if (Math.abs(now - time) / 1000.0 < this.props.fuzzyNow) {
        return (
          <span>just now</span>
        );
      }
    }

    return (
      <span>{time.fromNow(this.props.suffix)}</span>
    );
  }
}
@@ -1,12 +0,0 @@
export const API_ERROR = '@birdseye/API_ERROR';

export function apiError(error) {
  return {
    type: API_ERROR,
    error,
  };
}

export function resetApiError() {
  return apiError(null);
}
@@ -1,95 +0,0 @@
import _ from 'underscore'

import React from 'react'
import {connect} from 'react-redux'

import {resetApiError} from './actions'
import {infoFromError} from './utils'

class ErrorsPage extends React.Component {

  resetApiError() {
    this.props.dispatch(resetApiError());
  }

  render() {
    if (!this.props.error) {
      return null;
    }

    let status = null;
    if (this.props.error.response) {
      status = this.props.error.response.status;
    } else {
      status = 600;
    }


    if (!status || (status != 429 && status < 500)) {
      return null;
    }

    let body = null;


    // Find affected routeserver
    let rs = null;
    const errorInfo = infoFromError(this.props.error);
    if (errorInfo) {
      const rsId = errorInfo.routeserver_id;
      if (rsId !== null) {
        rs = _.findWhere(this.props.routeservers, { id: rsId });
      }
    }

    if (status == 429) {
      body = (
        <div className="error-message">
          <p>Alice reached the request limit.</p>
          <p>We suggest you try at a less busy time.</p>
        </div>
      );
    } else {
      let errorStatus = "";
      if (this.props.error.response) {
        errorStatus = " (got HTTP " + this.props.error.response.status + ")";
      }
      if (errorInfo) {
        errorStatus = ` (got ${errorInfo.tag})`;
      }

      body = (
        <div className="error-message">
          <p>
            Alice has trouble connecting to the API
            {rs &&
              <span> of <b>{rs.name}</b></span>}
            {errorStatus}
            .
          </p>
          <p>If this problem persist, we suggest you try again later.</p>
        </div>
      );
    }

    return (
      <div className="error-notify">
        <div className="error-dismiss">
          <i className="fa fa-times-circle" aria-hidden="true"
             onClick={() => this.resetApiError()}></i>
        </div>
        <div className="error-icon">
          <i className="fa fa-times-circle" aria-hidden="true"></i>
        </div>
        {body}
      </div>
    );
  }
}

export default connect(
  (state) => ({
    error: state.errors.error,
    routeservers: state.routeservers.byId,
  })
)(ErrorsPage);
@@ -1,17 +0,0 @@
import {API_ERROR} from './actions'

const initialState = {
  error: null,
};


export default function reducer(state = initialState, action) {
  switch(action.type) {
    case API_ERROR:
      return {error: action.error};
  }
  return state;
}
@@ -1,12 +0,0 @@

/*
 * Helper: Get info from api error
 */

export const infoFromError = function(error) {
  if (error.response && error.response.data && error.response.data.code) {
    return error.response.data;
  }
  return null;
}
@@ -1,126 +0,0 @@

import _ from 'underscore'

import React from 'react'
import {connect} from 'react-redux'

import {push} from 'react-router-redux'

import {cloneFilters,
        hasFilters}
  from 'components/filters/state'

import {FILTER_GROUP_SOURCES,
        FILTER_GROUP_ASNS,
        FILTER_GROUP_COMMUNITIES,
        FILTER_GROUP_EXT_COMMUNITIES,
        FILTER_GROUP_LARGE_COMMUNITIES}
  from './groups'

import {RouteserversSelect,
        PeersFilterSelect,
        CommunitiesSelect}
  from './widgets'

/*
 * Helper: Add and remove filter
 */
function _applyFilterValue(filters, group, value) {
  let nextFilters = cloneFilters(filters);
  nextFilters[group].filters.push({
    value: value,
  });
  return nextFilters;
}

function _removeFilterValue(filters, group, value) {
  const svalue = value.toString();
  let nextFilters = cloneFilters(filters);
  let groupFilters = nextFilters[group].filters;
  nextFilters[group].filters = _.filter(groupFilters, (f) => {
    return f.value.toString() !== svalue;
  });
  return nextFilters;
}

class FiltersEditor extends React.Component {
  addFilter(group, value) {
    let nextFilters = _applyFilterValue(
      this.props.applied, group, value
    );
    this.props.dispatch(push(
      this.props.makeLinkProps(Object.assign({}, this.props.link, {
        filtersApplied: nextFilters,
      }))
    ));
  }

  removeFilter(group, sourceId) {
    let nextFilters = _removeFilterValue(
      this.props.applied, group, sourceId
    );

    this.props.dispatch(push(
      this.props.makeLinkProps(Object.assign({}, this.props.link, {
        filtersApplied: nextFilters,
      }))
    ));
  }

  render() {
    if (!hasFilters(this.props.available) && !hasFilters(this.props.applied)) {
      return null;
    }
    /*

     */
    return (
      <div className="card lookup-filters-editor">
        <RouteserversSelect onChange={(value) => this.addFilter(FILTER_GROUP_SOURCES, value)}
                            onRemove={(value) => this.removeFilter(FILTER_GROUP_SOURCES, value)}
                            available={this.props.availableSources}
                            applied={this.props.appliedSources} />

        <PeersFilterSelect onChange={(value) => this.addFilter(FILTER_GROUP_ASNS, value)}
                           onRemove={(value) => this.removeFilter(FILTER_GROUP_ASNS, value)}
                           available={this.props.availableAsns}
                           applied={this.props.appliedAsns} />

        <CommunitiesSelect onChange={(group, value) => this.addFilter(group, value)}
                           onRemove={(group, value) => this.removeFilter(group, value)}
                           available={this.props.availableCommunities}
                           applied={this.props.appliedCommunities} />
      </div>
    );
  }
}

export default connect(
  (state, props) => ({
    isLoading: state.lookup.isLoading,

    link: props.linkProps,

    available: props.filtersAvailable,
    applied: props.filtersApplied,

    availableSources: props.filtersAvailable[FILTER_GROUP_SOURCES].filters,
    appliedSources: props.filtersApplied[FILTER_GROUP_SOURCES].filters,

    availableAsns: props.filtersAvailable[FILTER_GROUP_ASNS].filters,
    appliedAsns: props.filtersApplied[FILTER_GROUP_ASNS].filters,

    availableCommunities: {
      communities: props.filtersAvailable[FILTER_GROUP_COMMUNITIES].filters,
      ext: props.filtersAvailable[FILTER_GROUP_EXT_COMMUNITIES].filters,
      large: props.filtersAvailable[FILTER_GROUP_LARGE_COMMUNITIES].filters,
    },
    appliedCommunities: {
      communities: props.filtersApplied[FILTER_GROUP_COMMUNITIES].filters,
      ext: props.filtersApplied[FILTER_GROUP_EXT_COMMUNITIES].filters,
      large: props.filtersApplied[FILTER_GROUP_LARGE_COMMUNITIES].filters,
    },
  })
)(FiltersEditor);
@@ -1,96 +0,0 @@

import {
  FILTER_GROUP_SOURCES,
  FILTER_GROUP_ASNS,
  FILTER_GROUP_COMMUNITIES,
  FILTER_GROUP_EXT_COMMUNITIES,
  FILTER_GROUP_LARGE_COMMUNITIES,
} from './groups'


function _makeFilter(value) {
  return {
    name: "",
    value: value,
    cardinality: 1,
  }
}

export function decodeFiltersSources(params) {
  if (!params.sources) {
    return []; // No params available
  }
  const sources = params.sources.split(",");
  return sources.map((sid) => _makeFilter(sid));
}

export function decodeFiltersAsns(params) {
  if (!params.asns) {
    return []; // No params available
  }
  const asns = params.asns.split(",");
  return asns.map((asn) => _makeFilter(parseInt(asn, 10)));
}

function _decodeCommunity(community) {
  const parts = community.split(":");
  return parts.map((p) => parseInt(p, 10));
}

function _decodeExtCommunity(community) {
  return community.split(":");
}

export function decodeFiltersCommunities(params) {
  if (!params.communities) {
    return []; // No params available
  }
  const communities = params.communities.split(",");
  return communities.map((c) => _makeFilter(_decodeCommunity(c)));
}

export function decodeFiltersExtCommunities(params) {
  if (!params.ext_communities) {
    return []; // No params available
  }
  const communities = params.ext_communities.split(",");
  return communities.map((c) => _makeFilter(_decodeExtCommunity(c)));
}

export function decodeFiltersLargeCommunities(params) {
  if (!params.large_communities) {
    return []; // No params available
  }
  const communities = params.large_communities.split(",");
  return communities.map((c) => _makeFilter(_decodeCommunity(c)));
}


export function encodeGroupInt(group) {
  if (!group.filters.length) {
    return "";
  }
  const values = group.filters.map((f) => f.value).join(",");
  return `&${group.key}=${values}`;
}

export function encodeGroupCommunities(group) {
  if (!group.filters.length) {
    return "";
  }
  const values = group.filters.map((f) => f.value.join(":")).join(",");
  return `&${group.key}=${values}`;
}

export function filtersUrlEncode(filters) {
  let encoded = "";

  encoded += encodeGroupInt(filters[FILTER_GROUP_SOURCES]);
  encoded += encodeGroupInt(filters[FILTER_GROUP_ASNS]);
  encoded += encodeGroupCommunities(filters[FILTER_GROUP_COMMUNITIES]);
  encoded += encodeGroupCommunities(filters[FILTER_GROUP_EXT_COMMUNITIES]);
  encoded += encodeGroupCommunities(filters[FILTER_GROUP_LARGE_COMMUNITIES]);

  return encoded;
}
@@ -1,31 +0,0 @@

export const FILTER_KEY_SOURCES = "sources"
export const FILTER_KEY_ASNS = "asns"
export const FILTER_KEY_COMMUNITIES = "communities"
export const FILTER_KEY_EXT_COMMUNITIES = "ext_communities"
export const FILTER_KEY_LARGE_COMMUNITIES = "large_communities"

export const FILTER_GROUP_SOURCES = 0
export const FILTER_GROUP_ASNS = 1
export const FILTER_GROUP_COMMUNITIES = 2
export const FILTER_GROUP_EXT_COMMUNITIES = 3
export const FILTER_GROUP_LARGE_COMMUNITIES = 4


export function filtersEqual(a, b) {
  return (a[FILTER_GROUP_SOURCES].filters.length ===
          b[FILTER_GROUP_SOURCES].filters.length) &&

         (a[FILTER_GROUP_ASNS].filters.length ===
          b[FILTER_GROUP_ASNS].filters.length) &&

         (a[FILTER_GROUP_COMMUNITIES].filters.length ===
          b[FILTER_GROUP_COMMUNITIES].filters.length) &&

         (a[FILTER_GROUP_EXT_COMMUNITIES].filters.length ===
          b[FILTER_GROUP_EXT_COMMUNITIES].filters.length) &&

         (a[FILTER_GROUP_LARGE_COMMUNITIES].filters.length ===
          b[FILTER_GROUP_LARGE_COMMUNITIES].filters.length);
}
@@ -1,161 +0,0 @@

import _ from 'underscore'

import {FILTER_GROUP_SOURCES,
        FILTER_GROUP_ASNS,
        FILTER_GROUP_COMMUNITIES,
        FILTER_GROUP_EXT_COMMUNITIES,
        FILTER_GROUP_LARGE_COMMUNITIES}
  from './groups'

import {decodeFiltersSources,
        decodeFiltersAsns,
        decodeFiltersCommunities,
        decodeFiltersExtCommunities,
        decodeFiltersLargeCommunities}
  from 'components/filters/encoding'

export const initializeFilterState = () => ([
  {"key": "sources", "filters": []},
  {"key": "asns", "filters": []},
  {"key": "communities", "filters": []},
  {"key": "ext_communities", "filters": []},
  {"key": "large_communities", "filters": []},
]);

export function cloneFilters(filters) {
  const nextFilters = [
    Object.assign({}, filters[FILTER_GROUP_SOURCES]),
    Object.assign({}, filters[FILTER_GROUP_ASNS]),
    Object.assign({}, filters[FILTER_GROUP_COMMUNITIES]),
    Object.assign({}, filters[FILTER_GROUP_EXT_COMMUNITIES]),
    Object.assign({}, filters[FILTER_GROUP_LARGE_COMMUNITIES]),
  ];

  nextFilters[FILTER_GROUP_SOURCES].filters =
    [...nextFilters[FILTER_GROUP_SOURCES].filters];

  nextFilters[FILTER_GROUP_ASNS].filters =
    [...nextFilters[FILTER_GROUP_ASNS].filters];

  nextFilters[FILTER_GROUP_COMMUNITIES].filters =
    [...nextFilters[FILTER_GROUP_COMMUNITIES].filters];

  nextFilters[FILTER_GROUP_EXT_COMMUNITIES].filters =
    [...nextFilters[FILTER_GROUP_EXT_COMMUNITIES].filters];

  nextFilters[FILTER_GROUP_LARGE_COMMUNITIES].filters =
    [...nextFilters[FILTER_GROUP_LARGE_COMMUNITIES].filters];

  return nextFilters;
}

/*
 * Decode filters applied from params
 */
export function decodeFiltersApplied(params) {
  const groups = initializeFilterState();

  groups[FILTER_GROUP_SOURCES].filters = decodeFiltersSources(params);
  groups[FILTER_GROUP_ASNS].filters = decodeFiltersAsns(params);
  groups[FILTER_GROUP_COMMUNITIES].filters = decodeFiltersCommunities(params);
  groups[FILTER_GROUP_EXT_COMMUNITIES].filters = decodeFiltersExtCommunities(params);
  groups[FILTER_GROUP_LARGE_COMMUNITIES].filters = decodeFiltersLargeCommunities(params);

  return groups;
}

/*
 * Merge filters
 */
function _mergeFilters(a, b) {
  let groups = initializeFilterState();
  let setCmp = [];
  setCmp[FILTER_GROUP_SOURCES] = cmpFilterValue;
  setCmp[FILTER_GROUP_ASNS] = cmpFilterValue;
  setCmp[FILTER_GROUP_COMMUNITIES] = cmpFilterCommunity;
  setCmp[FILTER_GROUP_EXT_COMMUNITIES] = cmpFilterCommunity;
  setCmp[FILTER_GROUP_LARGE_COMMUNITIES] = cmpFilterCommunity;

  for (const i in groups) {
    groups[i].filters = mergeFilterSet(setCmp[i], a[i].filters, b[i].filters);
  }

  return groups;
}

export function mergeFilters(a, ...other) {
  let result = cloneFilters(a);
  for (const filters of other) {
    result = _mergeFilters(result, cloneFilters(filters));
  }
  return result;
}

/*
 * Merge list of filters
 */
function mergeFilterSet(inSet, a, b) {
  let result = a;
  for (const f of b) {
    const present = inSet(result, f);
    if (present) {
      // Update filter cardinality
      // present.cardinality = Math.max(f.cardinality, present.cardinality);
      present.cardinality += f.cardinality;
      continue;
    }
    result.push(f);
  }
  return result;
}

/*
 * Does a single group have any filters?
 */
export function groupHasFilters(group) {
  return group.filters.length > 0;
}

/*
 * Filters set compare
 */
function cmpFilterValue(set, filter) {
  for (const f of set) {
    if(f.value == filter.value) {
      return f;
    }
  }
  return null;
}

function cmpFilterCommunity(set, filter) {
  for (const f of set) {
    let match = true;
    for (const i in f.value) {
      if (f.value[i] != filter.value[i]) {
        match = false;
        break;
      }
    }

    if (match) {
      return f;
    }
  }
  return null;
}

/*
 * Do we have filters in general?
 */
export function hasFilters(groups) {
  for (const g of groups) {
    if (groupHasFilters(g)) {
      return true;
    }
  }
  return false;
}
@@ -1,318 +0,0 @@

import _ from 'underscore'

import React from 'react'
import {connect} from 'react-redux'

import CommunityLabel
  from 'components/routeservers/communities/label'
import {makeReadableCommunity}
  from 'components/routeservers/communities/utils'

import {FILTER_GROUP_COMMUNITIES,
        FILTER_GROUP_EXT_COMMUNITIES,
        FILTER_GROUP_LARGE_COMMUNITIES}
  from './groups'


/*
 * Add a title to the widget, if something needs to be rendered
 */
const withTitle = (title) => (Widget) => (class WidgetWithTitle extends Widget {
  render() {
    const result = super.render();
    if (result == null) {
      return null;
    }
    return (
      <div className="filter-editor-widget">
        <h2>{title}</h2>
        {result}
      </div>
    )
  }
});


class _RouteserversSelect extends React.Component {
  render() {
    // Nothing to do if we don't have filters
    if (this.props.available.length == 0 &&
        this.props.applied.length == 0) {
      return null;
    }

    // Sort filters available
    const sortedFiltersAvailable = this.props.available.sort((a, b) => {
      return a.value - b.value;
    });

    // For now we allow only one applied
    const appliedFilter = this.props.applied[0] || {value: undefined};

    if (appliedFilter.value !== undefined) {
      // Just render this, with a button for removal
      return (
        <table className="select-ctrl">
          <tbody>
            <tr>
              <td className="select-container">
                {appliedFilter.name}
              </td>
              <td>
                <button className="btn btn-remove"
                        onClick={() => this.props.onRemove(appliedFilter.value)}>
                  <i className="fa fa-times" />
                </button>
              </td>
            </tr>
          </tbody>
        </table>
      );
    }

    // Build options
    const optionsAvailable = sortedFiltersAvailable.map((filter) => {
      return <option key={filter.value} value={filter.value}>
        {filter.name} ({filter.cardinality})
      </option>;
    });

    return (
      <table className="select-ctrl">
        <tbody>
          <tr>
            <td className="select-container">
              <select className="form-control"
                      onChange={(e) => this.props.onChange(e.target.value)}
                      value={appliedFilter.value}>
                <option value="none" className="options-title">Show results from RS...</option>
                {optionsAvailable}
              </select>
            </td>
          </tr>
        </tbody>
      </table>
    );
  }
}

export const RouteserversSelect = withTitle("Route Server")(_RouteserversSelect);


class _PeersFilterSelect extends React.Component {
  render() {
    // Nothing to do if we don't have filters
    if (this.props.available.length == 0 &&
        this.props.applied.length == 0) {
      return null;
    }

    // Sort filters available
    const sortedFiltersAvailable = this.props.available.sort((a, b) => {
      return a.name.localeCompare(b.name);
    });

    // For now we allow only one applied
    const appliedFilter = this.props.applied[0] || {value: undefined};

    if (appliedFilter.value !== undefined) {

      // Just render this, with a button for removal
      return (
        <table className="select-ctrl">
          <tbody>
            <tr>
              <td className="select-container">
                {appliedFilter.name}
              </td>
              <td>
                <button className="btn btn-remove"
                        onClick={() => this.props.onRemove(appliedFilter.value)}>
                  <i className="fa fa-times" />
                </button>
              </td>
            </tr>
          </tbody>
        </table>
      );
    }

    // Build options
    const optionsAvailable = sortedFiltersAvailable.map((filter) => {
      return <option key={filter.value} value={filter.value}>
        {filter.name}, AS{filter.value} ({filter.cardinality})
      </option>;
    });

    return (
      <table className="select-ctrl">
        <tbody>
          <tr>
            <td className="select-container">
              <select className="form-control"
                      onChange={(e) => this.props.onChange(e.target.value)}
                      value={appliedFilter.value}>
                <option className="options-title"
                        value="none">Show only results from AS...</option>
                {optionsAvailable}
              </select>
            </td>
          </tr>
        </tbody>
      </table>
    );
  }
}

export const PeersFilterSelect = withTitle("Neighbor")(_PeersFilterSelect);


class __CommunitiesSelect extends React.Component {
  propagateChange(value) {
    // Decode value
    const [group, community] = value.split(",", 2);
    const filterValue = community.split(":"); // spew.

    this.props.onChange(group, filterValue);
  }

  render() {
    // Nothing to do if we don't have filters
    const hasAvailable = this.props.available.communities.length > 0 ||
                         this.props.available.ext.length > 0 ||
                         this.props.available.large.length > 0;

    const hasApplied = this.props.applied.communities.length > 0 ||
                       this.props.applied.ext.length > 0 ||
                       this.props.applied.large.length > 0;

    if (!hasApplied && !hasAvailable) {
      return null; // nothing to do here.
    }

    const communitiesAvailable = this.props.available.communities.sort((a, b) => {
      return (a.value[0] - b.value[0]) * 100000 + (a.value[1] - b.value[1]);
    });

    const extCommunitiesAvailable = this.props.available.ext.sort((a, b) => {
      return (a.value[1] - b.value[1]) * 100000 + (a.value[2] - b.value[2]);
    });

    // const extCommunitiesAvailable = []; // They don't work. for now.

    const largeCommunitiesAvailable = this.props.available.large.sort((a, b) => {
      return (a.value[0] - b.value[0]) * 10000000000 +
             (a.value[1] - b.value[1]) * 100000 +
             (a.value[2] - b.value[2]);
    });

    const makeOption = (group, name, filter, cls) => {
      const value = `${group},${filter.value.join(":")}`; // yikes.
      return (
        <option key={filter.value} value={value} className={cls}>
          {filter.name} {name} ({filter.cardinality})
        </option>
      );
    }

    const communitiesOptions = communitiesAvailable.map((filter) => {
      const name = makeReadableCommunity(this.props.communities, filter.value);
      const cls = `select-bgp-community-0-${filter.value[0]} ` +
                  `select-bgp-community-1-${filter.value[1]}`;
      return makeOption(FILTER_GROUP_COMMUNITIES, name, filter, cls);
    });

    const extCommunitiesOptions = extCommunitiesAvailable.map((filter) => {
      const name = makeReadableCommunity(this.props.communities, filter.value);
      const cls = `select-bgp-community-0-${filter.value[0]} ` +
                  `select-bgp-community-1-${filter.value[1]} ` +
                  `select-bgp-community-2-${filter.value[2]}`;
      return makeOption(FILTER_GROUP_EXT_COMMUNITIES, name, filter, cls);
    });

    const largeCommunitiesOptions = largeCommunitiesAvailable.map((filter) => {
      const name = makeReadableCommunity(this.props.communities, filter.value);
      const cls = `select-bgp-community-0-${filter.value[0]} ` +
                  `select-bgp-community-1-${filter.value[1]} ` +
                  `select-bgp-community-2-${filter.value[2]}`;
      return makeOption(FILTER_GROUP_LARGE_COMMUNITIES, name, filter, cls);
    });

    // Render list of applied communities
    const makeCommunity = (group, name, filter) => (
      <tr key={filter.value}>
        <td className="select-container">
          <CommunityLabel community={filter.value} />
        </td>
        <td>
          <button className="btn btn-remove"
                  onClick={() => this.props.onRemove(group, filter.value)}>
            <i className="fa fa-times" />
          </button>
        </td>
      </tr>
    );

    const appliedCommunities = this.props.applied.communities.map((filter) => {
      const name = makeReadableCommunity(this.props.communities, filter.value);
      return makeCommunity(FILTER_GROUP_COMMUNITIES, name, filter);
    });

    const appliedExtCommunities = this.props.applied.ext.map((filter) => {
      const name = makeReadableCommunity(this.props.communities, filter.value);
      return makeCommunity(FILTER_GROUP_EXT_COMMUNITIES, name, filter);
    });

    const appliedLargeCommunities = this.props.applied.large.map((filter) => {
      const name = makeReadableCommunity(this.props.communities, filter.value);
      return makeCommunity(FILTER_GROUP_LARGE_COMMUNITIES, name, filter);
    });

    return (
      <table className="select-ctrl">
        <tbody>
          {appliedCommunities}
          {appliedExtCommunities}
          {appliedLargeCommunities}
          {hasAvailable &&
            <tr>
              <td className="select-container" colSpan="2">
                <select value="none"
                        onChange={(e) => this.propagateChange(e.target.value)}
                        className="form-control">
                  <option value="none" className="options-title">
                    Select BGP Communities to match...
                  </option>
                  {communitiesOptions.length > 0 &&
                    <optgroup label="Communities">
                      {communitiesOptions}
                    </optgroup>}

                  {extCommunitiesOptions.length > 0 &&
                    <optgroup label="Ext. Communities">
                      {extCommunitiesOptions}
                    </optgroup>}

                  {largeCommunitiesOptions.length > 0 &&
                    <optgroup label="Large Communities">
                      {largeCommunitiesOptions}
                    </optgroup>}
                </select>
              </td>
            </tr>}
        </tbody>
      </table>
    );
  }
}

const _CommunitiesSelect = connect(
  (state) => ({
    communities: state.config.bgp_communities,
  })
)(__CommunitiesSelect);

export const CommunitiesSelect = withTitle("BGP Communities")(_CommunitiesSelect);
@@ -1,19 +0,0 @@

import React from 'react'
import Spinner from 'react-spinkit'

export default class Indicator extends React.Component {
  render() {
    if (this.props.show == false) {
      return null;
    }

    return (
      <div className="loading-indicator">
        <Spinner spinnerName="circle" />
      </div>
    );
  }
}
@@ -1,83 +0,0 @@

/*
 * Prefix lookup actions
 */

import axios from 'axios'

import {filtersUrlEncode} from 'components/filters/encoding'

export const SET_LOOKUP_QUERY_VALUE = '@lookup/SET_LOOKUP_QUERY_VALUE';

export const LOAD_RESULTS_REQUEST = '@lookup/LOAD_RESULTS_REQUEST';
export const LOAD_RESULTS_SUCCESS = '@lookup/LOAD_RESULTS_SUCCESS';
export const LOAD_RESULTS_ERROR = '@lookup/LOAD_RESULTS_ERROR';

export const RESET = "@lookup/RESET";

// Action creators
export function setLookupQueryValue(value) {
  return {
    type: SET_LOOKUP_QUERY_VALUE,
    payload: {
      value: value,
    }
  }
}


export function loadResultsRequest(query) {
  return {
    type: LOAD_RESULTS_REQUEST,
    payload: {
      query: query
    }
  }
}

export function loadResultsSuccess(query, results) {
  return {
    type: LOAD_RESULTS_SUCCESS,
    payload: {
      query: query,
      results: results
    }
  }
}

export function loadResultsError(query, error) {
  return {
    type: LOAD_RESULTS_ERROR,
    payload: {
      query: query,
      error: error
    }
  }
}

export function loadResults(query, filters, pageImported=0, pageFiltered=0) {
  return (dispatch) => {
    dispatch(loadResultsRequest(query));

    // Build querystring
    const q = `q=${query}&page_filtered=${pageFiltered}&page_imported=${pageImported}`;
    const f = filtersUrlEncode(filters);
    axios.get(`/api/v1/lookup/prefix?${q}${f}`)
      .then(
        (res) => {
          dispatch(loadResultsSuccess(query, res.data));
        },
        (error) => {
          dispatch(loadResultsError(query, error));
        });
  }
}

export function reset() {
  return {
    type: RESET,
    payload: {}
  }
}
@@ -1,122 +0,0 @@

/*
 * Alice (Prefix-)Lookup
 */

import {debounce} from 'underscore'

import React from 'react'
import {connect} from 'react-redux'
import {replace} from 'react-router-redux'

import {setLookupQueryValue} from './actions'

import Content from 'components/content'

import LookupResults from './results'
import SearchInput from 'components/search-input'

import QuickLinks from 'components/routeservers/routes/quick-links'


class LookupHelp extends React.Component {
  render() {
    if(this.props.query != '') {
      return null;
    }

    return (
      <div className="lookup-help">
        <h3>Did you know?</h3>
        <p>You can search for</p>
        <ul>
          <li><b>Prefixes</b>,</li>
          <li><b>Peers</b> by entering their name and</li>
          <li><b>ASNs</b> by prefixing them with 'AS'</li>
        </ul>
        <p>Just start typing!</p>
      </div>
    );
  }
}


class Lookup extends React.Component {

  constructor(props) {
    super(props);
    this.debouncedDispatch = debounce(this.props.dispatch, 400);
  }

  doLookup(q) {
    // Make path
    const destination = {
      pathname: "/search",
      search: `?q=${q}`
    };

    // Set lookup params
    this.props.dispatch(setLookupQueryValue(q));
    this.debouncedDispatch(replace(destination));
  }

  componentDidMount() {
    // this is yucky but the debounced
    // search input seems to kill the ref=
    let input = document.getElementById('lookup-search-input');
    input.focus();
    let value = input.value;
    input.value = "";
    input.value = value;
  }

  render() {
    return (
      <div className="lookup-container">
        <div className="card">
          <h2><Content id="lookup.title">Search on all route servers</Content></h2>
          <SearchInput
            ref="searchInput"
            id="lookup-search-input"
            value={this.props.queryValue}
            placeholder="Search for Prefixes, Peers or ASNs on all Route Servers"
            onChange={(e) => this.doLookup(e.target.value)} />
        </div>

        <QuickLinks routes={this.props.routes}
                    excludeNotExported={true} />

        <LookupHelp query={this.props.query} />

        <LookupResults />
      </div>
    )
  }
}

export default connect(
  (state) => {
    const lookup = state.lookup;
    return {
      query: state.lookup.query,
      queryValue: state.lookup.queryValue,
      isLoading: state.lookup.isLoading,
      error: state.lookup.error,
      routes: {
        filtered: {
          loading: lookup.isLoading,
          totalResults: lookup.totalRoutesFiltered,
        },
        received: {
          loading: lookup.isLoading,
          totalResults: lookup.totalRoutesImported,
        },
        notExported: {
          loading: false,
          totalResults: 0,
        }
      }
    }
  }
)(Lookup);
@@ -1,71 +0,0 @@

import React from 'react'
import {connect} from 'react-redux'

import PageHeader from 'components/page-header'

import Lookup from 'components/lookup'
import LookupSummary from 'components/lookup/results-summary'
import FiltersEditor from 'components/filters/editor'

import Content from 'components/content'

import {makeLinkProps} from './state'

class _LookupView extends React.Component {
  render() {
    if (this.props.enabled == false) {
      return null;
    }

    return (
      <div className="lookup-container details-main">
        <div className="col-main col-lg-9 col-md-12">
          <Lookup />
        </div>
        <div className="col-aside-details col-lg-3 col-md-12">
          <LookupSummary />
          <FiltersEditor makeLinkProps={makeLinkProps}
                         linkProps={this.props.linkProps}
                         filtersApplied={this.props.filtersApplied}
                         filtersAvailable={this.props.filtersAvailable} />
        </div>
      </div>
    );
  }
}

const LookupView = connect(
  (state) => {
    return {
      enabled: state.config.prefix_lookup_enabled,

      filtersAvailable: state.lookup.filtersAvailable,
      filtersApplied: state.lookup.filtersApplied,

      linkProps: {
        anchor: "filtered",
        page: 0,
        pageReceived: 0, // Reset pagination on filter change
        pageFiltered: 0,
        query: state.lookup.query,
        filtersApplied: state.lookup.filtersApplied,
        routing: state.routing.locationBeforeTransitions,
      },
    }
  }
)(_LookupView);


export default class LookupPage extends React.Component {
  render() {
    return (
      <div className="welcome-page">
        <PageHeader></PageHeader>
        <p></p>
        <LookupView />
      </div>
    );
  }
}
@@ -1,218 +0,0 @@

/*
 * Routes Lookup Pagination
 * ------------------------
 *
 * This code contains a lot of overlap with the pagination
 * code in components/routeservers/routes/pagination.jsx
 *
 * Because time right now is at the essence, we will use
 * this as a base and generalize the pagionation code later.
 * (I'm so sorry :/)
 *
 * TODO: Refactor an generalize pagination links
 */



import React from 'react'
import {connect} from 'react-redux'

import {Link} from 'react-router'
import {push} from 'react-router-redux'

import {makeLinkProps} from './state'


const PageLink = function(props) {
  const linkPage = parseInt(props.page, 10);
  const label = props.label || (linkPage + 1);

  if (props.disabled) {
    return <span>{label}</span>;
  }

  const linkTo = makeLinkProps(props);
  return (
    <Link to={linkTo}>{label}</Link>
  );
}


const PageSelect = (props) => {
  const {pages, options} = props;

  if (pages.length == 0) {
    return null; // nothing to do here.
  }

  const items = pages.map((p) => (
    <option key={p} value={p}>{p + 1}</option>
  ));

  const active = props.page >= pages[0];
  let itemClassName = "";
  if (active) {
    itemClassName = "active";
  }

  return (
    <li className={itemClassName}>
      <select className="form-control pagination-select"
              value={props.page}
              onChange={(e) => props.onChange(e.target.value) }>
        { props.page < pages[0] && <option value={pages[0]}>more...</option> }

        {items}
      </select>
    </li>
  );
}



class RoutesPaginatorView extends React.Component {

  /*
   * Create an array of page "ids" we can use to map our
   * pagination items.
   * Split result into items for direct link access and
   * select for a dropdown like access.
   */
  makePaginationPages(numPages) {
    const MAX_ITEMS = 12;
    const pages = Array.from(Array(numPages), (_, i) => i);
    return {
      items: pages.slice(0, MAX_ITEMS),
      select: pages.slice(MAX_ITEMS)
    }
  }

  /*
   * Dispatch navigation event and go to page
   */
  navigateToPage(page) {
    const linkProps = makeLinkProps(Object.assign({}, this.props, {
      page: page
    }));

    this.props.dispatch(push(linkProps));
  }

  render() {

    if (this.props.totalPages <= 1) {
      return null; // Nothing to paginate
    }

    const pages = this.makePaginationPages(this.props.totalPages);
    const pageLinks = pages.items.map((p) => {
      let className = "";
      if (p == this.props.page) {
        className = "active";
      }

      return (
        <li key={p} className={className}>
          <PageLink page={p}
                    routing={this.props.routing}
                    anchor={this.props.anchor}
                    loadNotExported={this.props.loadNotExported}
                    filtersApplied={this.props.filtersApplied}
                    pageReceived={this.props.pageReceived}
                    pageFiltered={this.props.pageFiltered}
                    pageNotExported={this.props.pageNotExported} />
        </li>
      );
    });


    let prevLinkClass = "";
    if (this.props.page == 0) {
      prevLinkClass = "disabled";
    }

    let nextLinkClass = "";
    if (this.props.page + 1 == this.props.totalPages) {
      nextLinkClass = "disabled";
    }

    return (
      <nav aria-label="Routes Pagination">
        <ul className="pagination">
          <li className={prevLinkClass}>
            <PageLink page={this.props.page - 1}
                      label="«"
                      disabled={this.props.page == 0}
                      routing={this.props.routing}
                      anchor={this.props.anchor}
                      loadNotExported={this.props.loadNotExported}
                      filtersApplied={this.props.filtersApplied}
                      pageReceived={this.props.pageReceived}
                      pageFiltered={this.props.pageFiltered}
                      pageNotExported={this.props.pageNotExported} />
          </li>
          {pageLinks}
          <PageSelect pages={pages.select}
                      page={this.props.page}
                      onChange={(page) => this.navigateToPage(page)} />

          {pages.select.length == 0 &&
            <li className={nextLinkClass}>
              <PageLink page={this.props.page + 1}
                        disabled={this.props.page + 1 == this.props.totalPages}
                        label="»"
                        routing={this.props.routing}
                        anchor={this.props.anchor}
                        loadNotExported={this.props.loadNotExported}
                        filtersApplied={this.props.filtersApplied}
                        pageReceived={this.props.pageReceived}
                        pageFiltered={this.props.pageFiltered}
                        pageNotExported={this.props.pageNotExported} />
            </li>}
        </ul>
      </nav>
    );
  }
}


export const RoutesPaginator = connect(
  (state) => ({
    filtersApplied: state.lookup.filtersApplied,

    pageReceived: state.lookup.pageImported,
    pageFiltered: state.lookup.pageFiltered,
    pageNotExported: 0,

    routing: state.routing.locationBeforeTransitions
  })
)(RoutesPaginatorView);


export class RoutesPaginationInfo extends React.Component {
  render() {
    const totalResults = this.props.totalResults;
    const perPage = this.props.pageSize;
    const start = this.props.page * perPage + 1;
    const end = Math.min(start + perPage - 1, totalResults);
    if (this.props.totalPages <= 1) {
      let routes = "route";
      if (totalResults > 1) {
        routes = "routes";
      }

      return (
        <div className="routes-pagination-info pull-right">
          Showing <b>all</b> of <b>{totalResults}</b> {routes}
        </div>
      );
    }
    return (
      <div className="routes-pagination-info pull-right">
        Showing <b>{start} - {end}</b> of <b>{totalResults}</b> total routes
      </div>
    );
  }
}
@@ -1,168 +0,0 @@
/*
 * Prefix Lookup Reducer
 */

import {
  LOAD_RESULTS_REQUEST,
  LOAD_RESULTS_SUCCESS,
  LOAD_RESULTS_ERROR,

  SET_LOOKUP_QUERY_VALUE,

  RESET,
} from './actions'

import {cloneFilters,
        decodeFiltersApplied,
        initializeFilterState}
  from 'components/filters/state'

const LOCATION_CHANGE = '@@router/LOCATION_CHANGE'

const initialState = {
  query: "",
  queryValue: "",

  anchor: "",

  filtersAvailable: initializeFilterState(),
  filtersApplied: initializeFilterState(),

  routesImported: [],
  routesFiltered: [],

  error: null,

  queryDurationMs: 0.0,

  cachedAt: false,
  cacheTtl: false,

  pageImported: 0,
  pageFiltered: 0,

  pageSizeImported: 0,
  pageSizeFiltered: 0,

  totalPagesImported: 0,
  totalPagesFiltered: 0,

  totalRoutesImported: 0,
  totalRoutesFiltered: 0,

  isLoading: false
}

/*
 * Helper: Get scroll anchor from hash
 */
const getScrollAnchor = function(hash) {
  return hash.substr(hash.indexOf('-')+1);
}


/*
 * Restore lookup query state from location paramenters
 */
const _handleLocationChange = function(state, payload) {
  const params = payload.query;
  const query = params["q"] || "";
  const pageFiltered = parseInt(params["pf"] || 0, 10);
  const pageReceived = parseInt(params["pr"] || 0, 10);
  const anchor = getScrollAnchor(payload.hash);

  // Restore filters applied from location
  const filtersApplied = decodeFiltersApplied(params);

  return Object.assign({}, state, {
    anchor: anchor,
    query: query,
    queryValue: query,
    pageImported: pageReceived,
    pageFiltered: pageFiltered,
    filtersApplied: filtersApplied,
  });
}

/*
 * Receive query results
 */
const _loadQueryResult = function(state, payload) {
  const results = payload.results;
  const imported = results.imported;
  const filtered = results.filtered;
  const api = results.api;

  return Object.assign({}, state, {
    isLoading: false,

    // Cache Status
    cachedAt: api.cache_status.cached_at, // I don't like this style.
    cacheTtl: api.ttl,

    // Routes
    routesImported: imported.routes,
    routesFiltered: filtered.routes,

    // Filters available
    filtersAvailable: results.filters_available,
    filtersApplied: results.filters_applied,

    // Pagination
    pageImported: imported.pagination.page,
    pageFiltered: filtered.pagination.page,
    pageSizeImported: imported.pagination.page_size,
    pageSizeFiltered: filtered.pagination.page_size,
    totalPagesImported: imported.pagination.total_pages,
    totalPagesFiltered: filtered.pagination.total_pages,
    totalRoutesImported: imported.pagination.total_results,
    totalRoutesFiltered: filtered.pagination.total_results,

    // Statistics
    queryDurationMs: results.request_duration_ms,
    totalRoutes: imported.pagination.total_results + filtered.pagination.total_results
  });
}


export default function reducer(state=initialState, action) {
  switch(action.type) {
    case LOCATION_CHANGE:
      return _handleLocationChange(state, action.payload);

    case SET_LOOKUP_QUERY_VALUE:
      return Object.assign({}, state, {
        queryValue: action.payload.value,
      });

    case LOAD_RESULTS_REQUEST:
      return Object.assign({}, state, {
        query: action.payload.query,
        queryValue: action.payload.query,
        isLoading: true
      });

    case LOAD_RESULTS_SUCCESS:
      if (state.query != action.payload.query) {
        return state;
      }
      return _loadQueryResult(state, action.payload);

    case LOAD_RESULTS_ERROR:
      if (state.query != action.payload.query) {
        return state;
      }

      return Object.assign({}, state, initialState, {
        query: action.payload.query,
        queryValue: action.payload.query,
        error: action.payload.error
      });

    case RESET:
      return Object.assign({}, state, initialState);
  }
  return state;
}
@ -1,88 +0,0 @@

import React from 'react'
import {connect} from 'react-redux'
import moment from 'moment'

import RelativeTime from 'components/datetime/relative'



const RefreshState = function(props) {
  if (!props.cachedAt || !props.cacheTtl) {
    return null;
  }

  const cachedAt = moment.utc(props.cachedAt);
  const cacheTtl = moment.utc(props.cacheTtl);

  if (cacheTtl.isBefore(moment.utc())) {
    // This means cache is currently being rebuilt
    return (
      <li>
        Routes cache was built <b><RelativeTime fuzzyNow={5}
                                                pastEvent={true}
                                                value={cachedAt} /> </b>
        and is currently being refreshed.
      </li>
    );
  }

  return (
    <li>
      Routes cache was built <b><RelativeTime fuzzyNow={5} value={cachedAt} /> </b>
      and will be refreshed <b><RelativeTime value={cacheTtl} futureEvent={true} /></b>.
    </li>
  );
}

class ResultsBox extends React.Component {

  render() {
    if (this.props.query == '') {
      return null;
    }

    if (this.props.isLoading) {
      return null;
    }

    const queryDuration = this.props.queryDuration.toFixed(2);
    const cachedAt = this.props.cachedAt;
    const cacheTtl = this.props.cacheTtl;

    return (
      <div className="card">
        <div className="lookup-result-summary">
          <ul>
            <li>
              Found <b>{this.props.totalImported}</b> received
              and <b>{this.props.totalFiltered}</b> filtered routes.
            </li>
            <li>Query took <b>{queryDuration} ms</b> to complete.</li>
            <RefreshState cachedAt={this.props.cachedAt}
                          cacheTtl={this.props.cacheTtl} />
          </ul>
        </div>
      </div>
    );
  }
}


export default connect(
  (state) => {
    return {
      isLoading: state.lookup.isLoading,

      totalImported: state.lookup.totalRoutesImported,
      totalFiltered: state.lookup.totalRoutesFiltered,

      cachedAt: state.lookup.cachedAt,
      cacheTtl: state.lookup.cacheTtl,

      queryDuration: state.lookup.queryDurationMs
    }
  }
)(ResultsBox)

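RefreshState decides between its two messages by comparing the cache TTL with the current time via moment. The comparison in isolation, with made-up timestamps:

import moment from 'moment'

const cachedAt = moment.utc("2018-01-01T10:00:00Z");
const cacheTtl = moment.utc("2018-01-01T10:05:00Z");

if (cacheTtl.isBefore(moment.utc())) {
  // TTL has passed: the backend is rebuilding the routes cache right now.
  console.log("cache built", cachedAt.fromNow(), "- refresh in progress");
} else {
  console.log("cache is fresh until", cacheTtl.format());
}
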
@ -1,215 +0,0 @@

import _ from 'underscore'

import React from 'react'
import {connect} from 'react-redux'
import {Link} from 'react-router'
import {replace} from 'react-router-redux'

import {filtersEqual} from 'components/filters/groups'

import FilterReason
  from 'components/routeservers/communities/filter-reason'

import NoexportReason
  from 'components/routeservers/communities/noexport-reason'

import BgpAttributesModal
  from 'components/routeservers/routes/bgp-attributes-modal'

import LoadingIndicator
  from 'components/loading-indicator/small'

import ResultsTable from './table'

import {loadResults, reset} from './actions'

import {RoutesPaginator,
        RoutesPaginationInfo} from './pagination'

import {RoutesHeader}
  from 'components/routeservers/routes/view'



const ResultsView = function(props) {
  if(!props.routes) {
    return null;
  }
  if(props.routes.length == 0) {
    return null;
  }

  const type = props.type;

  return (
    <div className={`card routes-view routes-${type}`}>
      <div className="row">
        <div className="col-md-6 routes-header-container">
          <RoutesHeader type={type} />
        </div>
        <div className="col-md-6">
          <RoutesPaginationInfo page={props.page}
                                pageSize={props.pageSize}
                                totalPages={props.totalPages}
                                totalResults={props.totalResults} />
        </div>
      </div>
      <ResultsTable routes={props.routes}
                    displayReasons={props.displayReasons} />
      <center>
        <RoutesPaginator page={props.page} totalPages={props.totalPages}
                         queryParam={props.query}
                         anchor={type} />
      </center>
    </div>
  );
}

class NoResultsView extends React.Component {
  render() {
    if (!this.props.show) {
      return null;
    }
    return (
      <p className="lookup-no-results text-info card">
        No prefixes could be found for <b>{this.props.query}</b>
      </p>
    );
  }
}

const NoResultsFallback = connect(
  (state) => {
    let total = state.lookup.totalRoutes;
    let query = state.lookup.query;
    let isLoading = state.lookup.isLoading;

    let show = false;

    if (total == 0 && query != "" && isLoading == false) {
      show = true;
    }

    return {
      show: show,
      query: state.lookup.query
    }
  }
)(NoResultsView);



class LookupResults extends React.Component {

  dispatchLookup() {
    const query = this.props.query;
    const pageImported = this.props.pagination.imported.page;
    const pageFiltered = this.props.pagination.filtered.page;
    const filters = this.props.filtersApplied;

    if (query == "") {
      // Dispatch reset and transition to main page
      this.props.dispatch(reset());
      this.props.dispatch(replace("/"));
    } else {
      this.props.dispatch(
        loadResults(query, filters, pageImported, pageFiltered)
      );
    }
  }

  componentDidMount() {
    // Dispatch query
    this.dispatchLookup();
  }

  componentDidUpdate(prevProps) {
    if(this.props.query != prevProps.query ||
       this.props.pagination.filtered.page != prevProps.pagination.filtered.page ||
       this.props.pagination.imported.page != prevProps.pagination.imported.page ||
       !filtersEqual(this.props.filtersApplied, prevProps.filtersApplied)) {
      this.dispatchLookup();
    }
  }

  render() {
    if(this.props.isLoading) {
      return <LoadingIndicator />;
    }

    const ref = this.refs[this.props.anchor];
    if(ref) {
      ref.scrollIntoView();
    }

    const filteredRoutes = this.props.routes.filtered;
    const importedRoutes = this.props.routes.imported;

    return (
      <div className="lookup-results">
        <BgpAttributesModal />

        <NoResultsFallback />

        <a ref="filtered" name="routes-filtered" />
        <ResultsView type="filtered"
                     routes={filteredRoutes}

                     page={this.props.pagination.filtered.page}
                     pageSize={this.props.pagination.filtered.pageSize}
                     totalPages={this.props.pagination.filtered.totalPages}
                     totalResults={this.props.pagination.filtered.totalResults}

                     query={this.props.query}

                     displayReasons="filtered" />

        <a ref="received" name="routes-received" />
        <ResultsView type="received"

                     page={this.props.pagination.imported.page}
                     pageSize={this.props.pagination.imported.pageSize}
                     totalPages={this.props.pagination.imported.totalPages}
                     totalResults={this.props.pagination.imported.totalResults}

                     query={this.props.query}

                     routes={importedRoutes} />
      </div>
    );
  }
}

export default connect(
  (state) => {
    const filteredRoutes = state.lookup.routesFiltered;
    const importedRoutes = state.lookup.routesImported;

    return {
      anchor: state.lookup.anchor,
      routes: {
        filtered: filteredRoutes,
        imported: importedRoutes
      },
      pagination: {
        filtered: {
          page: state.lookup.pageFiltered,
          pageSize: state.lookup.pageSizeFiltered,
          totalPages: state.lookup.totalPagesFiltered,
          totalResults: state.lookup.totalRoutesFiltered,
        },
        imported: {
          page: state.lookup.pageImported,
          pageSize: state.lookup.pageSizeImported,
          totalPages: state.lookup.totalPagesImported,
          totalResults: state.lookup.totalRoutesImported,
        }
      },
      isLoading: state.lookup.isLoading,
      query: state.lookup.query,
      filtersApplied: state.lookup.filtersApplied,
    }
  }
)(LookupResults);

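For orientation, this is the slice of the store the container above maps into props. The key names follow the lookup reducer shown earlier; the values here are made up:

const exampleLookupState = {
  query: "10.0.0.0/8",
  queryValue: "10.0.0.0/8",
  isLoading: false,
  anchor: "filtered",

  routesImported: [],      routesFiltered: [],
  pageImported: 0,         pageFiltered: 0,
  pageSizeImported: 250,   pageSizeFiltered: 250,
  totalPagesImported: 1,   totalPagesFiltered: 1,
  totalRoutesImported: 23, totalRoutesFiltered: 5,

  filtersApplied: [],
};
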
@ -1,64 +0,0 @@

/*
 * Manage state
 */

import {filtersUrlEncode} from 'components/filters/encoding'

import {FILTER_GROUP_SOURCES,
        FILTER_GROUP_ASNS,
        FILTER_GROUP_COMMUNITIES,
        FILTER_GROUP_EXT_COMMUNITIES,
        FILTER_GROUP_LARGE_COMMUNITIES} from 'components/filters/groups'

/*
 * Maybe this can be customized and injected into
 * the PageLink component.
 */
export function makeLinkProps(props) {
  const linkPage = parseInt(props.page, 10);

  let pr = props.pageReceived;
  let pf = props.pageFiltered;

  // This could surely be more elegant.
  switch(props.anchor) {
    case "received":
      pr = linkPage;
      break;
    case "filtered":
      pf = linkPage;
      break;
  }

  let pagination = "";
  if (pr) {
    pagination += `pr=${pr}&`;
  }
  if (pf) {
    pagination += `pf=${pf}&`;
  }

  let filtering = "";
  if (props.filtersApplied) {
    filtering = filtersUrlEncode(props.filtersApplied);
  }

  const query = props.routing.query.q || "";

  const search = `?${pagination}q=${query}${filtering}`;
  let hash = null;
  if (props.anchor) {
    hash = `#routes-${props.anchor}`;
  }

  const linkTo = {
    pathname: props.routing.pathname,
    hash: hash,
    search: search,
  };

  return linkTo;
}

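A worked example of makeLinkProps; the pathname and query string are illustrative. With anchor "filtered" the requested page number replaces pf, while pr keeps its previous value:

const linkTo = makeLinkProps({
  page: "2",
  anchor: "filtered",
  pageReceived: 1,
  pageFiltered: 0,
  filtersApplied: null,
  routing: {pathname: "/search", query: {q: "10.0.0.0/8"}},
});
// Result:
// {
//   pathname: "/search",
//   hash: "#routes-filtered",
//   search: "?pr=1&pf=2&q=10.0.0.0/8"
// }
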
@ -1,129 +0,0 @@

/*
 * Lookup Results Table
 * --------------------
 */

import React from 'react'
import {connect} from 'react-redux'
import {Link} from 'react-router'
import {push} from 'react-router-redux'


import {_lookup,
        ColDefault,
        ColNetwork,
        ColFlags,
        ColAsPath} from 'components/routeservers/routes/route/column'

import {showBgpAttributes}
  from 'components/routeservers/routes/bgp-attributes-modal-actions'


// Link Wrappers:
const ColLinkedNeighbor = function(props) {
  const route = props.route;
  const to = `/routeservers/${route.routeserver.id}/protocols/${route.neighbour.id}/routes`;

  return (
    <td>
      <Link to={to}>{_lookup(props.route, props.column)}</Link>
    </td>
  );
}

const ColLinkedRouteserver = function(props) {
  const route = props.route;
  const to = `/routeservers/${route.routeserver.id}`;

  return (
    <td>
      <Link to={to}>{_lookup(props.route, props.column)}</Link>
    </td>
  );
}


// Custom RouteColumn
const RouteColumn = function(props) {
  const widgets = {
    "network": ColNetwork,

    "flags": ColFlags,

    "bgp.as_path": ColAsPath,
    "ASPath": ColAsPath,

    "neighbour.description": ColLinkedNeighbor,
    "neighbour.asn": ColLinkedNeighbor,

    "routeserver.name": ColLinkedRouteserver
  };

  const rsId = props.route.routeserver.id;
  const blackholes = props.blackholesMap[rsId] || [];

  let Widget = widgets[props.column] || ColDefault;
  return (
    <Widget column={props.column} route={props.route}
            displayReasons={props.displayReasons}
            blackholes={blackholes}
            onClick={props.onClick} />
  );
}


class LookupRoutesTable extends React.Component {
  showAttributesModal(route) {
    this.props.dispatch(showBgpAttributes(route));
  }

  render() {
    let routes = this.props.routes;
    const routesColumns = this.props.routesColumns;
    const routesColumnsOrder = this.props.routesColumnsOrder;

    if (!routes || !routes.length) {
      return null;
    }

    let routesView = routes.map((r,i) => {
      return (
        <tr key={i}>
          {routesColumnsOrder.map(col => {
            return (<RouteColumn key={col}
                                 onClick={() => this.showAttributesModal(r)}
                                 blackholesMap={this.props.blackholesMap}
                                 column={col}
                                 route={r}
                                 displayReasons={this.props.displayReasons} />);
            }
          )}
        </tr>
      );
    });

    return (
      <table className="table table-striped table-routes">
        <thead>
          <tr>
            {routesColumnsOrder.map(col => <th key={col}>{routesColumns[col]}</th>)}
          </tr>
        </thead>
        <tbody>
          {routesView}
        </tbody>
      </table>
    );
  }
}

export default connect(
  (state) => ({
    blackholesMap: state.config.blackholes,
    routesColumns: state.config.lookup_columns,
    routesColumnsOrder: state.config.lookup_columns_order,
  })
)(LookupRoutesTable);

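The table renders whatever columns the configuration lists; the widget map above only overrides how certain keys are displayed. A plausible shape for the two config values read from state.config (labels and ordering are illustrative):

const lookup_columns = {
  "network":               "Network",
  "bgp.as_path":           "AS Path",
  "neighbour.description": "Neighbor",
  "routeserver.name":      "Route Server",
};

const lookup_columns_order = [
  "network", "bgp.as_path", "neighbour.description", "routeserver.name",
];
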
@ -1,70 +0,0 @@

/**
 * Bootstrap Modal React Component
 *
 * @author Matthias Hannig <mha@ecix.net>
 */

import React from 'react'

export class Header extends React.Component {
  render() {
    return(
      <div className="modal-header">
        <button type="button"
                className="close"
                aria-label="Close"
                onClick={this.props.onClickClose}>
          <span aria-hidden="true">×</span></button>

        {this.props.children}
      </div>
    );
  }
}

export class Body extends React.Component {
  render() {
    return (
      <div className="modal-body">
        {this.props.children}
      </div>
    );
  }
}


export class Footer extends React.Component {
  render() {
    return(
      <div className="modal-footer">
        {this.props.children}
      </div>
    );
  }
}

export default class Modal extends React.Component {
  render() {
    if(!this.props.show) {
      return null;
    }

    return (
      <div className={this.props.className}>
        <div className="modal modal-open modal-show fade in" role="dialog">
          <div className="modal-dialog" role="document">
            <div className="modal-content">
              {this.props.children}
            </div>
          </div>
        </div>
        <div className="modal-backdrop fade in"
             onClick={this.props.onClickBackdrop}></div>
      </div>
    );
  }
}

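A possible composition of the modal building blocks above; the import path and handler names are placeholders, not taken from this codebase:

import React from 'react'
import Modal, {Header, Body, Footer} from 'components/modals/modal'

const ExampleDialog = (props) => (
  <Modal show={props.show}
         className="example-modal"
         onClickBackdrop={props.onClose}>
    <Header onClickClose={props.onClose}>BGP Attributes</Header>
    <Body>
      <p>Dialog content goes here.</p>
    </Body>
    <Footer>
      <button className="btn btn-default" onClick={props.onClose}>Close</button>
    </Footer>
  </Modal>
);
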
@ -1,12 +0,0 @@

import { combineReducers } from 'redux'

import bgpAttributesModalReducer
  from 'components/routeservers/routes/bgp-attributes-modal-reducer'

export default combineReducers({
  bgpAttributes: bgpAttributesModalReducer
});

@ -1,14 +0,0 @@

import React from 'react'


export default class PageHeader extends React.Component {
  render() {
    return (
      <div className="page-header">
        {this.props.children}
      </div>
    )
  }
}

@ -1,156 +0,0 @@

/**
 * Routeservers Actions
 */

import axios from 'axios'

import {apiError} from 'components/errors/actions'

export const LOAD_ROUTESERVERS_REQUEST = '@routeservers/LOAD_ROUTESERVERS_REQUEST';
export const LOAD_ROUTESERVERS_SUCCESS = '@routeservers/LOAD_ROUTESERVERS_SUCCESS';
export const LOAD_ROUTESERVERS_ERROR = '@routeservers/LOAD_ROUTESERVERS_ERROR';

export const LOAD_ROUTESERVER_STATUS_REQUEST = '@routeservers/LOAD_ROUTESERVER_STATUS_REQUEST';
export const LOAD_ROUTESERVER_STATUS_SUCCESS = '@routeservers/LOAD_ROUTESERVER_STATUS_SUCCESS';
export const LOAD_ROUTESERVER_STATUS_ERROR = '@routeservers/LOAD_ROUTESERVER_STATUS_ERROR';

export const LOAD_ROUTESERVER_PROTOCOL_REQUEST = '@routeservers/LOAD_ROUTESERVER_PROTOCOL_REQUEST';
export const LOAD_ROUTESERVER_PROTOCOL_SUCCESS = '@routeservers/LOAD_ROUTESERVER_PROTOCOL_SUCCESS';
export const LOAD_ROUTESERVER_PROTOCOL_ERROR = '@routeservers/LOAD_ROUTESERVER_PROTOCOL_ERROR';

export const SELECT_GROUP = "@routeservers/SELECT_GROUP";


// Action Creators
export function loadRouteserversRequest() {
  return {
    type: LOAD_ROUTESERVERS_REQUEST
  }
}

export function loadRouteserversSuccess(routeservers) {
  return {
    type: LOAD_ROUTESERVERS_SUCCESS,
    payload: {
      routeservers: routeservers
    }
  }
}

export function loadRouteserversError(error) {
  return {
    type: LOAD_ROUTESERVERS_ERROR,
    payload: {
      error: error
    }
  }
}

export function loadRouteservers() {
  return (dispatch) => {
    dispatch(loadRouteserversRequest())

    axios.get('/api/v1/routeservers')
      .then(
        ({data}) => {
          dispatch(loadRouteserversSuccess(data["routeservers"]));
        },
        (error) => {
          dispatch(apiError(error));
          dispatch(loadRouteserversError(error.data));
        });
  }
}



export function loadRouteserverStatusRequest(routeserverId) {
  return {
    type: LOAD_ROUTESERVER_STATUS_REQUEST,
    payload: {
      routeserverId: routeserverId
    }
  }
}

export function loadRouteserverStatusSuccess(routeserverId, status) {
  return {
    type: LOAD_ROUTESERVER_STATUS_SUCCESS,
    payload: {
      status: status,
      routeserverId: routeserverId
    }
  }
}

export function loadRouteserverStatusError(routeserverId, error) {
  return {
    type: LOAD_ROUTESERVER_STATUS_ERROR,
    payload: {
      error: error,
      routeserverId: routeserverId
    }
  }
}

export function loadRouteserverStatus(routeserverId) {
  return (dispatch) => {
    dispatch(loadRouteserverStatusRequest(routeserverId));
    axios.get(`/api/v1/routeservers/${routeserverId}/status`)
      .then(
        ({data}) => {
          dispatch(loadRouteserverStatusSuccess(routeserverId, data.status));
        },
        (error) => {
          dispatch(apiError(error));
          dispatch(loadRouteserverStatusError(routeserverId, error));
        });
  }
}


export function loadRouteserverProtocolRequest(routeserverId) {
  return {
    type: LOAD_ROUTESERVER_PROTOCOL_REQUEST,
    payload: {
      routeserverId: routeserverId,
    }
  }
}

export function loadRouteserverProtocolSuccess(routeserverId, protocol, api) {
  return {
    type: LOAD_ROUTESERVER_PROTOCOL_SUCCESS,
    payload: {
      routeserverId: routeserverId,
      protocol: protocol,
      api: api
    }
  }
}

export function loadRouteserverProtocol(routeserverId) {
  return (dispatch) => {
    dispatch(loadRouteserverProtocolRequest(routeserverId));
    axios.get(`/api/v1/routeservers/${routeserverId}/neighbors`)
      .then(
        ({data}) => {
          dispatch(loadRouteserverProtocolSuccess(
            routeserverId,
            data.neighbours,
            data.api,
          ));
        },
        (error) => dispatch(apiError(error)));
  }
}


export function selectGroup(group) {
  return {
    type: SELECT_GROUP,
    payload: group,
  }
}

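These action creators return thunks, so they need a store with redux-thunk applied. A minimal wiring sketch; the reducer here is a stand-in for the real routeservers reducer, which is not part of this excerpt:

import {createStore, applyMiddleware} from 'redux'
import thunk from 'redux-thunk'

// Stand-in reducer; the real one handles the action types defined above.
const routeserversReducer = (state = {all: []}, action) => state;

const store = createStore(routeserversReducer, applyMiddleware(thunk));

store.dispatch(loadRouteservers());           // GET /api/v1/routeservers
store.dispatch(loadRouteserverStatus(0));     // GET /api/v1/routeservers/0/status
store.dispatch(loadRouteserverProtocol(0));   // GET /api/v1/routeservers/0/neighbors
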
@ -1,41 +0,0 @@

import React from 'react'
import {connect} from 'react-redux'

import {resolveCommunities} from './utils'

class FilterReason extends React.Component {
  render() {
    const route = this.props.route;

    if (!this.props.rejectReasons || !route || !route.bgp ||
        !route.bgp.large_communities) {
      return null;
    }

    const reasons = resolveCommunities(
      this.props.rejectReasons, route.bgp.large_communities,
    );

    const reasonsView = reasons.map(([community, reason], key) => {
      const cls = `reject-reason reject-reason-${community[1]}-${community[2]}`;
      return (
        <p key={key} className={cls}>
          <a href={`http://irrexplorer.nlnog.net/search/${route.network}`}
             target="_blank" >{reason}</a>
        </p>
      );
    });

    return (<div className="reject-reasons">{reasonsView}</div>);
  }
}

export default connect(
  state => {
    return {
      rejectReasons: state.routeservers.rejectReasons,
    }
  }
)(FilterReason);

@ -1,53 +0,0 @@

import React from 'react'
import {connect} from 'react-redux'

import {makeReadableCommunity} from './utils'

/*
 * Make style tags
 * Derive classes from community parts.
 */
function _makeStyleTags(community) {
  return community.map((part, i) => {
    return `label-bgp-community-${i}-${part}`;
  });
}


/*
 * Render community label
 */
class Label extends React.Component {
  render() {
    // Lookup communities
    const readableCommunity = makeReadableCommunity(
      this.props.communities,
      this.props.community);
    const key = this.props.community.join(":");

    let cls = 'label label-bgp-community ';
    if (!readableCommunity) {
      cls += "label-bgp-unknown";
      // Default label
      return (
        <span className={cls}>{key}</span>
      );
    }

    // Apply style
    cls += "label-info ";

    const styleTags = _makeStyleTags(this.props.community);
    cls += styleTags.join(" ");

    return (<span className={cls}>{readableCommunity} ({key})</span>);
  }
}

export default connect(
  (state) => ({
    communities: state.config.bgp_communities,
  })
)(Label);

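Possible usage of the connected label; the import path is a guess and the community value is illustrative:

import React from 'react'
import CommunityLabel from 'components/routeservers/communities/label'

const Example = () => (
  <span>
    Tagged with: <CommunityLabel community={[1234, 42, 7]} />
  </span>
);
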
@ -1,41 +0,0 @@

import React from 'react'
import {connect} from 'react-redux'

import {resolveCommunities} from './utils'

class NoExportReason extends React.Component {
  render() {
    const route = this.props.route;

    if (!this.props.noexportReasons || !route || !route.bgp ||
        !route.bgp.large_communities) {
      return null;
    }

    const reasons = resolveCommunities(
      this.props.noexportReasons, route.bgp.large_communities,
    );

    const reasonsView = reasons.map(([community, reason], key) => {
      const cls = `noexport-reason noexport-reason-${community[1]}-${community[2]}`;
      return (
        <p key={key} className={cls}>
          <a href={`http://irrexplorer.nlnog.net/search/${route.network}`}
             target="_blank" >{reason}</a>
        </p>
      );
    });

    return (<div className="reject-reasons">{reasonsView}</div>);
  }
}

export default connect(
  state => {
    return {
      noexportReasons: state.routeservers.noexportReasons,
    }
  }
)(NoExportReason);

@ -1,102 +0,0 @@

/*
 * Communities helper
 */

/*
 * Communities are represented as a nested object:
 * {
 *   1234: {
 *     23: "community-leaf",
 *     42: {
 *       1: "large-community-leaf"
 *     }
 *   }
 * }
 */

/*
 * Resolve a community description from the above described
 * tree structure.
 */
export function resolveCommunity(base, community) {
  let lookup = base;
  for (const part of community) {
    if (typeof(lookup) !== "object") {
      return null;
    }
    let res = lookup[part];
    if (!res) {
      // Try the wildcard
      if (lookup["*"]) {
        res = lookup["*"]
      } else {
        return null; // We did everything we could
      }
    }
    lookup = res;
  }
  return lookup;
}

/*
 * Resolve all communities
 */
export function resolveCommunities(base, communities) {
  let results = [];
  for (const c of communities) {
    const description = resolveCommunity(base, c);
    if (description != null) {
      results.push([c, description]);
    }
  }
  return results;
}


/*
 * Reject candidate helpers:
 *
 * - check if prefix is a reject candidate
 * - make css classes
 */

export function isRejectCandidate(rejectCommunities, route) {
  // Check if any reject candidate community is set
  const communities = route.bgp.communities;
  const largeCommunities = route.bgp.large_communities;

  const resolved = resolveCommunities(
    rejectCommunities, largeCommunities
  );

  return (resolved.length > 0);
}

/*
 * Expand variables in string:
 *   "Test AS$0 rejects $2"
 * will expand with [23, 42, 123] to
 *   "Test AS23 rejects 123"
 */
export function expandVars(str, vars) {
  if (!str) {
    return str; // We don't have to do anything.
  }

  var res = str;
  vars.map((v, i) => {
    res = res.replace(`$${i}`, v);
  });

  return res;
}

export function makeReadableCommunity(communities, community) {
  const label = resolveCommunity(communities, community);
  return expandVars(label, community);
}

export function communityRepr(community) {
  return community.join(":");
}

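A worked example for the helpers above, using a small description tree in the format documented at the top of the file (the values are made up):

const communities = {
  1234: {
    23: "community-leaf",
    42: {"*": "AS$0 tagged with $2"},
  },
};

resolveCommunity(communities, [1234, 23]);      // "community-leaf"
resolveCommunity(communities, [1234, 42, 7]);   // "AS$0 tagged with $2" (wildcard match)
resolveCommunity(communities, [9, 9, 9]);       // null

resolveCommunities(communities, [[1234, 23], [9, 9, 9]]);
// [[[1234, 23], "community-leaf"]]

expandVars("AS$0 tagged with $2", [1234, 42, 7]);   // "AS1234 tagged with 7"
makeReadableCommunity(communities, [1234, 42, 7]);  // "AS1234 tagged with 7"
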
@ -1,32 +0,0 @@

import React from 'react'
import {connect} from 'react-redux'

class Details extends React.Component {
  render() {
    let rsStatus = this.props.details[this.props.routeserverId];
    if (!rsStatus) {
      return null;
    }

    // Get routeserver name
    let rs = this.props.routeservers[this.props.routeserverId];
    if (!rs) {
      return null;
    }

    return (
      <span className="status-name">{rs.name}</span>
    );
  }
}

export default connect(
  (state) => {
    return {
      routeservers: state.routeservers.byId,
      details: state.routeservers.details
    }
  }
)(Details);

@ -1,92 +0,0 @@

import {debounce} from 'underscore'

import React from 'react'
import {connect} from 'react-redux'
import {replace} from 'react-router-redux'

import PageHeader from 'components/page-header'
import Details from './details'
import Status from './status'

import SearchInput from 'components/search-input'

import Protocols from './protocols'
import QuickLinks from './protocols/quick-links'

import {setFilterValue} from './protocols/actions'
import {makeQueryLinkProps} from './protocols/routing'


class RouteserversPage extends React.Component {

  constructor(props) {
    super(props);
    this.dispatchDebounced = debounce(this.props.dispatch, 350);
  }


  setFilter(value) {
    // Set filter value (for input rendering)
    this.props.dispatch(setFilterValue(value));

    // Update location delayed
    this.dispatchDebounced(replace(
      makeQueryLinkProps(
        this.props.routing,
        value,
        this.props.sortColumn,
        this.props.sortOrder)));
  }

  render() {
    return(
      <div className="routeservers-page">
        <PageHeader>
          <Details routeserverId={this.props.params.routeserverId} />
        </PageHeader>

        <div className="row details-main">
          <div className="col-main col-lg-9 col-md-12">
            <div className="card">
              <SearchInput
                value={this.props.filterValue}
                placeholder="Filter by Neighbor, ASN or Description"
                onChange={(e) => this.setFilter(e.target.value)}
              />
            </div>
            <QuickLinks />

            <Protocols protocol="bgp" routeserverId={this.props.params.routeserverId} />
          </div>
          <div className="col-lg-3 col-md-12 col-aside-details">
            <div className="card">
              <Status routeserverId={this.props.params.routeserverId}
                      cacheStatus={this.props.cacheStatus} />
            </div>
          </div>
        </div>
      </div>
    );
  }
}

export default connect(
  (state) => {
    return {
      routing: state.routing.locationBeforeTransitions,

      filterValue: state.neighbors.filterValue,
      sortColumn: state.neighbors.sortColumn,
      sortOrder: state.neighbors.sortOrder,

      cacheStatus: {
        generatedAt: state.neighbors.cachedAt,
        ttl: state.neighbors.cacheTtl,
      }

    };
  }
)(RouteserversPage);

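The constructor above wraps dispatch in underscore's debounce so that typing in the filter box updates the URL only after the input has settled for 350 ms. The pattern in isolation, with an illustrative callback:

import {debounce} from 'underscore'

const navigate = (value) => console.log("replace location with q =", value);
const navigateDebounced = debounce(navigate, 350);

// Rapid keystrokes collapse into a single navigation once typing pauses:
["A", "AS", "AS2", "AS23"].forEach((v) => navigateDebounced(v));
// -> logs once: replace location with q = AS23
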
@ -1,15 +0,0 @@

// Actions
export const SET_FILTER_VALUE = "@neighbors/SET_FILTER_VALUE";

// Action Creators: Set Filter Query
export function setFilterValue(value) {
  return {
    type: SET_FILTER_VALUE,
    payload: {
      value: value
    }
  }
}

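The reducer consuming this action is not part of this excerpt; a minimal counterpart, inferred from the state.neighbors.filterValue read in the page component above, could look like this:

const initialState = {filterValue: ""};

export default function neighborsReducer(state = initialState, action) {
  switch (action.type) {
    case SET_FILTER_VALUE:
      return Object.assign({}, state, {filterValue: action.payload.value});
    default:
      return state;
  }
}
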
Some files were not shown because too many files have changed in this diff.