Compare commits

...

11 Commits

9b6fc699f2  Added push_docker script  2024-01-24 22:41:29 +01:00
    All checks were successful
    checks-impure / test (pull_request) Successful in 28s
    checks / test (pull_request) Successful in 3m47s

d232510c0e  Added build_docker.sh script  2024-01-24 22:41:29 +01:00

dcaecba393  Fixed incorrect grouping, in eventmessages  2024-01-24 22:41:03 +01:00

7901712c4c  update ui-assets.nix  2024-01-24 21:26:10 +00:00
    All checks were successful
    checks-impure / test (push) Has been skipped
    checks / test (push) Has been skipped
    assets1 / test (push) Has been skipped

48df7352da  Merge pull request 'dynamic-routing-sidebar' (#67) from dynamic-routing-sidebar into main (author: Erdem-Arslan)  2024-01-24 22:23:50 +01:00
    Some checks failed
    checks-impure / test (push) Successful in 26s
    checks / test (push) Successful in 1m14s
    assets1 / test (push) Has been cancelled
    Reviewed-on: #67

c5c4ab7178  remove variable index within mapping (author: Arslan, Erdem)  2024-01-24 22:19:46 +01:00
    All checks were successful
    checks-impure / test (pull_request) Successful in 26s
    checks / test (pull_request) Successful in 3m10s

22bcbf6819  fix formatting (author: Arslan, Erdem)  2024-01-24 22:14:19 +01:00
    Some checks failed
    checks-impure / test (pull_request) Successful in 27s
    checks / test (pull_request) Failing after 1m51s

2ab2282116  implement dynamic routing within the sidebar (author: Arslan, Erdem)  2024-01-24 22:12:00 +01:00

1c6e33e74f  Merge pull request 'Fixed wrong ordering of eventmessages' (#65) from Qubasa-main into main  2024-01-24 18:51:56 +01:00
    All checks were successful
    checks-impure / test (push) Successful in 26s
    checks / test (push) Successful in 1m13s
    assets1 / test (push) Successful in 22s
    Reviewed-on: #65

1757bf1952  Fixed wrong ordering of eventmessages  2024-01-24 18:51:56 +01:00

c726e4bb41  Merge pull request 'Improved links to API doc in README' (#64) from Qubasa-main into main  2024-01-24 18:04:07 +01:00
    All checks were successful
    checks-impure / test (push) Successful in 26s
    checks / test (push) Successful in 1m14s
    assets1 / test (push) Successful in 22s
    Reviewed-on: #64
10 changed files with 160 additions and 76 deletions

.gitignore (vendored, 2 lines added)

@@ -23,3 +23,5 @@ htmlcov
 # georgs
 pkgs/.vs/
 pkgs/clan-cli/.hypothesis/
+ui-assets.tar.gz
+ui-release

README (file path not shown)

@@ -24,9 +24,9 @@ For Entity object go to
 - [tests/openapi_client/docs/ResolutionApi.md](tests/openapi_client/docs/ResolutionApi.md)
 - [tests/openapi_client/docs/RepositoriesApi.md](tests/openapi_client/docs/RepositoriesApi.md)

-# Building a Docker Image
+# Building a Docker Image if the Backend Changed

-To build a docker image of the frontend and backend be inside the `pkgs/clan-cli` folder and execute:
+To build a new docker image when the backend code changed be inside the `pkgs/clan-cli` folder and execute:

 ```bash
 nix build .#clan-docker
@@ -48,7 +48,22 @@ docker run -p 127.0.0.1:2979:2979 clan-docker:latest
 [flake-module.nix at line 22](flake-module.nix)
 - Documentation on `dockerTools.buildImage` you can find here: https://nix.dev/tutorials/nixos/building-and-running-docker-images.html

-## Docker build with UI changes
+## Building a Docker Image if the Frontend Changed
+
+To build a new docker image when the frontend code changed you first need
+to get the `GITLAB_TOKEN` go to [repo access tokens](https://git.tu-berlin.de/internet-of-services-lab/service-aware-network-front-end/-/settings/access_tokens) and generate one. Then execute
+```bash
+export GITLAB_TOKEN="<your-access-token>"
+```
+Afterwards you can execute:
+```bash
+./build_docker.sh
+```
+
+### The Script Explained
+
 If changes to the UI have been made, and you want them to propagate to the docker container edit the file: [../ui/nix/ui-assets.nix](../ui/nix/ui-assets.nix).
 This is where a release version of the frontend is downloaded and integrated into the cli and the docker build. To do this first execute
@@ -63,11 +78,7 @@ Make a tarball out of it called `ui-assets.tar.gz`
 tar -czvf ui-assets.tar.gz ui-release/lib/node_modules/clan-ui/out/
 ```
-Upload ui-assets.tar.gz to gitlab. To get the `GITLAB_TOKEN` go to [repo access tokens](https://git.tu-berlin.de/internet-of-services-lab/service-aware-network-front-end/-/settings/access_tokens) and generate one. Then execute
-```bash
-export GITLAB_TOKEN="<your-access-token>"
-```
+Upload ui-assets.tar.gz to gitlab.

 ```bash
 curl --header "PRIVATE-TOKEN: $GITLAB_TOKEN" \
@@ -103,6 +114,12 @@ nix build .#clan-docker
 # Uploading a Docker Image

+You can use the script:
+```bash
+./push_docker.sh
+```
+
 Login to the tu docker image server
 ```bash
@@ -121,6 +138,8 @@ Push the image to the git registry
 docker image push git.tu-berlin.de:5000/internet-of-services-lab/service-aware-network-front-end:latest
 ```

+# Using the Uploaded Docker Image
+
 Pull the image
 ```bash

pkgs/clan-cli/build_docker.sh (new executable file, 42 lines added)

@@ -0,0 +1,42 @@
#!/usr/bin/env bash
# shellcheck shell=bash
set -euo pipefail
# GITLAB_TOKEN
if [[ -z "${GITLAB_TOKEN:-}" ]]; then
cat <<EOF
GITLAB_TOKEN environment var is not set. Please generate a new token under
https://git.tu-berlin.de/internet-of-services-lab/service-aware-network-front-end/-/settings/access_tokens
EOF
exit 1
fi
# Create a new ui build
nix build .#ui --out-link ui-release
tar -czvf ui-assets.tar.gz ui-release/lib/node_modules/clan-ui/out/
# upload ui assets to gitlab
gitlab_base="https://git.tu-berlin.de/api/v4/projects/internet-of-services-lab%2Fservice-aware-network-front-end"
curl --header "PRIVATE-TOKEN: $GITLAB_TOKEN" \
--upload-file ./ui-assets.tar.gz \
"$gitlab_base/packages/generic/ui-assets/1.0.0/ui-assets.tar.gz"
# write url and hash to ui-assets.nix
url="$gitlab_base/packages/generic/ui-assets/1.0.0/ui-assets.tar.gz"
PROJECT_DIR=$(git rev-parse --show-toplevel)
cat > "$PROJECT_DIR/pkgs/ui/nix/ui-assets.nix" <<EOF
{ fetchzip }:
fetchzip {
url = "$url";
sha256 = "$(nix-prefetch-url --unpack $url)";
}
EOF
cat <<EOF
Please commit the changes to ui-assets.nix and push them to the repository.
If you want clan webui to use the new ui assets.
$ git commit -m "Update ui-assets.nix" "$PROJECT_DIR/pkgs/ui/nix/ui-assets.nix"
$ git push
EOF

Python API router for event messages (file path not shown)

@@ -1,11 +1,13 @@
+import json
 import logging
 import time
 import typing
+from collections import OrderedDict
 from typing import Any, List, Optional

 import httpx
 from fastapi import APIRouter, BackgroundTasks, Depends, Query
-from fastapi.responses import HTMLResponse, JSONResponse
+from fastapi.responses import HTMLResponse, PlainTextResponse
 from sqlalchemy.orm import Session

 from clan_cli.config import ap_url, c1_url, c2_url, dlg_url, group_type_to_label
@@ -360,19 +362,24 @@ def create_eventmessage(
 @typing.no_type_check
 @router.get(
     "/api/v1/event_messages",
-    response_class=JSONResponse,
+    response_class=PlainTextResponse,
     tags=[Tags.eventmessages],
 )
 def get_all_eventmessages(
     skip: int = 0, limit: int = 100, db: Session = Depends(sql_db.get_db)
-) -> JSONResponse:
+) -> PlainTextResponse:
+    # SQL sorts eventmessages by timestamp, so we don't need to sort them here
     eventmessages = sql_crud.get_eventmessages(db, skip=skip, limit=limit)
-    result: dict[int, dict[int, List[Eventmessage]]] = {}
-    for msg in eventmessages:
+    cresult: List[OrderedDict[int, OrderedDict[int, List[Eventmessage]]]] = []
+    cresult_idx = 0
+    cresult.append(OrderedDict())
+    for idx, msg in enumerate(eventmessages):
         # Use the group_type_to_label from config.py to get the group name and msg_type name
         group = group_type_to_label.get(msg.group, None)
-        group_name = group.get("name", None) if group is not None else str(msg.group)
+        group_name = (
+            str(group.get("name", None)) if group is not None else str(msg.group)
+        )
         msg_type_name = (
             group.get(msg.msg_type, None) if group is not None else str(msg.msg_type)
         )
@@ -383,14 +390,26 @@ def get_all_eventmessages(
         des_name = sql_crud.get_entity_by_did(db, msg.des_did)
         des_name = des_name if des_name is None else des_name.name

-        # Initialize the result array and dictionary
-        if result.get(group_name) is None:
-            result[group_name] = {}
-        if result[group_name].get(msg.group_id) is None:
-            result[group_name][msg.group_id] = []
+        result = cresult[cresult_idx]
+        if result.get("group_name") is None:
+            # Initialize the result array and dictionary
+            result["group_name"] = group_name
+        elif result["group_name"] != group_name:
+            # If the group name changed, create a new result array and dictionary
+            cresult_idx += 1
+            cresult.append(OrderedDict())
+            result = cresult[cresult_idx]
+            result["group_name"] = group_name
+
+        if result.get("groups") is None:
+            result["groups"] = OrderedDict()
+        if result["groups"].get(msg.group_id) is None:
+            result["groups"][msg.group_id] = []

         # Append the eventmessage to the result array
-        result_arr = result[group_name][msg.group_id]
+        result_arr = result["groups"][msg.group_id]
         result_arr.append(
             Eventmessage(
                 id=msg.id,
@@ -408,9 +427,7 @@ def get_all_eventmessages(
             ).dict()
         )

-    # sort by timestamp
-    result_arr.sort(key=lambda x: x["timestamp"])
-    return JSONResponse(content=result, status_code=200)
+    return PlainTextResponse(content=json.dumps(cresult, indent=4), status_code=200)


 ##############################

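For context on the change above: the reworked `get_all_eventmessages` emits a JSON array in which each entry carries a `group_name` plus a `groups` mapping from `group_id` to that group's messages, and a new entry is started whenever the group label changes, so the timestamp order coming from SQL is preserved across groups. A minimal sketch of that payload shape, assuming made-up labels, ids, and a truncated field set (the real entries are full `Eventmessage(...).dict()` objects):

```python
import json
from collections import OrderedDict

# Hypothetical payload mirroring the structure built in get_all_eventmessages;
# the labels, ids, and fields below are illustrative only.
grouped = [
    OrderedDict(
        group_name="resolution",  # label looked up via group_type_to_label
        groups=OrderedDict(
            {
                41: [  # group_id -> messages, already in timestamp order
                    {"id": 1, "timestamp": 1706115120, "msg_type": 1},
                    {"id": 2, "timestamp": 1706115122, "msg_type": 2},
                ]
            }
        ),
    ),
    # A new top-level entry starts as soon as the group label changes,
    # so interleaved groups show up as separate runs rather than being merged.
    OrderedDict(group_name="delegation", groups=OrderedDict({7: []})),
]

# The endpoint serializes with json.dumps and ships the text in a PlainTextResponse.
print(json.dumps(grouped, indent=4))
```

Note that JSON object keys are always strings, so the integer `group_id` keys arrive on the client as `"41"` and `"7"`.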
sql_crud module (file path not shown)

@@ -1,7 +1,7 @@
 # Imports
 from typing import List, Optional

-from sqlalchemy import func
+from sqlalchemy import asc, func
 from sqlalchemy.orm import Session
 from sqlalchemy.sql.expression import true
@@ -319,4 +319,11 @@ def create_eventmessage(
 def get_eventmessages(
     db: Session, skip: int = 0, limit: int = 100
 ) -> List[sql_models.Eventmessage]:
-    return db.query(sql_models.Eventmessage).offset(skip).limit(limit).all()
+    # Use order_by and desc to sort by timestamp
+    return (
+        db.query(sql_models.Eventmessage)
+        .order_by(asc(sql_models.Eventmessage.timestamp))
+        .offset(skip)
+        .limit(limit)
+        .all()
+    )
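As a side note, here is a self-contained sketch of the ordered-paging pattern used in the new `get_eventmessages` query, run against an in-memory SQLite database; the `Msg` model is a simplified stand-in for illustration, not the project's `sql_models.Eventmessage`. The point is that `order_by` is applied before `offset`/`limit`, so every page respects timestamp order and the route handler no longer needs to sort in memory.

```python
from sqlalchemy import Column, Integer, asc, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Msg(Base):
    # Simplified stand-in model, for illustration only
    __tablename__ = "msgs"
    id = Column(Integer, primary_key=True, autoincrement=True)
    timestamp = Column(Integer, index=True)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as db:
    db.add_all([Msg(timestamp=t) for t in (30, 10, 20)])
    db.commit()

    # Ordering happens in SQL, before pagination is applied
    page = (
        db.query(Msg)
        .order_by(asc(Msg.timestamp))
        .offset(0)
        .limit(2)
        .all()
    )
    print([m.timestamp for m in page])  # [10, 20]
```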

sql_models module (file path not shown)

@@ -101,7 +101,7 @@ class Eventmessage(Base):
     ## Queryable body ##
     # Primary Key
     id = Column(Integer, primary_key=True, autoincrement=True)
-    timestamp = Column(Integer, unique=True, index=True)
+    timestamp = Column(Integer, index=True)
     group = Column(Integer, index=True)
     group_id = Column(Integer, index=True)
     msg_type = Column(Integer, index=True)  # message type for the label

pkgs/clan-cli/push_docker.sh (new executable file, 9 lines added)

@@ -0,0 +1,9 @@
#!/usr/bin/env bash
# shellcheck shell=bash
set -euo pipefail
docker login git.tu-berlin.de:5000
docker load < result
docker image tag clan-docker:latest git.tu-berlin.de:5000/internet-of-services-lab/service-aware-network-front-end:latest
docker image push git.tu-berlin.de:5000/internet-of-services-lab/service-aware-network-front-end:latest

API test module (file path not shown)

@@ -122,49 +122,50 @@ def test_create_services(api_client: ApiClient) -> None:
 random.seed(77)


-def create_eventmessages(num: int = 2) -> list[EventmessageCreate]:
+def create_eventmessages(num: int = 4) -> list[EventmessageCreate]:
     res = []
     starttime = int(time.time())
-    for i in range(num):
-        group_id = i % 5 + random.getrandbits(6)
+    for idx in range(num):
+        i2 = idx + 1
+        group_id = i2 % 5 + random.getrandbits(6) + 1
         em_req_send = EventmessageCreate(
-            timestamp=starttime + i * 10,
-            group=i % 5,
+            timestamp=starttime + i2 * 10,
+            group=i2 % 5,
             group_id=group_id,
             msg_type=1,
-            src_did=f"did:sov:test:12{i}",
-            des_did=f"did:sov:test:12{i+1}",
+            src_did=f"did:sov:test:12{i2}",
+            des_did=f"did:sov:test:12{i2+1}",
             msg={},
         )
         res.append(em_req_send)
         em_req_rec = EventmessageCreate(
-            timestamp=starttime + (i * 10) + 2,
-            group=i % 5,
+            timestamp=starttime + (i2 * 10) + 2,
+            group=i2 % 5,
             group_id=group_id,
             msg_type=2,
-            src_did=f"did:sov:test:12{i}",
-            des_did=f"did:sov:test:12{i+1}",
+            src_did=f"did:sov:test:12{i2}",
+            des_did=f"did:sov:test:12{i2+1}",
             msg={},
         )
         res.append(em_req_rec)
-        group_id = i % 5 + random.getrandbits(6)
+        group_id = i2 % 5 + random.getrandbits(6)
         em_res_send = EventmessageCreate(
-            timestamp=starttime + i * 10 + 4,
-            group=i % 5,
+            timestamp=starttime + i2 * 10 + 4,
+            group=i2 % 5,
             group_id=group_id,
             msg_type=3,
-            src_did=f"did:sov:test:12{i+1}",
-            des_did=f"did:sov:test:12{i}",
+            src_did=f"did:sov:test:12{i2+1}",
+            des_did=f"did:sov:test:12{i2}",
             msg={},
         )
         res.append(em_res_send)
         em_res_rec = EventmessageCreate(
-            timestamp=starttime + (i * 10) + 8,
-            group=i % 5,
+            timestamp=starttime + (i2 * 10) + 8,
+            group=i2 % 5,
             group_id=group_id,
             msg_type=4,
-            src_did=f"did:sov:test:12{i+1}",
-            des_did=f"did:sov:test:12{i}",
+            src_did=f"did:sov:test:12{i2+1}",
+            des_did=f"did:sov:test:12{i2}",
             msg={},
         )
         res.append(em_res_rec)

ui-assets.nix (full path not shown)

@@ -1,5 +1,5 @@
 { fetchzip }:
 fetchzip {
-  url = "https://gitea.gchq.icu/api/packages/IoSL/generic/IoSL-service-aware-frontend/12ndzp04vy7xmqk90gakb4igy2qjf1pcfmr94r2cmpjrkkljdgbi/assets.tar.gz";
-  sha256 = "12ndzp04vy7xmqk90gakb4igy2qjf1pcfmr94r2cmpjrkkljdgbi";
+  url = "https://gitea.gchq.icu/api/packages/IoSL/generic/IoSL-service-aware-frontend/0p1dw924f4sdkq26fd3rrb9qmryl84hdn5plz9ds105xv6al4ikf/assets.tar.gz";
+  sha256 = "0p1dw924f4sdkq26fd3rrb9qmryl84hdn5plz9ds105xv6al4ikf";
 }

Sidebar component (file path not shown)

@@ -9,6 +9,7 @@ import {
   Tooltip,
   useMediaQuery,
 } from "@mui/material";
+import { useGetAllEntities } from "@/api/entities/entities";
 import Image from "next/image";
 import React, { ReactNode } from "react";
@@ -33,32 +34,7 @@ type MenuEntry = {
   subMenuEntries?: MenuEntry[];
 };

-export const menuEntityEntries: MenuEntry[] = [
-  {
-    icon: <PersonIcon />,
-    label: "C1",
-    to: "/client/C1",
-    disabled: false,
-  },
-  {
-    icon: <PersonIcon />,
-    label: "C2",
-    to: "/client/C2",
-    disabled: false,
-  },
-  {
-    icon: <PersonIcon />,
-    label: "C3",
-    to: "/client/C3",
-    disabled: false,
-  },
-  {
-    icon: <PersonIcon />,
-    label: "C4",
-    to: "/client/C4",
-    disabled: false,
-  },
-];
+export let menuEntityEntries: MenuEntry[] = [];

 export const menuEntries: MenuEntry[] = [
   {
@@ -96,6 +72,7 @@ interface SidebarProps {
 }

 export function Sidebar(props: SidebarProps) {
+  const { data: entityData } = useGetAllEntities();
   const { show, onClose } = props;
   const [activeMenuItem, setActiveMenuItem] = React.useState(
     typeof window !== "undefined" ? window.location.pathname : "",
@@ -113,12 +90,22 @@ export function Sidebar(props: SidebarProps) {
   };

   React.useEffect(() => {
+    if (entityData) {
+      menuEntityEntries = Array.isArray(entityData.data)
+        ? entityData.data.map((entity) => ({
+            icon: <PersonIcon />,
+            label: entity.name,
+            to: `/client/${entity.name}`,
+            disabled: false,
+          }))
+        : [];
+    }
+
     if (isSmallerScreen) {
       setCollapseMenuOpen(false);
     } else {
       setCollapseMenuOpen(true);
     }
-  }, [isSmallerScreen]);
+  }, [isSmallerScreen, entityData]);

   return (
     <aside
@@ -213,7 +200,7 @@ export function Sidebar(props: SidebarProps) {
               unmountOnExit
             >
               <List component="div" disablePadding>
-                {menuEntityEntries.map((menuEntry, idx) => (
+                {menuEntityEntries?.map((menuEntry, idx) => (
                   <ListItemButton
                     key={idx}
                     sx={{ pl: 4 }}