Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/test_backend.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ jobs:
- name: Checkout code
uses: actions/checkout@v3


- name: Start test docker services
run: |
make start-test-infra
Expand All @@ -42,7 +43,6 @@ jobs:
until nc -z river-localstack 4566; do sleep 1; done

- name: Run tests
uses: prefix-dev/setup-pixi@v0.8.1
env:
GH_USERNAME: ${{ secrets.GH_USERNAME }}
GH_ACCESS_TOKEN: ${{ secrets.GH_ACCESS_TOKEN }}
Expand Down
1,111 changes: 674 additions & 437 deletions LICENSE

Large diffs are not rendered by default.

65 changes: 38 additions & 27 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,21 @@ SLURM_IMAGE := nttg8100/river-slurm:$(SLURM_VERSION)
LOCALSTACK_IMAGE := localstack/localstack:latest
NODE_VERSION := 20.17.0
PYTHON_VERSION := 3.12.11
PYPY_VERSION := 3.11-v7.3.20-linux64

# DEV
## Setup the dependencies
.PHONY: dev-frontend dev-traefik dev-slurm publish-slurm start-slurm start-redis start-localstack start-dev-db migrate-dev-db start-test-db migrate-test-db format-backend dev start-dev-infra
# Bootstrap pixi into ~/.pixi/bin (file target: skipped once the binary exists).
# NOTE: the official install script is hosted at pixi.sh — pixi.dev is not the
# documented download host and the curl would 404/redirect unpredictably.
~/.pixi/bin/pixi:
	which pixi || curl -fsSL https://pixi.sh/install.sh | bash

# Bootstrap a local PyPy toolchain under backend/ and install the backend deps.
# This is a *file* target — once backend/pypy<ver>/bin/pypy3 exists the rule is
# skipped, which is exactly what we want. (The previous `.PHONY:
# install-backend-deps` line declared a target that is never defined anywhere
# in this Makefile; it has been removed. Do not mark this rule .PHONY, or the
# tarball would be re-downloaded on every invocation.)
backend/pypy${PYPY_VERSION}/bin/pypy3:
	wget https://downloads.python.org/pypy/pypy${PYPY_VERSION}.tar.bz2 -O backend/pypy${PYPY_VERSION}.tar.bz2
	tar xf backend/pypy${PYPY_VERSION}.tar.bz2 -C backend
	cd backend && ./pypy${PYPY_VERSION}/bin/pypy3 -m ensurepip && ./pypy${PYPY_VERSION}/bin/pypy3 -mpip install -r requirements.txt

dev-frontend: ~/.pixi/bin/pixi
cd frontend && pixi run npm install -f

Expand All @@ -37,17 +45,17 @@ start-localstack:
start-dev-db:
docker compose up dev-db -d

migrate-dev-db:
cd backend && APP_ENV="dev" pixi run aerich init-db || echo "DB already existed"
cd backend && APP_ENV="dev" pixi run aerich migrate || echo "Nothing to migrate"
cd backend && APP_ENV="dev" pixi run aerich upgrade
# Run aerich schema migrations against the dev database. Depends on the local
# PyPy toolchain being unpacked first. init-db and migrate tolerate failure
# (|| echo) so re-runs on an already-initialized, up-to-date DB are no-ops.
migrate-dev-db: backend/pypy${PYPY_VERSION}/bin/pypy3
cd backend && APP_ENV="dev" ./pypy${PYPY_VERSION}/bin/pypy3 -m aerich init-db || echo "DB already existed"
cd backend && APP_ENV="dev" ./pypy${PYPY_VERSION}/bin/pypy3 -m aerich migrate || echo "Nothing to migrate"
cd backend && APP_ENV="dev" ./pypy${PYPY_VERSION}/bin/pypy3 -m aerich upgrade

start-test-db:
docker compose up test-db -d

migrate-test-db: ~/.pixi/bin/pixi
cd backend && pixi run aerich init-db || echo "DB already existed"
cd backend && pixi run aerich upgrade
# Apply aerich migrations to the test database.
# NOTE(review): unlike migrate-dev-db this sets no APP_ENV — presumably the CI
# job exports APP_ENV=test before calling this; confirm, otherwise aerich will
# target the dev connection. It also skips the `aerich migrate` step that the
# dev variant runs — verify that is intentional (tests apply only committed
# migration files, never generate new ones).
migrate-test-db: backend/pypy${PYPY_VERSION}/bin/pypy3
cd backend && ./pypy${PYPY_VERSION}/bin/pypy3 -m aerich init-db || echo "DB already existed"
cd backend && ./pypy${PYPY_VERSION}/bin/pypy3 -m aerich upgrade

format-backend:
black . --line-length 120
Expand All @@ -62,11 +70,11 @@ start-dev-infra: start-slurm start-redis start-dev-db
@echo "Start infra setup"

## Start dev
start-backend: ~/.pixi/bin/pixi
cd backend && pixi run python dev.py
# Start the dev backend under socketify with auto-reload, using the local PyPy.
# Fix: invoke `pypy3`, matching the prerequisite path — the recipe previously
# called `./bin/pypy`, which relies on an extra symlink not guaranteed to be
# present in every PyPy tarball layout.
start-backend: backend/pypy${PYPY_VERSION}/bin/pypy3
	cd backend && APP_ENV=dev ./pypy${PYPY_VERSION}/bin/pypy3 -m socketify app.main:app --reload

start-celery: ~/.pixi/bin/pixi
cd backend && APP_ENV=dev pixi run celery -A app.celery worker --loglevel=info
cd backend && APP_ENV=dev ./pypy${PYPY_VERSION}/bin/pypy -m celery -A app.celery worker --loglevel=info

start-frontend: ~/.pixi/bin/pixi
cd frontend && pixi run npm start
Expand All @@ -80,30 +88,33 @@ start-traefik: ~/.pixi/bin/pixi
# Declare every command-style test target phony so a same-named file can never
# shadow it (the old list was missing install-test-deps, test-credential,
# test-analysis, test-project, test-storage and test-job).
.PHONY: start-test-infra install-test-deps test-auth test-health test-organization test-credential test-analysis test-project test-storage test-job test-all
# Bring up the full test infrastructure (DB, redis, localstack, slurm).
start-test-infra: start-test-db start-redis start-localstack start-slurm
	@echo "Start test infra setup"

test-auth:
cd backend && pixi run pytest --cov=app/service_auth app/service_auth --cov-report=term-missing -vvvvvv

test-health:
cd backend && pixi run pytest --cov=app/service_health app/service_health --cov-report=term-missing -vvvvvv
install-test-deps: backend/pypy${PYPY_VERSION}/bin/pypy3
cd backend && ./pypy${PYPY_VERSION}/bin/pypy3 -mpip install pytest pytest-cov pytest-asyncio

test-auth: install-test-deps
cd backend && ./pypy${PYPY_VERSION}/bin/pypy3 -m pytest --cov=app/service_auth app/service_auth --cov-report=term-missing -vvvvvv

test-health: install-test-deps
cd backend && ./pypy${PYPY_VERSION}/bin/pypy3 -m pytest --cov=app/service_health app/service_health --cov-report=term-missing -vvvvvv

test-organization:
cd backend && pixi run pytest --cov=app/service_organization app/service_organization --cov-report=term-missing -vvvvvv
test-organization: install-test-deps
cd backend && ./pypy${PYPY_VERSION}/bin/pypy3 -m pytest --cov=app/service_organization app/service_organization --cov-report=term-missing -vvvvvv

test-credential:
cd backend && pixi run pytest --cov=app/service_credential app/service_credential --cov-report=term-missing -vvvvvv -k "github"
test-credential: install-test-deps
cd backend && ./pypy${PYPY_VERSION}/bin/pypy3 -m pytest --cov=app/service_credential app/service_credential --cov-report=term-missing -vvvvvv -k "github"

test-analysis:
cd backend && pixi run pytest --cov=app/service_analysis app/service_analysis --cov-report=term-missing -vvvvvv
test-analysis: install-test-deps
cd backend && ./pypy${PYPY_VERSION}/bin/pypy3 -m pytest --cov=app/service_analysis app/service_analysis --cov-report=term-missing -vvvvvv

test-project:
cd backend && pixi run pytest --cov=app/service_project app/service_project --cov-report=term-missing -vvvvvv
test-project: install-test-deps
cd backend && ./pypy${PYPY_VERSION}/bin/pypy3 -m pytest --cov=app/service_project app/service_project --cov-report=term-missing -vvvvvv

test-storage:
cd backend && pixi run pytest --cov=app/service_storage app/service_storage --cov-report=term-missing -vvvvvv
test-storage: install-test-deps
cd backend && ./pypy${PYPY_VERSION}/bin/pypy3 -m pytest --cov=app/service_storage app/service_storage --cov-report=term-missing -vvvvvv

test-job:
cd backend && pixi run pytest --cov=app/service_job app/service_job --cov-report=term-missing -vvvvvv
test-job: install-test-deps
cd backend && ./pypy${PYPY_VERSION}/bin/pypy3 -m pytest --cov=app/service_job app/service_job --cov-report=term-missing -vvvvvv

test-all: test-health test-auth test-organization test-credential test-analysis test-project
echo "Running all tests..."
Expand Down
15 changes: 6 additions & 9 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,4 +1,8 @@
# Rapid Integration and Visualization for Enhanced Research Platform

![RIVER Platform Overview](docs/images/graphical_abstract.png)


Rapid Integration and Visualization for Enhanced Research (RIVER) is an integrated ecosystem for data and computing, designed with a monolithic architecture using a Python backend ("backsheep") and a JavaScript frontend ("vite"). While the current structure is monolithic, it is architected for potential refactoring into microservices. For scientific applications, River aims to be lightweight and serve as a system controller—connecting data, software, and users. For a quick recorded video demo, see: https://www.youtube.com/watch?v=boabEFNIkNA


Expand Down Expand Up @@ -179,13 +183,6 @@ Refer to the `Makefile` for additional targets and details.

## License

[![CC BY-NC-SA 4.0][cc-by-nc-sa-shield]][cc-by-nc-sa]

This work is licensed under a
[Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License][cc-by-nc-sa].

[![CC BY-NC-SA 4.0][cc-by-nc-sa-image]][cc-by-nc-sa]
This project is licensed under the GNU General Public License v3.0 - see the [LICENSE](LICENSE) file for details.

[cc-by-nc-sa]: http://creativecommons.org/licenses/by-nc-sa/4.0/
[cc-by-nc-sa-image]: https://licensebuttons.net/l/by-nc-sa/4.0/88x31.png
[cc-by-nc-sa-shield]: https://img.shields.io/badge/License-CC%20BY--NC--SA%204.0-lightgrey.svg
[![License: GPL v3](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0)
5 changes: 4 additions & 1 deletion backend/.dockerignore
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,7 @@ env
**/*.pyc
**/*.md
**/__pycache__
*.sqlite3*
*.sqlite3*
pypy*
.pixi
bombardier-linux-amd64
3 changes: 3 additions & 0 deletions backend/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -102,3 +102,6 @@ venv.bak/
.pytest # pixi environments
.pixi/*
!.pixi/config.toml
bombardier-linux-amd64
*tar.bz2
pypy3*
26 changes: 16 additions & 10 deletions backend/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,19 +1,25 @@
# Build the Server
FROM python:3.12.11-slim AS server_builder
FROM pypy:3.11-slim

ENV PYTHONUNBUFFERED=1
WORKDIR /app
COPY requirements.txt .
RUN pip install --upgrade pip && pip install -r requirements.txt

COPY . .
RUN apt-get update && apt-get install -y \
build-essential pkg-config \
libffi-dev libssl-dev libsodium-dev make libpq-dev \
&& rm -rf /var/lib/apt/lists/*

COPY requirements.txt .
RUN pip install --upgrade pip && pip install --no-cache-dir -r requirements.txt

# Entrypoint script runs migrations then starts server
COPY app ./app
COPY domain ./domain
COPY migrations ./migrations
COPY pyproject.toml .
COPY settings.yaml .
COPY run_server.sh ./
RUN chmod +x run_server.sh

RUN apt-get update && apt-get install -y libuv1
EXPOSE 8000

# Run as non-root in production
RUN useradd -m appuser
USER appuser

USER appuser
4 changes: 2 additions & 2 deletions backend/app/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -270,7 +270,7 @@ async def user_aws(test_client, cookies_admin, user_aws_arn):
}
aws = await create_instance(
test_client,
f"/api/users/creds/{cred["id"]}/",
f"/api/users/creds/{cred['id']}/",
aws_data,
cookies_admin,
201,
Expand Down Expand Up @@ -311,7 +311,7 @@ async def org_aws(test_client, organization, cookies_org_manager):
}
aws = await create_instance(
test_client,
f"/api/orgs/{organization}/creds/{cred["id"]}/",
f"/api/orgs/{organization}/creds/{cred['id']}/",
aws_data,
cookies_org_manager,
201,
Expand Down
6 changes: 3 additions & 3 deletions backend/app/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,17 +13,17 @@ def get_db_url():
if ENV == "prod":
logger.info("Using PostgreSQL database")
return (
f"postgres://{os.getenv('POSTGRES_USER', 'river')}:"
f"psycopg://{os.getenv('POSTGRES_USER', 'river')}:"
f"{os.getenv('POSTGRES_PASSWORD', 'password')}@"
f"{os.getenv('POSTGRES_HOST', 'localhost')}:"
f"{os.getenv('POSTGRES_PORT', '5432')}/"
f"{os.getenv('POSTGRES_DATABASE', 'river')}"
)
elif ENV == "test":
logger.info("Using PostgreSQL database for testing")
return "postgres://river:password@localhost:5434/river_test"
return "psycopg://river:password@localhost:5434/river_test"
logger.info("Using PostgreSQL database for development")
return "postgres://river:password@localhost:5433/river_dev"
return "psycopg://river:password@localhost:5433/river_dev"


MODELS = [
Expand Down
4 changes: 2 additions & 2 deletions backend/app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,8 +86,8 @@ async def configure_redis(app):
JobController,
]
)

# Database
print("Failed here")
# Database 2 3
register_tortoise(
app,
config=TORTOISE_ORM,
Expand Down
2 changes: 1 addition & 1 deletion backend/app/service_analysis/controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ async def get_analysis(

return ok(
{
"items": result,
"item": result,
"page": page,
"page_size": page_size,
"total": total_count,
Expand Down
20 changes: 10 additions & 10 deletions backend/app/service_analysis/tests/test_analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,16 +51,16 @@ async def test_basic_analysis(

# list instance as admin
res_admin = await list_instance(test_client, f"{BASE_URL}/", cookies_admin, 200)
assert len(res_admin["items"]) == 1
assert_instance_fields(res_admin["items"][0], EXPECTED_FIELDS)
assert len(res_admin["item"]) == 1
assert_instance_fields(res_admin["item"][0], EXPECTED_FIELDS)

# list instance as contributor
res_contrib = await list_instance(test_client, f"{BASE_URL}/", cookies_contributor, 200)
assert len(res_contrib["items"]) == 1
assert_instance_fields(res_contrib["items"][0], EXPECTED_FIELDS)
assert len(res_contrib["item"]) == 1
assert_instance_fields(res_contrib["item"][0], EXPECTED_FIELDS)

# delete as admin (should fail)
delete_url = f'{BASE_URL}/{res_admin["items"][0]["id"]}/'
delete_url = f"{BASE_URL}/{res_admin['item'][0]['id']}/"
await delete_instance(test_client, delete_url, cookies_admin, 204)

# delete as superuser (should pass)
Expand Down Expand Up @@ -93,23 +93,23 @@ async def get_page(page, page_size):
# page 1
data = await get_page(1, 10)
assert data["page"] == 1
assert len(data["items"]) == 10
assert len(data["item"]) == 10

# page 2
data = await get_page(2, 10)
assert data["page"] == 2
assert len(data["items"]) == 10
assert len(data["item"]) == 10

# last page
data = await get_page(4, 10)
assert data["page"] == 4
assert len(data["items"]) == 0
assert len(data["item"]) == 0
# too large page
data = await get_page(99, 10)
assert data["items"] == []
assert data["item"] == []

# search by url
res = await test_client.get(f"{BASE_URL}/", query={"search": "repo-1"}, cookies=cookies_admin)
data = await res.json()
assert res.status == 200
assert any("repo-1" in item["url"] for item in data["items"])
assert any("repo-1" in item["url"] for item in data["item"])
16 changes: 8 additions & 8 deletions backend/app/service_credential/tests/test_aws.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ async def test_credential_aws(
# success
aws_instance = await create_instance(
test_client,
f"{BASE_URL}/users/creds/{cred_instance_user_aws["id"]}/",
f"{BASE_URL}/users/creds/{cred_instance_user_aws['id']}/",
aws_data,
cookies=cookies_admin,
)
Expand All @@ -117,7 +117,7 @@ async def test_credential_aws(
bad_data["aws"]["endpoint_url"] = "http://localhost:8081"
instance = await create_instance(
test_client,
f"{BASE_URL}/users/creds/{cred_instance_user_aws["id"]}/",
f"{BASE_URL}/users/creds/{cred_instance_user_aws['id']}/",
bad_data,
cookies_admin,
400,
Expand Down Expand Up @@ -149,7 +149,7 @@ async def test_credential_aws(
# success
aws_instance = await create_instance(
test_client,
f"{BASE_URL}/orgs/{organization}/creds/{cred_instance_org_aws["id"]}/",
f"{BASE_URL}/orgs/{organization}/creds/{cred_instance_org_aws['id']}/",
aws_data,
cookies=cookies_org_manager,
)
Expand All @@ -168,7 +168,7 @@ async def test_credential_aws(
bad_data["aws"]["endpoint_url"] = "http://localhost:8081"
instance = await create_instance(
test_client,
f"{BASE_URL}/orgs/{organization}/creds/{cred_instance_org_aws["id"]}/",
f"{BASE_URL}/orgs/{organization}/creds/{cred_instance_org_aws['id']}/",
bad_data,
cookies_org_manager,
400,
Expand Down Expand Up @@ -254,7 +254,7 @@ async def test_credential_aws(
aws_data["aws"]["endpoint_url"] = os.environ["ENDPOINT_URL"]
aws_instance = await update_instance(
test_client,
f"{BASE_URL}/users/creds/{cred_instance_user_aws["id"]}/",
f"{BASE_URL}/users/creds/{cred_instance_user_aws['id']}/",
aws_data,
cookies=cookies_admin,
expected_status=201,
Expand Down Expand Up @@ -286,7 +286,7 @@ async def test_credential_aws(
aws_data["aws"]["endpoint_url"] = os.environ["ENDPOINT_URL"]
aws_instance = await update_instance(
test_client,
f"{BASE_URL}/orgs/{organization}/creds/{cred_instance_org_aws["id"]}/",
f"{BASE_URL}/orgs/{organization}/creds/{cred_instance_org_aws['id']}/",
aws_data,
cookies=cookies_org_manager,
expected_status=201,
Expand All @@ -303,11 +303,11 @@ async def test_credential_aws(
# Delete
await delete_instance(
test_client,
f"{BASE_URL}/users/creds/{cred_instance_user_aws["id"]}/",
f"{BASE_URL}/users/creds/{cred_instance_user_aws['id']}/",
cookies_admin,
)
await delete_instance(
test_client,
f"{BASE_URL}/orgs/{organization}/creds/{cred_instance_org_aws["id"]}/",
f"{BASE_URL}/orgs/{organization}/creds/{cred_instance_org_aws['id']}/",
cookies_org_manager,
)
Loading