Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/release.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ jobs:
- name: Install dependencies
run: |
uv sync --extra dev --extra docs --extra llm
uv run python -m ensurepip
- name: Run unit tests
run: |
uv run pytest -v tests/app --cov --cov-report=html:coverage_reports #--random-order
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ cms train --help

## Download models:

CMS runs the NLP model packaged in a ZIP file or a Gzipped tarball. To download pretrained GA models, please follow the [instructions](https://github.com/CogStack/MedCAT#available-models). Contact [Cogstack](contact@cogstack.org)
CMS runs the NLP model packaged in a ZIP file or a Gzipped tarball. To download pretrained GA models, please follow the [instructions](https://medcat.readthedocs.io/en/latest/main.html#models). Contact [Cogstack](mailto:contact@cogstack.org)
if you are interested in trying out an Alpha release such as the de-identification model. To serve or train existing HuggingFace NER models, you can
package the model, either downloaded from the Hugging Face Hub or cached locally, as a ZIP or Gzipped tarball by running:
```commandline
Expand Down
2 changes: 1 addition & 1 deletion docker-compose-mlflow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ services:
condition: "service_healthy"

mlflow-ui:
image: cogstacksystems/cogstack-mlflow-ui:0.1.1
image: cogstacksystems/cogstack-mlflow-ui:0.2.0
restart: always
networks:
- cogstack-model-serve_cms
Expand Down
25 changes: 16 additions & 9 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ name: cms
services:

medcat-snomed:
image: cogstacksystems/cogstack-modelserve:0.1.1
image: cogstacksystems/cogstack-modelserve:0.2.0
labels:
- org.cogstack.model-serve=medcat_snomed
- org.cogstack.model-name=SNOMED MedCAT model
Expand Down Expand Up @@ -40,6 +40,7 @@ services:
- http_proxy=$HTTP_PROXY
- https_proxy=$HTTPS_PROXY
- no_proxy=mlflow-ui,minio,graylog,auth-db,localhost
- COLUMNS=200
expose:
- 8000
ports:
Expand All @@ -52,7 +53,7 @@ services:
start_period: 60s

medcat-icd10:
image: cogstacksystems/cogstack-modelserve:0.1.1
image: cogstacksystems/cogstack-modelserve:0.2.0
labels:
- org.cogstack.model-serve=medcat_icd10
- org.cogstack.model-name=ICD-10 MedCAT model
Expand Down Expand Up @@ -87,6 +88,7 @@ services:
- http_proxy=$HTTP_PROXY
- https_proxy=$HTTPS_PROXY
- no_proxy=mlflow-ui,minio,graylog,auth-db,localhost
- COLUMNS=200
expose:
- 8000
ports:
Expand All @@ -99,7 +101,7 @@ services:
start_period: 60s

medcat-opcs4:
image: cogstacksystems/cogstack-modelserve:0.1.1
image: cogstacksystems/cogstack-modelserve:0.2.0
labels:
- org.cogstack.model-serve=medcat_opcs4
- org.cogstack.model-name=OPCS-4 MedCAT model
Expand Down Expand Up @@ -134,6 +136,7 @@ services:
- http_proxy=$HTTP_PROXY
- https_proxy=$HTTPS_PROXY
- no_proxy=mlflow-ui,minio,graylog,auth-db,localhost
- COLUMNS=200
expose:
- 8000
ports:
Expand All @@ -146,7 +149,7 @@ services:
start_period: 60s

medcat-deid:
image: cogstacksystems/cogstack-modelserve:0.1.1
image: cogstacksystems/cogstack-modelserve:0.2.0
labels:
- org.cogstack.model-serve=medcat_deid
- org.cogstack.model-name=De-Identification MedCAT model
Expand Down Expand Up @@ -181,6 +184,7 @@ services:
- http_proxy=$HTTP_PROXY
- https_proxy=$HTTPS_PROXY
- no_proxy=mlflow-ui,minio,graylog,auth-db,localhost
- COLUMNS=200
expose:
- 8000
ports:
Expand All @@ -193,7 +197,7 @@ services:
start_period: 60s

medcat-umls:
image: cogstacksystems/cogstack-modelserve:0.1.1
image: cogstacksystems/cogstack-modelserve:0.2.0
labels:
- org.cogstack.model-serve=medcat_umls
- org.cogstack.model-name=UMLS MedCAT model
Expand Down Expand Up @@ -228,6 +232,7 @@ services:
- http_proxy=$HTTP_PROXY
- https_proxy=$HTTPS_PROXY
- no_proxy=mlflow-ui,minio,graylog,auth-db,localhost
- COLUMNS=200
expose:
- 8000
ports:
Expand All @@ -240,7 +245,7 @@ services:
start_period: 60s

huggingface-ner:
image: cogstacksystems/cogstack-modelserve:0.1.1
image: cogstacksystems/cogstack-modelserve:0.2.0
labels:
- org.cogstack.model-serve=huggingface_ner
- org.cogstack.model-name=HuggingFace NER model
Expand Down Expand Up @@ -275,6 +280,7 @@ services:
- http_proxy=$HTTP_PROXY
- https_proxy=$HTTPS_PROXY
- no_proxy=mlflow-ui,minio,graylog,auth-db,localhost
- COLUMNS=200
expose:
- 8000
ports:
Expand All @@ -287,7 +293,7 @@ services:
start_period: 60s

huggingface-llm:
image: cogstacksystems/cogstack-modelserve:0.1.0
image: cogstacksystems/cogstack-modelserve:0.2.0
labels:
- org.cogstack.model-serve=huggingface_llm
- org.cogstack.model-name=HuggingFace LLM model
Expand All @@ -301,8 +307,8 @@ services:
- ./docker/huggingface-llm/.env:/app/envs/.env:ro
environment:
- BASE_MODEL_FULL_PATH=$MODEL_PACKAGE_FULL_PATH
- CMS_MODEL_TYPE=huggingface_ner
- CMS_MODEL_NAME=HuggingFace NER model
- CMS_MODEL_TYPE=huggingface_llm
- CMS_MODEL_NAME=HuggingFace LLM model
- CMS_STREAMABLE=${CMS_STREAMABLE:-false}
- AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID
- AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY
Expand All @@ -322,6 +328,7 @@ services:
- http_proxy=$HTTP_PROXY
- https_proxy=$HTTPS_PROXY
- no_proxy=mlflow-ui,minio,graylog,auth-db,localhost
- COLUMNS=200
expose:
- 8000
ports:
Expand Down
Loading