diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index c3cd5b7..bfc7c79 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -42,6 +42,7 @@ jobs: - name: Install dependencies run: | uv sync --extra dev --extra docs --extra llm + uv run python -m ensurepip - name: Run unit tests run: | uv run pytest -v tests/app --cov --cov-report=html:coverage_reports #--random-order diff --git a/README.md b/README.md index 20e0279..009ca19 100644 --- a/README.md +++ b/README.md @@ -34,7 +34,7 @@ cms train --help ## Download models: -CMS runs the NLP model packaged in a ZIP file or a Gzipped tarball. To download pretrained GA models, please follow the [instructions](https://github.com/CogStack/MedCAT#available-models). Contact [Cogstack](contact@cogstack.org) +CMS runs the NLP model packaged in a ZIP file or a Gzipped tarball. To download pretrained GA models, please follow the [instructions](https://medcat.readthedocs.io/en/latest/main.html#models). Contact [Cogstack](mailto:contact@cogstack.org) 
To serve or train existing HuggingFace NER models, you can package the model, either downloaded from the Hugging Face Hub or cached locally, as a ZIP or Gzipped tarball by running: ```commandline diff --git a/docker-compose-mlflow.yml b/docker-compose-mlflow.yml index d31a17a..2e53ce1 100644 --- a/docker-compose-mlflow.yml +++ b/docker-compose-mlflow.yml @@ -65,7 +65,7 @@ services: condition: "service_healthy" mlflow-ui: - image: cogstacksystems/cogstack-mlflow-ui:0.1.1 + image: cogstacksystems/cogstack-mlflow-ui:0.2.0 restart: always networks: - cogstack-model-serve_cms diff --git a/docker-compose.yml b/docker-compose.yml index 12f2b64..65b208a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,7 +5,7 @@ name: cms services: medcat-snomed: - image: cogstacksystems/cogstack-modelserve:0.1.1 + image: cogstacksystems/cogstack-modelserve:0.2.0 labels: - org.cogstack.model-serve=medcat_snomed - org.cogstack.model-name=SNOMED MedCAT model @@ -40,6 +40,7 @@ services: - http_proxy=$HTTP_PROXY - https_proxy=$HTTPS_PROXY - no_proxy=mlflow-ui,minio,graylog,auth-db,localhost + - COLUMNS=200 expose: - 8000 ports: @@ -52,7 +53,7 @@ services: start_period: 60s medcat-icd10: - image: cogstacksystems/cogstack-modelserve:0.1.1 + image: cogstacksystems/cogstack-modelserve:0.2.0 labels: - org.cogstack.model-serve=medcat_icd10 - org.cogstack.model-name=ICD-10 MedCAT model @@ -87,6 +88,7 @@ services: - http_proxy=$HTTP_PROXY - https_proxy=$HTTPS_PROXY - no_proxy=mlflow-ui,minio,graylog,auth-db,localhost + - COLUMNS=200 expose: - 8000 ports: @@ -99,7 +101,7 @@ services: start_period: 60s medcat-opcs4: - image: cogstacksystems/cogstack-modelserve:0.1.1 + image: cogstacksystems/cogstack-modelserve:0.2.0 labels: - org.cogstack.model-serve=medcat_opcs4 - org.cogstack.model-name=OPCS-4 MedCAT model @@ -134,6 +136,7 @@ services: - http_proxy=$HTTP_PROXY - https_proxy=$HTTPS_PROXY - no_proxy=mlflow-ui,minio,graylog,auth-db,localhost + - COLUMNS=200 expose: - 8000 ports: @@ -146,7 
+149,7 @@ services: start_period: 60s medcat-deid: - image: cogstacksystems/cogstack-modelserve:0.1.1 + image: cogstacksystems/cogstack-modelserve:0.2.0 labels: - org.cogstack.model-serve=medcat_deid - org.cogstack.model-name=De-Identification MedCAT model @@ -181,6 +184,7 @@ services: - http_proxy=$HTTP_PROXY - https_proxy=$HTTPS_PROXY - no_proxy=mlflow-ui,minio,graylog,auth-db,localhost + - COLUMNS=200 expose: - 8000 ports: @@ -193,7 +197,7 @@ services: start_period: 60s medcat-umls: - image: cogstacksystems/cogstack-modelserve:0.1.1 + image: cogstacksystems/cogstack-modelserve:0.2.0 labels: - org.cogstack.model-serve=medcat_umls - org.cogstack.model-name=UMLS MedCAT model @@ -228,6 +232,7 @@ services: - http_proxy=$HTTP_PROXY - https_proxy=$HTTPS_PROXY - no_proxy=mlflow-ui,minio,graylog,auth-db,localhost + - COLUMNS=200 expose: - 8000 ports: @@ -240,7 +245,7 @@ services: start_period: 60s huggingface-ner: - image: cogstacksystems/cogstack-modelserve:0.1.1 + image: cogstacksystems/cogstack-modelserve:0.2.0 labels: - org.cogstack.model-serve=huggingface_ner - org.cogstack.model-name=HuggingFace NER model @@ -275,6 +280,7 @@ services: - http_proxy=$HTTP_PROXY - https_proxy=$HTTPS_PROXY - no_proxy=mlflow-ui,minio,graylog,auth-db,localhost + - COLUMNS=200 expose: - 8000 ports: @@ -287,7 +293,7 @@ services: start_period: 60s huggingface-llm: - image: cogstacksystems/cogstack-modelserve:0.1.0 + image: cogstacksystems/cogstack-modelserve:0.2.0 labels: - org.cogstack.model-serve=huggingface_llm - org.cogstack.model-name=HuggingFace LLM model @@ -301,8 +307,8 @@ services: - ./docker/huggingface-llm/.env:/app/envs/.env:ro environment: - BASE_MODEL_FULL_PATH=$MODEL_PACKAGE_FULL_PATH - - CMS_MODEL_TYPE=huggingface_ner - - CMS_MODEL_NAME=HuggingFace NER model + - CMS_MODEL_TYPE=huggingface_llm + - CMS_MODEL_NAME=HuggingFace LLM model - CMS_STREAMABLE=${CMS_STREAMABLE:-false} - AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID - AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY @@ -322,6 +328,7 
@@ services: - http_proxy=$HTTP_PROXY - https_proxy=$HTTPS_PROXY - no_proxy=mlflow-ui,minio,graylog,auth-db,localhost + - COLUMNS=200 expose: - 8000 ports: