From fa34a803b0d33e612048babf3fe6305269da15dd Mon Sep 17 00:00:00 2001
From: mathieu-benoit <11720844+mathieu-benoit@users.noreply.github.com>
Date: Mon, 5 Jan 2026 10:04:31 +0000
Subject: [PATCH] chore: update generated content

Signed-off-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
---
 .../score-compose/template/dmr-llm-model.md   | 26 +++++++++++++++++++
 .../community-provisioners/llm-model.md       |  1 +
 ...del-via-service-provider.provisioners.yaml |  3 +++
 .../10-dmr-llm-model.provisioners.yaml        | 20 ++++++++++++++
 ...del-via-service-provider.provisioners.yaml |  3 +++
 .../10-dmr-llm-model.provisioners.yaml        | 20 ++++++++++++++
 6 files changed, 73 insertions(+)
 create mode 100644 content/en/examples/resource-provisioners/community/llm-model/score-compose/template/dmr-llm-model.md
 create mode 100644 gen/external-content/resource-provisioners/community/llm-model/score-compose/10-dmr-llm-model.provisioners.yaml
 create mode 100644 gen/external-content/score/resources/community-provisioners/llm-model/score-compose/10-dmr-llm-model.provisioners.yaml

diff --git a/content/en/examples/resource-provisioners/community/llm-model/score-compose/template/dmr-llm-model.md b/content/en/examples/resource-provisioners/community/llm-model/score-compose/template/dmr-llm-model.md
new file mode 100644
index 00000000..26e9267a
--- /dev/null
+++ b/content/en/examples/resource-provisioners/community/llm-model/score-compose/template/dmr-llm-model.md
@@ -0,0 +1,26 @@
+---
+title: "dmr-llm-model"
+draft: false
+mermaid: true
+type: examples
+source: "community"
+implementation: "score-compose"
+resourceType: "llm-model"
+provisionerType: "template"
+flavor: "dmr"
+excerpt: ''
+description: 'Generates the LLM model via the Docker Model Runner (DMR).'
+expectedOutputs:
+  - model
+  - url
+  - api-key
+supportedParams:
+  - model
+  - context_size
+hasMore: false
+
+---
+
+{{% resource-provisioner-content description="Generates the LLM model via the Docker Model Runner (DMR)." type="llm-model" supportedParams="model,context_size" expectedOutputs="model,url,api-key" %}}
+
+{{% example-file filename="10-dmr-llm-model.provisioners.yaml" dir="resource-provisioners/community/llm-model/score-compose" githubUrl="https://github.com/score-spec/community-provisioners/blob/main" %}}
diff --git a/content/en/examples/score/resources/community-provisioners/llm-model.md b/content/en/examples/score/resources/community-provisioners/llm-model.md
index 98efb8d1..511aade0 100644
--- a/content/en/examples/score/resources/community-provisioners/llm-model.md
+++ b/content/en/examples/score/resources/community-provisioners/llm-model.md
@@ -17,6 +17,7 @@ flavor: "Resources"
 {{% example-file filename="10-dmr-llm-model-via-curl-cmd.provisioners.yaml" dir="score/resources/community-provisioners/llm-model/score-compose" githubUrl="https://github.com/score-spec/community-provisioners/blob/main" %}}
 {{% example-file filename="10-dmr-llm-model-via-curl-service.provisioners.yaml" dir="score/resources/community-provisioners/llm-model/score-compose" githubUrl="https://github.com/score-spec/community-provisioners/blob/main" %}}
 {{% example-file filename="10-dmr-llm-model-via-service-provider.provisioners.yaml" dir="score/resources/community-provisioners/llm-model/score-compose" githubUrl="https://github.com/score-spec/community-provisioners/blob/main" %}}
+{{% example-file filename="10-dmr-llm-model.provisioners.yaml" dir="score/resources/community-provisioners/llm-model/score-compose" githubUrl="https://github.com/score-spec/community-provisioners/blob/main" %}}
 {{% example-file filename="10-ollama-llm-model-service.provisioners.yaml" dir="score/resources/community-provisioners/llm-model/score-compose" githubUrl="https://github.com/score-spec/community-provisioners/blob/main" %}}
 
 ---
diff --git a/gen/external-content/resource-provisioners/community/llm-model/score-compose/10-dmr-llm-model-via-service-provider.provisioners.yaml b/gen/external-content/resource-provisioners/community/llm-model/score-compose/10-dmr-llm-model-via-service-provider.provisioners.yaml
index 3140636b..d0156b52 100644
--- a/gen/external-content/resource-provisioners/community/llm-model/score-compose/10-dmr-llm-model-via-service-provider.provisioners.yaml
+++ b/gen/external-content/resource-provisioners/community/llm-model/score-compose/10-dmr-llm-model-via-service-provider.provisioners.yaml
@@ -1,6 +1,9 @@
+#
 - uri: template://community-provisioners/dmr-llm-model-via-service-provider
   type: llm-model
   description: Generates the LLM model service via the Docker Model Runner (DMR) provider.
+  info_logs: |
+    - "This service provider approach is now deprecated, use the 10-dmr-llm-model.provisioners.yaml provisioner file instead."
   supported_params:
     - model
   outputs: |
diff --git a/gen/external-content/resource-provisioners/community/llm-model/score-compose/10-dmr-llm-model.provisioners.yaml b/gen/external-content/resource-provisioners/community/llm-model/score-compose/10-dmr-llm-model.provisioners.yaml
new file mode 100644
index 00000000..e762d808
--- /dev/null
+++ b/gen/external-content/resource-provisioners/community/llm-model/score-compose/10-dmr-llm-model.provisioners.yaml
@@ -0,0 +1,20 @@
+- uri: template://community-provisioners/dmr-llm-model
+  type: llm-model
+  description: Generates the LLM model via the Docker Model Runner (DMR).
+  supported_params:
+    - model
+    - context_size
+  outputs: |
+    model: {{ .Init.model }}
+    url: "http://172.17.0.1:12434/engines/v1/"
+    api-key: "not-needed"
+  expected_outputs:
+    - model
+    - url
+    - api-key
+  init: |
+    model: {{ .Params.model | default "ai/smollm2:135M-Q4_0" }}
+  models: |
+    {{ .Id }}:
+      model: {{ .Init.model }}
+      context_size: {{ .Params.context_size | default 2048 }}
\ No newline at end of file
diff --git a/gen/external-content/score/resources/community-provisioners/llm-model/score-compose/10-dmr-llm-model-via-service-provider.provisioners.yaml b/gen/external-content/score/resources/community-provisioners/llm-model/score-compose/10-dmr-llm-model-via-service-provider.provisioners.yaml
index 3140636b..d0156b52 100644
--- a/gen/external-content/score/resources/community-provisioners/llm-model/score-compose/10-dmr-llm-model-via-service-provider.provisioners.yaml
+++ b/gen/external-content/score/resources/community-provisioners/llm-model/score-compose/10-dmr-llm-model-via-service-provider.provisioners.yaml
@@ -1,6 +1,9 @@
+#
 - uri: template://community-provisioners/dmr-llm-model-via-service-provider
   type: llm-model
   description: Generates the LLM model service via the Docker Model Runner (DMR) provider.
+  info_logs: |
+    - "This service provider approach is now deprecated, use the 10-dmr-llm-model.provisioners.yaml provisioner file instead."
   supported_params:
     - model
   outputs: |
diff --git a/gen/external-content/score/resources/community-provisioners/llm-model/score-compose/10-dmr-llm-model.provisioners.yaml b/gen/external-content/score/resources/community-provisioners/llm-model/score-compose/10-dmr-llm-model.provisioners.yaml
new file mode 100644
index 00000000..e762d808
--- /dev/null
+++ b/gen/external-content/score/resources/community-provisioners/llm-model/score-compose/10-dmr-llm-model.provisioners.yaml
@@ -0,0 +1,20 @@
+- uri: template://community-provisioners/dmr-llm-model
+  type: llm-model
+  description: Generates the LLM model via the Docker Model Runner (DMR).
+  supported_params:
+    - model
+    - context_size
+  outputs: |
+    model: {{ .Init.model }}
+    url: "http://172.17.0.1:12434/engines/v1/"
+    api-key: "not-needed"
+  expected_outputs:
+    - model
+    - url
+    - api-key
+  init: |
+    model: {{ .Params.model | default "ai/smollm2:135M-Q4_0" }}
+  models: |
+    {{ .Id }}:
+      model: {{ .Init.model }}
+      context_size: {{ .Params.context_size | default 2048 }}
\ No newline at end of file
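
For context on how the new dmr-llm-model provisioner is meant to be consumed, here is a minimal sketch of a Score workload that declares an `llm-model` resource and reads the provisioner's `model`, `url`, and `api-key` outputs through the standard `${resources.*}` placeholders. This sketch is not part of the patch: the workload name, container image, and environment variable names are illustrative; only the resource type, params (`model`, `context_size`), and output keys come from the provisioner defined above.

```yaml
# Hypothetical score.yaml consuming the dmr-llm-model provisioner (names are illustrative).
apiVersion: score.dev/v1b1
metadata:
  name: llm-demo
containers:
  app:
    image: my-llm-app:latest            # placeholder image
    variables:
      # Outputs exposed by the provisioner: model, url, api-key
      LLM_MODEL: ${resources.llm.model}
      LLM_BASE_URL: ${resources.llm.url}
      LLM_API_KEY: ${resources.llm.api-key}
resources:
  llm:
    type: llm-model
    params:
      model: ai/smollm2:135M-Q4_0       # supported_params: model, context_size
      context_size: 4096
```

Assuming the provisioner file has been added to a score-compose project, `score-compose generate` would be expected to resolve these placeholders from the `outputs` block in the patch, while the `models` section wires the chosen model into the generated Compose project for the Docker Model Runner.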