Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
67 changes: 67 additions & 0 deletions .github/workflows/Autointegrate_awesomeAzd.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
name: Awesome-azd and Exec Docs Sync
permissions:
  actions: write
  contents: write
on:
  workflow_dispatch:

jobs:
  Workloads-PR:
    runs-on: ubuntu-latest
    environment: "AzD Integration"

    steps:
      # Fixed typo ("respository") and bumped checkout/setup-python:
      # v2 of both actions runs on the deprecated node12 runtime, which
      # current GitHub-hosted runners no longer support.
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: dev

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.x'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r ${{ github.workspace }}/scripts/generate_fields/requirements.txt

      # Pull the upstream workload catalogs that the sync scripts consume.
      - name: Fetch workloads from azd and exec docs scenarios
        run: |
          curl -o ${{ github.workspace }}/templates.json https://raw.githubusercontent.com/Azure/awesome-azd/main/website/static/templates.json
          curl -o ${{ github.workspace }}/exec_metadata.json https://raw.githubusercontent.com/MicrosoftDocs/executable-docs/main/scenarios/metadata.json

      - name: Updating Workloads
        run: |
          echo "Running script"
          python ${{ github.workspace }}/scripts/add_workloads/add_azd.py --root ${{ github.workspace }} --input_file ${{ github.workspace }}/templates.json
          python ${{ github.workspace }}/scripts/add_workloads/add_exec_docs.py --root ${{ github.workspace }} --input_file ${{ github.workspace }}/exec_metadata.json
          rm ${{ github.workspace }}/templates.json
          rm ${{ github.workspace }}/exec_metadata.json

      - name: Generating Fields
        env:
          AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }}
          AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }}
          GIT_EMU_PAT: ${{ secrets.GIT_EMU_PAT }}

        run: |
          echo "Generating New Fields"
          python ${{ github.workspace }}/scripts/generate_fields/generate_fields.py --root ${{ github.workspace }}

      - name: Configure Git
        run: |
          git config --global user.name 'github-actions[bot]'
          git config --global user.email 'github-actions[bot]@users.noreply.github.com'

      - name: Raise PR
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GIT_PAT }}
          branch: "auto-pr-branch-${{ github.run_number }}"
          commit-message: "Triggered update of workloads.json"
          title: "Triggered PR: workloads.json update by ${{ github.actor}}"
          body: "Triggered update of workloads.json by ${{ github.actor}}"
          author: "github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>"
          labels: "automated-pr"
          delete-branch: true
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
.DS_Store
helper_functions.sh
**/__pycache__/**
62 changes: 0 additions & 62 deletions scripts/add_awesome_azd/add_azd.py

This file was deleted.

80 changes: 80 additions & 0 deletions scripts/add_workloads/add_azd.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
import json, uuid, argparse, logging

logging.basicConfig(level=logging.INFO, format='[%(asctime)s] - %(levelname)s - %(message)s')

def main(root: str, input_file: str, check_quality: bool = False):
workloads_file = f"{root}/workloads/workloads.json"

with open(input_file, "r") as f:
data = json.load(f)

new_workloads = []

## Get azd workloads
for workload in data:
if "msft" in workload["tags"]:
new_workloads.append(workload)
elif check_quality:
if workload["quality"]:
new_workloads.append(workload)

## Add correct fields
with open(workloads_file, "r") as f:
workloads = json.load(f)

correct_keys = workloads[0].keys()
unique_azd = {}
for workload in workloads:
unique_azd[workload["source"]] = workload

## Add correct keys for new_workloads
for azd_workload in new_workloads:
logging.info(f"Processing workload: {workload['title']}")
if azd_workload["source"] in unique_azd:
for key in azd_workload.keys():
if key in correct_keys:
unique_azd[azd_workload["source"]][key] = azd_workload[key]
else:
logging.info(f"Adding new workload: {workload['title']}")
new_workload = {}
for key in correct_keys:
if key in azd_workload:
new_workload[key] = azd_workload[key]
else:
match key:
case "tags":
new_workload[key] = []
case "products":
new_workload[key] = []
case "sampleQueries":
new_workload[key] = []
case "deploymentOptions":
new_workload[key] = ["AzD"]
case "sourceType":
new_workload[key] = "Azd"
case "deploymentConfig":
new_workload[key] = {}
case "id":
new_workload[key] = str(uuid.uuid4())
case "tech":
new_workload[key] = []
case "keyFeatures":
new_workload[key] = []
case _:
new_workload[key] = []
workloads.append(new_workload)

## Write to file
with open(workloads_file, "w") as f:
json.dump(workloads, f, indent=4)

if __name__ == "__main__":
    # CLI entry point: merge an awesome-azd templates.json into the catalog.
    arg_parser = argparse.ArgumentParser(
        description="Process workloads and add necessary fields.",
        usage="%(prog)s --root ROOT --input_file INPUT_FILE [--check-quality]",
    )
    arg_parser.add_argument("-r", "--root", required=True,
                            help="Path to the root directory.")
    arg_parser.add_argument("-i", "--input_file", required=True,
                            help="Path to the input JSON file.")
    arg_parser.add_argument("--check-quality", action="store_true",
                            help="Whether to check the quality field in the workloads.")

    cli = arg_parser.parse_args()
    main(cli.root, cli.input_file, cli.check_quality)
97 changes: 97 additions & 0 deletions scripts/add_workloads/add_exec_docs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
import json, uuid, argparse, logging

logging.basicConfig(level=logging.INFO, format='[%(asctime)s] - %(levelname)s - %(message)s')

def main(root: str, input_file: str, check_quality: bool = False):
workloads_file = f"{root}/workloads/workloads.json"

with open(input_file, "r") as f:
data = json.load(f)

new_workloads = []
active_workload_sources = set()

## Get azd workloads
for workload in data:
if workload["status"] == "active":
new_workloads.append(workload)
active_workload_sources.add(workload["sourceUrl"])
elif check_quality:
if workload.get("quality", None):
new_workloads.append(workload)

## Add correct fields
with open(workloads_file, "r") as f:
workloads = json.load(f)

non_active_exec_docs = []
for workload in workloads:
if workload["sourceType"] == "ExecDocs" and workload["source"] not in active_workload_sources:
non_active_exec_docs.append(workload)

for workload in non_active_exec_docs:
logging.info(f"Removing workload: {workload['title']}")
workloads.remove(workload)

correct_keys = workloads[0].keys()
unique_exec = {}
for workload in workloads:
unique_exec[workload["source"]] = workload

## Add correct keys for new_workloads
for exec_workload in new_workloads:
logging.info(f"Processing workload: {workload['title']}")
if exec_workload["sourceUrl"] in unique_exec:
for key in exec_workload.keys():
if key in correct_keys:
unique_exec[exec_workload["sourceUrl"]][key] = exec_workload[key]
else:
logging.info(f"Adding new workload: {workload['title']}")
new_workload = {}
for key in correct_keys:
if key in exec_workload:
new_workload[key] = exec_workload[key]
else:
match key:
case "source":
new_workload[key] = exec_workload["sourceUrl"]
case "tags":
new_workload[key] = []
case "products":
new_workload[key] = []
case "sampleQueries":
new_workload[key] = []
case "deploymentOptions":
new_workload[key] = ["azcli"]
case "sourceType":
new_workload[key] = "ExecDocs"
case "deploymentConfig":
new_workload[key] = {
"execDocs": {
"path": exec_workload["key"].replace("/", "%2F")
}
}
case "id":
new_workload[key] = str(uuid.uuid4())
case "tech":
new_workload[key] = []
case "keyFeatures":
new_workload[key] = []
case _:
new_workload[key] = []
workloads.append(new_workload)

## Write to file
with open(workloads_file, "w") as f:
json.dump(workloads, f, indent=4)

if __name__ == "__main__":
    # CLI entry point: merge executable-docs scenarios into the catalog.
    arg_parser = argparse.ArgumentParser(
        description="Process workloads and add necessary fields.",
        usage="%(prog)s --root ROOT --input_file INPUT_FILE [--check-quality]",
    )
    arg_parser.add_argument("-r", "--root", required=True,
                            help="Path to the root directory.")
    arg_parser.add_argument("-i", "--input_file", required=True,
                            help="Path to the input JSON file.")
    arg_parser.add_argument("--check-quality", action="store_true",
                            help="Whether to check the quality field in the workloads.")

    cli = arg_parser.parse_args()
    main(cli.root, cli.input_file, cli.check_quality)
30 changes: 30 additions & 0 deletions scripts/add_workloads/delete_workloads.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
import argparse, json, requests, logging

logging.basicConfig(level=logging.INFO, format='[%(asctime)s] - %(levelname)s - %(message)s')

def main(root="."):
    """Prune ``<root>/workloads/workloads.json`` of dead or duplicate sources.

    Fetches each workload's ``source`` URL; keeps only workloads whose URL
    returns HTTP 200 and has not been seen before, then rewrites the file.

    Args:
        root: Repository root containing ``workloads/workloads.json``.
    """
    workloads_file = f"{root}/workloads/workloads.json"
    # Use context managers so handles are closed even if a request fails
    # (the original left both files open).
    with open(workloads_file, "r") as f:
        workloads = json.load(f)

    keep_workloads = []
    sources = set()

    for workload in workloads:
        res = requests.get(workload["source"])

        # BUG FIX: was a stray print(); use the module's logging convention.
        logging.info(f"{res.status_code} {workload['source']}")
        if res.status_code == 200 and workload["source"] not in sources:
            logging.info(f"Keeping workload {workload['title']}")
            keep_workloads.append(workload)
            sources.add(workload["source"])
        else:
            logging.info(f"Removing workload {workload['title']}")

    with open(workloads_file, "w") as f:
        json.dump(keep_workloads, f, indent=4)

if __name__ == "__main__":
    # CLI entry point: prune dead/duplicate workloads from the catalog.
    arg_parser = argparse.ArgumentParser(
        description="Delete unsupported workloads in workloads.json. Does so by looking at the source url.",
        usage="%(prog)s --root ROOT",
    )
    arg_parser.add_argument("-r", "--root", required=True,
                            help="Path to the root directory.")

    cli = arg_parser.parse_args()
    main(cli.root)
Loading