From 3b5523a558f2eab2c92aebe5ba4058bf1c983af6 Mon Sep 17 00:00:00 2001
From: Iliana Papadopoulou
Date: Mon, 16 Sep 2024 14:51:45 +0200
Subject: [PATCH 1/6] set up script editor placeholder values for now. to be
 connected with rhino plugin

---
 log/prediction_result.txt | 1 +
 run_ml.py                 | 51 +++++++++++++++++++++++++++++++++++++++
 2 files changed, 52 insertions(+)
 create mode 100644 log/prediction_result.txt
 create mode 100644 run_ml.py

diff --git a/log/prediction_result.txt b/log/prediction_result.txt
new file mode 100644
index 0000000..c57a761
--- /dev/null
+++ b/log/prediction_result.txt
@@ -0,0 +1 @@
+530.463623046875
\ No newline at end of file
diff --git a/run_ml.py b/run_ml.py
new file mode 100644
index 0000000..93e742e
--- /dev/null
+++ b/run_ml.py
@@ -0,0 +1,51 @@
+# r: tensorflow
+import tensorflow as tf
+# r: joblib
+import joblib
+# r: numpy
+import numpy as np
+# r: scikit-learn
+import sklearn
+
+#Load saved model
+model = tf.keras.models.load_model("C:/Reope/GitHub/src/log/LearnModel_A.h5")
+print(model.summary())
+
+# Input 1 - Building Construction Type
+# 0 - RC
+# 1 - Steel Concrete
+# 2 - Wood
+# 3 - Wood Hybrid
+constructionType = 2
+
+# Input 2 - building type is 0 or 1
+buildingType = 1
+
+# Input 3 - location 0 - 5
+location = 3
+
+# Input 4 - Total floor area
+area = 100.0
+
+# Input 5 - floorCount usually between 5 and 25
+floorCount = 10
+
+inputData = np.array([[constructionType], [buildingType], [location], [area], [floorCount]])
+
+scalerY = joblib.load('C:/Reope/GitHub/src/log/scalerY_A.pkl')
+scalerX = joblib.load('C:/Reope/GitHub/src/log/scalerx_A.pkl')
+
+# reshaping model
+inputData_new = np.reshape(inputData, (1,5))
+
+x_scaled = scalerX.transform(inputData_new)
+
+prediction = model.predict(x_scaled)
+#reshape or normalize here
+y_scaled = scalerY.inverse_transform(prediction)
+
+numPrediction = float(y_scaled[0][0])
+print("Building 1: " + str(numPrediction))
+
+with open('C:/Reope/GitHub/src/log/prediction_result.txt', 'w') as f:
+    f.write(str(numPrediction))
\ No newline at end of file

From 5550bf6fbb9c4c0d632dddf2a8f5745dde23caf8 Mon Sep 17 00:00:00 2001
From: Iliana Papadopoulou
Date: Sat, 21 Sep 2024 14:11:54 +0200
Subject: [PATCH 2/6] added modelB and corrected directories

---
 run_ml.py | 92 ++++++++++++++++++++++++++++++++++---------------------
 1 file changed, 57 insertions(+), 35 deletions(-)

diff --git a/run_ml.py b/run_ml.py
index 93e742e..dc4512d 100644
--- a/run_ml.py
+++ b/run_ml.py
@@ -6,46 +6,68 @@
 import numpy as np
 # r: scikit-learn
 import sklearn
+# r: sys
+import sys
+# r: os
+import os
+# r: pathlib
+from pathlib import Path
 
-#Load saved model
-model = tf.keras.models.load_model("C:/Reope/GitHub/src/log/LearnModel_A.h5")
-print(model.summary())
-
-# Input 1 - Building Construction Type
-# 0 - RC
-# 1 - Steel Concrete
-# 2 - Wood
-# 3 - Wood Hybrid
-constructionType = 2
-
-# Input 2 - building type is 0 or 1
-buildingType = 1
-
-# Input 3 - location 0 - 5
-location = 3
+# region Configuring dirs
+current_dir = Path(__file__).resolve().parent
 
-# Input 4 - Total floor area
-area = 100.0
+# Dynamically find paths based on the project directory
+log_dir = current_dir / 'log'
+model_path = log_dir / 'LearnModel_A.h5'
+scalerY_path = log_dir / 'scalerY_A.pkl'
+scalerX_path = log_dir / 'scalerx_A.pkl'
+result_file_path = log_dir / 'prediction_result.txt'
+# endregion
 
-# Input 5 - floorCount usually between 5 and 25
-floorCount = 10
-
-inputData = np.array([[constructionType], [buildingType], [location], [area], [floorCount]])
-
-scalerY = joblib.load('C:/Reope/GitHub/src/log/scalerY_A.pkl')
-scalerX = joblib.load('C:/Reope/GitHub/src/log/scalerx_A.pkl')
-
-# reshaping model
-inputData_new = np.reshape(inputData, (1,5))
+#Load saved model
+model = tf.keras.models.load_model(model_path)
+print(model.summary())
 
-x_scaled = scalerX.transform(inputData_new)
+def modelB(targetCo2):
+    print(f"Processing with targetCo2: {targetCo2}")
 
-prediction = model.predict(x_scaled)
-#reshape or normalize here
-y_scaled = scalerY.inverse_transform(prediction)
+    with open(result_file_path, 'w') as f:
+        f.write(str(targetCo2))
 
-numPrediction = float(y_scaled[0][0])
-print("Building 1: " + str(numPrediction))
+# Function 2: Takes all arguments
+def modelA(constructionType, buildingType, location, area, floorCount):
+    # Prepare input data
+    inputData = np.array([[constructionType], [buildingType], [location], [area], [floorCount]])
+
+    # Load scalers
+    scalerY = joblib.load(scalerY_path)
+    scalerX = joblib.load(scalerX_path')
+
+    # Reshaping input data
+    inputData_new = np.reshape(inputData, (1, 5))
+    x_scaled = scalerX.transform(inputData_new)
 
-with open('C:/Reope/GitHub/src/log/prediction_result.txt', 'w') as f:
-    f.write(str(numPrediction)) \ No newline at end of file
+    # Make prediction
+    prediction = model.predict(x_scaled)
+
+    y_scaled = scalerY.inverse_transform(prediction)
+    numPrediction = float(y_scaled[0][0])
+
+    print("Building 1: " + str(numPrediction))
+
+    # Write the result to file
+    with open(result_file_path, 'w') as f:
+        f.write(str(numPrediction))
+
+# Check how many arguments were passed from C# to identify the model
+if len(sys.argv) == 2:
+    targetCo2 = float(sys.argv[1])
+    modelB(targetCo2)
+else:
+    constructionType = int(sys.argv[1])
+    buildingType = int(sys.argv[2])
+    location = int(sys.argv[3])
+    area = float(sys.argv[4])
+    floorCount = int(sys.argv[5])
+
+    modelA(constructionType, buildingType, location, area, floorCount)

From bc2617d6d4c9cc477cb288850d5d291d38aa0c4b Mon Sep 17 00:00:00 2001
From: Iliana Papadopoulou
Date: Mon, 23 Sep 2024 09:47:17 +0200
Subject: [PATCH 3/6] Update run_ml.py

---
 run_ml.py | 56 +++++++++++++++++++++++++++++++------------------------
 1 file changed, 32 insertions(+), 24 deletions(-)

diff --git a/run_ml.py b/run_ml.py
index dc4512d..8e94373 100644
--- a/run_ml.py
+++ b/run_ml.py
@@ -6,27 +6,38 @@
 import numpy as np
 # r: scikit-learn
 import sklearn
-# r: sys
-import sys
-# r: os
+
 import os
-# r: pathlib
-from pathlib import Path
+# r: argparse
+import argparse
+
+print("NumPy version:", np.__version__)
+
+parser = argparse.ArgumentParser(description="Run machine learning model predictions")
+
+# Adding arguments for both modelA and modelB
+parser.add_argument("--targetCo2", type=float, help="Target CO2 for modelB")
+parser.add_argument("--constructionType", type=int, help="Construction type (0-3) for modelA")
+parser.add_argument("--buildingType", type=int, help="Building type (0 or 1) for modelA")
+parser.add_argument("--location", type=int, help="Location (0-5) for modelA")
+parser.add_argument("--area", type=float, help="Total floor area for modelA")
+parser.add_argument("--floorCount", type=int, help="Floor count (usually between 5 and 25) for modelA")
+
+# Parse arguments from the command line
+args = parser.parse_args()
 
 # region Configuring dirs
-current_dir = Path(__file__).resolve().parent
+current_dir = os.path.dirname(os.path.abspath(__file__))
 
 # Dynamically find paths based on the project directory
-log_dir = current_dir / 'log'
-model_path = log_dir / 'LearnModel_A.h5'
-scalerY_path = log_dir / 'scalerY_A.pkl'
-scalerX_path = log_dir / 'scalerx_A.pkl'
-result_file_path = log_dir / 'prediction_result.txt'
+log_dir = os.path.join(current_dir, 'log')
+model_path = os.path.join(log_dir, 'LearnModel_A.h5')
+scalerY_path = os.path.join(log_dir, 'scalerY_A.pkl')
+scalerX_path = os.path.join(log_dir, 'scalerx_A.pkl')
+result_file_path = os.path.join(log_dir, 'prediction_result.txt')
 # endregion
 
 #Load saved model
 model = tf.keras.models.load_model(model_path)
-print(model.summary())
 
 def modelB(targetCo2):
     print(f"Processing with targetCo2: {targetCo2}")
@@ -41,7 +52,7 @@ def modelA(constructionType, buildingType, location, area, floorCount):
 
     # Load scalers
     scalerY = joblib.load(scalerY_path)
-    scalerX = joblib.load(scalerX_path')
+    scalerX = joblib.load(scalerX_path)
 
     # Reshaping input data
     inputData_new = np.reshape(inputData, (1, 5))
     x_scaled = scalerX.transform(inputData_new)
@@ -59,15 +70,12 @@ def modelA(constructionType, buildingType, location, area, floorCount):
     with open(result_file_path, 'w') as f:
         f.write(str(numPrediction))
 
-# Check how many arguments were passed from C# to identify the model
-if len(sys.argv) == 2:
-    targetCo2 = float(sys.argv[1])
-    modelB(targetCo2)
+if args.targetCo2 is not None:
+    # If targetCo2 is provided, run modelB
+    modelB(args.targetCo2)
+elif all([args.constructionType is not None, args.buildingType is not None,
+          args.location is not None, args.area is not None, args.floorCount is not None]):
+    # If all the required arguments for modelA are provided, run modelA
+    modelA(args.constructionType, args.buildingType, args.location, args.area, args.floorCount)
 else:
-    constructionType = int(sys.argv[1])
-    buildingType = int(sys.argv[2])
-    location = int(sys.argv[3])
-    area = float(sys.argv[4])
-    floorCount = int(sys.argv[5])
-
-    modelA(constructionType, buildingType, location, area, floorCount)
+    print("Invalid arguments. Please provide either targetCo2 or all arguments for modelA.")

From 9b4fc3064f02a2e7a30971884e1ddc67e269bc2c Mon Sep 17 00:00:00 2001
From: Iliana Papadopoulou
Date: Mon, 23 Sep 2024 12:35:11 +0200
Subject: [PATCH 4/6] fixed modelB and way to pass arguments

---
 .gitignore                | 3 ++
 log/prediction_result.txt | 2 +-
 run_ml.py                 | 83 +++++++++++++++++++++++----------------
 3 files changed, 54 insertions(+), 34 deletions(-)
 create mode 100644 .gitignore

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..1463dcc
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+
+log/input.txt
+log/prediction_result.txt
diff --git a/log/prediction_result.txt b/log/prediction_result.txt
index c57a761..e150453 100644
--- a/log/prediction_result.txt
+++ b/log/prediction_result.txt
@@ -1 +1 @@
-530.463623046875
\ No newline at end of file
+317.619384765625
\ No newline at end of file
diff --git a/run_ml.py b/run_ml.py
index 8e94373..0635cd8 100644
--- a/run_ml.py
+++ b/run_ml.py
@@ -6,47 +6,64 @@
 import numpy as np
 # r: scikit-learn
 import sklearn
-
 import os
-# r: argparse
-import argparse
+import Rhino
+# r: dill
 
 print("NumPy version:", np.__version__)
 
-parser = argparse.ArgumentParser(description="Run machine learning model predictions")
-
-# Adding arguments for both modelA and modelB
-parser.add_argument("--targetCo2", type=float, help="Target CO2 for modelB")
-parser.add_argument("--constructionType", type=int, help="Construction type (0-3) for modelA")
-parser.add_argument("--buildingType", type=int, help="Building type (0 or 1) for modelA")
-parser.add_argument("--location", type=int, help="Location (0-5) for modelA")
-parser.add_argument("--area", type=float, help="Total floor area for modelA")
-parser.add_argument("--floorCount", type=int, help="Floor count (usually between 5 and 25) for modelA")
-
-# Parse arguments from the command line
-args = parser.parse_args()
-
 # region Configuring dirs
 current_dir = os.path.dirname(os.path.abspath(__file__))
 
 # Dynamically find paths based on the project directory
 log_dir = os.path.join(current_dir, 'log')
-model_path = os.path.join(log_dir, 'LearnModel_A.h5')
+modelA_path = os.path.join(log_dir, 'LearnModel_A.h5')
+modelB_path = os.path.join(log_dir, 'LearnModel_B.h5')
 scalerY_path = os.path.join(log_dir, 'scalerY_A.pkl')
 scalerX_path = os.path.join(log_dir, 'scalerx_A.pkl')
+scalerX_B_path = os.path.join(log_dir, 'scalerx_B.pkl')
 result_file_path = os.path.join(log_dir, 'prediction_result.txt')
+input_file_path = os.path.join(log_dir, 'input.txt')
 # endregion
 
-#Load saved model
-model = tf.keras.models.load_model(model_path)
 
-def modelB(targetCo2):
-    print(f"Processing with targetCo2: {targetCo2}")
+def modelB(targetCo2, buildingType, location, area, floorCount):
+    Rhino.RhinoApp.WriteLine(f"Processing with targetCo2: {targetCo2}")
+    model = tf.keras.models.load_model(modelB_path)
+    inputData = np.array([[targetCo2], [buildingType], [location], [area], [floorCount]])
+
+    scalerX = joblib.load(scalerX_B_path)
+
+    inputData_new = np.reshape(inputData, (1, 5))
+    x_scaled = scalerX.transform(inputData_new)
+
+    # Make prediction
+    prediction = model.predict(x_scaled)
+
+    construction_type = prediction.argmax(axis=1)
+    print("this is the construction type")
+    print(construction_type)
+    construction_type_unwarpped = construction_type[0]
+    print(construction_type_unwarpped)
+
+    # check which construction types each integer is
+    if construction_type_unwarpped == 0:
+        construction_type_string = "Concrete"
+
+    if construction_type_unwarpped == 1:
+        construction_type_string = "Steel-Concrete"
+
+    if construction_type_unwarpped == 2:
+        construction_type_string = "Wood"
+
+    if construction_type_unwarpped == 3:
+        construction_type_string = "Wood-Hybrid"
 
     with open(result_file_path, 'w') as f:
-        f.write(str(targetCo2))
+        f.write(construction_type_string)
 
 # Function 2: Takes all arguments
 def modelA(constructionType, buildingType, location, area, floorCount):
+    model = tf.keras.models.load_model(modelA_path)
     # Prepare input data
     inputData = np.array([[constructionType], [buildingType], [location], [area], [floorCount]])
@@ -63,19 +80,19 @@ def modelA(constructionType, buildingType, location, area, floorCount):
 
     y_scaled = scalerY.inverse_transform(prediction)
     numPrediction = float(y_scaled[0][0])
-
-    print("Building 1: " + str(numPrediction))
-
+
     # Write the result to file
     with open(result_file_path, 'w') as f:
         f.write(str(numPrediction))
+
+# Read inputs from the input file
+with open(input_file_path, 'r') as f:
+    inputs = f.read().strip().split(',')
 
-if args.targetCo2 is not None:
-    # If targetCo2 is provided, run modelB
-    modelB(args.targetCo2)
-elif all([args.constructionType is not None, args.buildingType is not None,
-          args.location is not None, args.area is not None, args.floorCount is not None]):
-    # If all the required arguments for modelA are provided, run modelA
-    modelA(args.constructionType, args.buildingType, args.location, args.area, args.floorCount)
+if inputs[0] == "False":
+    targetCo2, buildingType, location, area, floorCount = map(float, inputs[1:])
+    modelB(targetCo2, buildingType, location, area, floorCount)
 else:
-    print("Invalid arguments. Please provide either targetCo2 or all arguments for modelA.")
+    print(inputs)
+    constructionType, buildingType, location, area, floorCount = map(float, inputs[1:])
+    modelA(int(constructionType), int(buildingType), int(location), area, int(floorCount))

From a7474bfe1e035d7b1cbc6b032094b4652a682314 Mon Sep 17 00:00:00 2001
From: Iliana Papadopoulou
Date: Mon, 14 Oct 2024 19:09:48 +0200
Subject: [PATCH 5/6] Update README.md

---
 README.md | 35 ++++++++++++++++++++++++++---------
 1 file changed, 26 insertions(+), 9 deletions(-)

diff --git a/README.md b/README.md
index 9520345..e44ae61 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# LearnCarbon
+[# LearnCarbon
 Empower the architects for analysing the relationship between structure & embodied carbon in early design stages!
 
 Looking at all the new construction that is projected to take place between now and 2040, we see the critical role embodied carbon plays.
@@ -35,15 +35,32 @@ Also check requirements.txt file in the src repository.
 * Model: Training on google collab with Tensorflow and Keras
 * Validating the model: Plot learning curve
 
-### Step 03 : Link ML model through Grasshopper
+### Step 03: Linking the ML Models with Rhino
 
-![Data flow](https://github.com/LearnCarbon/src/blob/main/examples/dataFlow%20diagram.png)
+This version of the LearnCarbon Rhino Plugin no longer requires **Hops through Flask**. The models can now be executed directly via Rhino's script editor.
 
-* Hops gets parameters from the 3D model designed in Rhino , and the users inputs.
-* Hops is passing these values to the ML model and calling the ML model itself using a flask server
-* After the prediction is calculated Hops returns the value to a Rhino .rhp plugin written in c# and WPF, which displays the result in LearnCarbon.
+- The Rhino plugin retrieves parameters from the 3D model designed in Rhino and user inputs.
+- The ML models process these inputs and return predictions directly into Rhino, displaying the results in the LearnCarbon interface.
 
-![Hops functionality](https://github.com/LearnCarbon/src/blob/main/examples/HopsBackend_cropped.png)
+---
+
+## Setup Instructions
+
+### Prerequisites
+
+You’ll need a **Rhino 8** license and Python for running the scripts within Rhino.
+
+### Step-by-Step Guide
+
+1. **Clone the Repositories:**
+   Clone the LearnCarbon Rhino Plugin and the Machine Learning backend repositories into the same directory:
+   ```bash
+   git clone https://github.com/LearnCarbon/Tool_LearnCarbonRhinoPlugin
+   git clone https://github.com/LearnCarbon/src
+
+2. **Install Required Libraries:** In Rhino’s `ScriptEditor`, run the run_ml.py script from the cloned src directory. This script will install all the required Python libraries.
+
+3. **Run the Backend:** Once the libraries are installed, the ML models are ready to be linked with the Rhino plugin. The models will process data sent from Rhino and return GWP and structural type predictions.
+
+4. **Build and Install the Rhino Plugin:** Follow the Rhino plugin build instructions in the [Tool_LearnCarbonRhinoPlugin repository](https://github.com/LearnCarbon/Tool_LearnCarbonRhinoPlugin).
 
-## Set-up
-You would need a working Rhino 7 license and an IDE like Visual Studio where you can run the app.py file in src.

From 463f6a7ff8d9a3b4ddcc846662cab65469b3ce44 Mon Sep 17 00:00:00 2001
From: Iliana Papadopoulou
Date: Mon, 14 Oct 2024 19:11:29 +0200
Subject: [PATCH 6/6] Update README.md

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index e44ae61..101803b 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-[# LearnCarbon
+# LearnCarbon
 Empower the architects for analysing the relationship between structure & embodied carbon in early design stages!
 
 Looking at all the new construction that is projected to take place between now and 2040, we see the critical role embodied carbon plays.
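
A note on the file handshake that the series above converges on in PATCH 4/6: run_ml.py no longer takes command-line arguments. It reads one comma-separated line from log/input.txt and writes its prediction to log/prediction_result.txt (both files are git-ignored). The sketch below is illustrative only, not part of the plugin: the real writer and reader of these files is the C# Rhino plugin, and run_ml.py itself has to be executed from Rhino's ScriptEditor because it imports the Rhino module. The field order is taken from the final version of run_ml.py; the helper names are made up for the example.

```python
from pathlib import Path

# Mirrors the layout used by run_ml.py: a 'log' folder next to the script.
LOG_DIR = Path(__file__).resolve().parent / "log"
INPUT_FILE = LOG_DIR / "input.txt"
RESULT_FILE = LOG_DIR / "prediction_result.txt"


def write_model_a_request(constructionType, buildingType, location, area, floorCount):
    """Request a GWP prediction (modelA). Any first field other than "False" selects modelA."""
    fields = ["True", constructionType, buildingType, location, area, floorCount]
    INPUT_FILE.write_text(",".join(str(v) for v in fields))


def write_model_b_request(targetCo2, buildingType, location, area, floorCount):
    """Request a construction-type suggestion (modelB). The literal "False" flag selects modelB."""
    fields = ["False", targetCo2, buildingType, location, area, floorCount]
    INPUT_FILE.write_text(",".join(str(v) for v in fields))


def read_result():
    """Read back whatever run_ml.py wrote: a GWP number for modelA, a construction-type string for modelB."""
    return RESULT_FILE.read_text().strip()


if __name__ == "__main__":
    # Example: ask modelA for the GWP of a 10-storey, 100 m2 wooden building at location 3.
    write_model_a_request(constructionType=2, buildingType=1, location=3, area=100.0, floorCount=10)
    # ... run run_ml.py inside Rhino's ScriptEditor at this point ...
    print("Prediction:", read_result())
```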