diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..1463dcc
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,3 @@
+
+log/input.txt
+log/prediction_result.txt
diff --git a/README.md b/README.md
index 9520345..101803b 100644
--- a/README.md
+++ b/README.md
@@ -35,15 +35,32 @@ Also check requirements.txt file in the src repository.
 * Model: Training on google collab with Tensorflow and Keras
 * Validating the model: Plot learning curve
 
-### Step 03 : Link ML model through Grasshopper
+### Step 03: Linking the ML Models with Rhino
 
-![Data flow](https://github.com/LearnCarbon/src/blob/main/examples/dataFlow%20diagram.png)
+This version of the LearnCarbon Rhino Plugin no longer requires **Hops and a Flask server**. The models can now be executed directly from Rhino's ScriptEditor.
 
-* Hops gets parameters from the 3D model designed in Rhino , and the users inputs.
-* Hops is passing these values to the ML model and calling the ML model itself using a flask server
-* After the prediction is calculated Hops returns the value to a Rhino .rhp plugin written in c# and WPF, which displays the result in LearnCarbon.
+- The Rhino plugin retrieves parameters from the 3D model designed in Rhino together with the user inputs.
+- The ML models process these inputs and return the predictions directly to Rhino, which displays the results in the LearnCarbon interface.
 
-![Hops functionality](https://github.com/LearnCarbon/src/blob/main/examples/HopsBackend_cropped.png)
+---
+
+## Setup Instructions
+
+### Prerequisites
+
+You’ll need a **Rhino 8** license; the scripts run inside Rhino's built-in Python `ScriptEditor`.
+
+### Step-by-Step Guide
+
+1. **Clone the Repositories:**
+   Clone the LearnCarbon Rhino Plugin and the machine learning backend repositories into the same directory:
+   ```bash
+   git clone https://github.com/LearnCarbon/Tool_LearnCarbonRhinoPlugin
+   git clone https://github.com/LearnCarbon/src
+   ```
+
+2. **Install Required Libraries:** In Rhino, open the `ScriptEditor` and run the `run_ml.py` script from the cloned src directory. The script installs all the required Python libraries (declared through its `# r:` requirement comments).
+
+3. **Run the Backend:** Once the libraries are installed, the ML models are ready to be linked with the Rhino plugin. The models process the data sent from Rhino and return GWP and construction-type predictions.
+
+4. **Build and Install the Rhino Plugin:** Follow the Rhino plugin build instructions in the [Tool_LearnCarbonRhinoPlugin repository](https://github.com/LearnCarbon/Tool_LearnCarbonRhinoPlugin).
 
-## Set-up
-You would need a working Rhino 7 license and an IDE like Visual Studio where you can run the app.py file in src.
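Note on the data exchange: the plugin and `run_ml.py` (added below) communicate through two plain-text files in `log/`, `input.txt` and `prediction_result.txt`, which is also why both are git-ignored above. The following is a minimal sketch of that handshake from the caller's side, in Python for illustration only; the token order mirrors the parsing at the bottom of `run_ml.py`, and the numeric values are hypothetical.

```python
# Sketch of the file-based handshake expected by run_ml.py (values are hypothetical).
# First token: "True"  -> modelA (predict GWP for a known construction type),
#              "False" -> modelB (predict a construction type for a target CO2).
import os

log_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "log")

# Request a GWP prediction: constructionType, buildingType, location, area, floorCount
with open(os.path.join(log_dir, "input.txt"), "w") as f:
    f.write("True,2,1,3,1500,5")

# ...run run_ml.py in Rhino's ScriptEditor, then read the result back...
with open(os.path.join(log_dir, "prediction_result.txt")) as f:
    print("Predicted GWP:", f.read())
```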
diff --git a/log/prediction_result.txt b/log/prediction_result.txt
new file mode 100644
index 0000000..e150453
--- /dev/null
+++ b/log/prediction_result.txt
@@ -0,0 +1 @@
+317.619384765625
\ No newline at end of file
diff --git a/run_ml.py b/run_ml.py
new file mode 100644
index 0000000..0635cd8
--- /dev/null
+++ b/run_ml.py
@@ -0,0 +1,98 @@
+# r: tensorflow
+import tensorflow as tf
+# r: joblib
+import joblib
+# r: numpy
+import numpy as np
+# r: scikit-learn
+import sklearn
+import os
+import Rhino
+# r: dill
+
+print("NumPy version:", np.__version__)
+
+# region Configuring dirs
+current_dir = os.path.dirname(os.path.abspath(__file__))
+
+# Resolve paths relative to the project directory
+log_dir = os.path.join(current_dir, 'log')
+modelA_path = os.path.join(log_dir, 'LearnModel_A.h5')
+modelB_path = os.path.join(log_dir, 'LearnModel_B.h5')
+scalerY_path = os.path.join(log_dir, 'scalerY_A.pkl')
+scalerX_path = os.path.join(log_dir, 'scalerx_A.pkl')
+scalerX_B_path = os.path.join(log_dir, 'scalerx_B.pkl')
+result_file_path = os.path.join(log_dir, 'prediction_result.txt')
+input_file_path = os.path.join(log_dir, 'input.txt')
+# endregion
+
+
+# Model B: predicts the construction type that meets a target CO2 value
+def modelB(targetCo2, buildingType, location, area, floorCount):
+    Rhino.RhinoApp.WriteLine(f"Processing with targetCo2: {targetCo2}")
+    model = tf.keras.models.load_model(modelB_path)
+    inputData = np.array([[targetCo2], [buildingType], [location], [area], [floorCount]])
+
+    scalerX = joblib.load(scalerX_B_path)
+
+    # Reshape to a single (1, 5) feature row and scale
+    inputData_new = np.reshape(inputData, (1, 5))
+    x_scaled = scalerX.transform(inputData_new)
+
+    # Make prediction and pick the most likely class
+    prediction = model.predict(x_scaled)
+    construction_type = int(prediction.argmax(axis=1)[0])
+    print("Predicted construction type class:", construction_type)
+
+    # Map the class index to its construction type label
+    construction_type_labels = {
+        0: "Concrete",
+        1: "Steel-Concrete",
+        2: "Wood",
+        3: "Wood-Hybrid",
+    }
+    construction_type_string = construction_type_labels.get(construction_type, "Unknown")
+
+    # Write the result to file
+    with open(result_file_path, 'w') as f:
+        f.write(construction_type_string)
+
+
+# Model A: predicts the GWP for a given construction type
+def modelA(constructionType, buildingType, location, area, floorCount):
+    model = tf.keras.models.load_model(modelA_path)
+
+    # Prepare input data
+    inputData = np.array([[constructionType], [buildingType], [location], [area], [floorCount]])
+
+    # Load scalers
+    scalerY = joblib.load(scalerY_path)
+    scalerX = joblib.load(scalerX_path)
+
+    # Reshape to a single (1, 5) feature row and scale
+    inputData_new = np.reshape(inputData, (1, 5))
+    x_scaled = scalerX.transform(inputData_new)
+
+    # Make prediction and map it back to the original GWP scale
+    prediction = model.predict(x_scaled)
+    y_scaled = scalerY.inverse_transform(prediction)
+    numPrediction = float(y_scaled[0][0])
+
+    # Write the result to file
+    with open(result_file_path, 'w') as f:
+        f.write(str(numPrediction))
+
+
+# Read inputs from the input file written by the Rhino plugin
+with open(input_file_path, 'r') as f:
+    inputs = f.read().strip().split(',')
+
+# The first token selects the model: "False" -> modelB, anything else -> modelA
+if inputs[0] == "False":
+    targetCo2, buildingType, location, area, floorCount = map(float, inputs[1:])
+    modelB(targetCo2, buildingType, location, area, floorCount)
+else:
+    print(inputs)
+    constructionType, buildingType, location, area, floorCount = map(float, inputs[1:])
+    modelA(int(constructionType), int(buildingType), int(location), area, int(floorCount))
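As a quick sanity check after step 2 of the setup, one can verify that the bundled model and scaler files load and accept a single `(1, 5)` feature row before wiring the plugin to `run_ml.py`. This is only a sketch under the same file layout that `run_ml.py` assumes; the all-zero feature row is a placeholder, not meaningful input.

```python
# r: tensorflow
# r: joblib
# r: numpy
# Optional smoke test (sketch): confirm LearnModel_A.h5 and scalerx_A.pkl load
# and accept one (1, 5) feature row. Run from Rhino's ScriptEditor.
import os

import joblib
import numpy as np
import tensorflow as tf

log_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "log")

model_a = tf.keras.models.load_model(os.path.join(log_dir, "LearnModel_A.h5"))
scaler_x = joblib.load(os.path.join(log_dir, "scalerx_A.pkl"))

dummy_row = np.zeros((1, 5))  # placeholder features, not real project inputs
prediction = model_a.predict(scaler_x.transform(dummy_row))
print("Model A output shape:", prediction.shape)
```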