From a56282d01c51a33bb2e25b6334430b69c313f1b7 Mon Sep 17 00:00:00 2001 From: jainprana Date: Fri, 30 May 2025 00:12:44 -0400 Subject: [PATCH 01/38] Create temp --- bird/postprocess/SA_optimization/temp | 1 + 1 file changed, 1 insertion(+) create mode 100644 bird/postprocess/SA_optimization/temp diff --git a/bird/postprocess/SA_optimization/temp b/bird/postprocess/SA_optimization/temp new file mode 100644 index 00000000..78981922 --- /dev/null +++ b/bird/postprocess/SA_optimization/temp @@ -0,0 +1 @@ +a From 65e172e98e5a531427db18d319841329bc7a262d Mon Sep 17 00:00:00 2001 From: jainprana Date: Fri, 30 May 2025 00:14:25 -0400 Subject: [PATCH 02/38] Add files via upload --- bird/postprocess/SA_optimization/README.md | 47 +++ .../Xdata_study_scaleup_0_4vvm_3000W.csv | 185 ++++++++++++ bird/postprocess/SA_optimization/get_csv.py | 39 +++ .../SA_optimization/get_optimal.py | 282 ++++++++++++++++++ .../get_optimal_with_constraint.py | 263 ++++++++++++++++ bird/postprocess/SA_optimization/results.pkl | Bin 0 -> 5208 bytes .../ydata_study_scaleup_0_4vvm_3000W.csv | 185 ++++++++++++ 7 files changed, 1001 insertions(+) create mode 100644 bird/postprocess/SA_optimization/README.md create mode 100644 bird/postprocess/SA_optimization/Xdata_study_scaleup_0_4vvm_3000W.csv create mode 100644 bird/postprocess/SA_optimization/get_csv.py create mode 100644 bird/postprocess/SA_optimization/get_optimal.py create mode 100644 bird/postprocess/SA_optimization/get_optimal_with_constraint.py create mode 100644 bird/postprocess/SA_optimization/results.pkl create mode 100644 bird/postprocess/SA_optimization/ydata_study_scaleup_0_4vvm_3000W.csv diff --git a/bird/postprocess/SA_optimization/README.md b/bird/postprocess/SA_optimization/README.md new file mode 100644 index 00000000..37309e9b --- /dev/null +++ b/bird/postprocess/SA_optimization/README.md @@ -0,0 +1,47 @@ +# Surrogate-Based Optimization with Simulated Annealing + +This project implements a complete framework for surrogate-based optimization using Simulated Annealing (SA). It supports three surrogate models: + - Radial Basis Function Interpolator ('rbf') + - Random Forest ('rf') + - Neural Network ('nn') + +The SA optimizer operates on discrete feature values (0,1,2) with an option to restrict the number of spargers (1) to a fixed value (max_saprgers). + +- Preprocessing the data ('get_csv.py') + - reads the 'configs.pkl' and 'results.pkl' files from a study + - saves the configuration in 'Xdata_{study_name}.csv' file + - save the qoi and qoi_error in 'ydata_{study_name}.csv' file + +- Surrogate modeling and optimization ('get_optimal.py'/'get_optimal_with_constraints.py') + - run_optimization(...) function sets up the surrogate-based optimization: + - Inputs: + - X: read from 'Xdata_{study_name}.csv' file + - y: read from 'ydata_{study_name}.csv' file + - model_type: type of surrogate model (default = 'rbf') + - model_type = 'rbf': Radial Basis Function + - model_type = 'rf': Random Forest + - model_type = 'nn': Neural Network + - max_spargers: maximum number of spargers (only in 'get_optimal_with_constraints.py') (default = 8) + - n_runs: number of bootstrap runs (default = 10) + - max_iters: maximum number of iterations of SA (default = 1000) + - bootstrap_size: sample size of each boostrap (default = 100) + - For each bootstrap run, the model hyperparameters are tuned using 5-fold cross validation. + - The simulated_annealing_surrogate(...) 
function runs the optimization: + - If SA is too slow or fails to converge, you can change the following parameters: + - temp: maximum temperature of SA. Controls exploration. + - alpha: the rate at which temperature changes every iteration. + - It returen the optimal solutions along with the optimization logs which are used to make plots in postprocessing. + - Once the optimization is done the best solution is saved in a csv file and the following plots are made: + - Mean-CI plot of the objective function (qoi) + - Mean-CI plot of the distance of an iterate from the optimal solution (this is not done in 'get_optimal_with_constraints.py'). + +Python dependencies: +- numpy +- pandas +- scikit-learn +- scipy +- optuna +- matplotlib +- warnings +- random +- tensorflow diff --git a/bird/postprocess/SA_optimization/Xdata_study_scaleup_0_4vvm_3000W.csv b/bird/postprocess/SA_optimization/Xdata_study_scaleup_0_4vvm_3000W.csv new file mode 100644 index 00000000..771f8ccf --- /dev/null +++ b/bird/postprocess/SA_optimization/Xdata_study_scaleup_0_4vvm_3000W.csv @@ -0,0 +1,185 @@ +0,1,0,1,0,0,2,2,2,2,0 +0,2,2,2,2,2,0,1,0,0,0 +2,1,0,1,2,0,0,1,1,0,1 +0,1,1,2,0,1,1,2,1,1,0 +0,2,2,2,2,1,1,1,0,2,0 +0,1,2,2,1,1,0,2,2,1,0 +2,0,2,1,0,2,1,1,1,2,2 +0,2,1,1,0,0,2,0,0,1,0 +0,0,0,1,1,1,1,0,2,2,2 +0,2,0,2,1,0,2,2,1,2,1 +2,2,1,1,0,2,2,2,0,1,1 +0,1,0,1,2,0,2,1,1,1,1 +2,1,1,0,0,1,2,1,1,2,1 +1,2,0,1,1,2,2,2,2,1,0 +1,1,0,0,0,2,2,0,2,1,2 +1,1,0,1,0,0,2,0,2,0,1 +0,1,2,2,2,0,1,2,2,1,0 +2,1,2,0,1,1,2,0,1,0,0 +0,1,1,0,0,0,2,2,2,2,1 +2,2,0,1,0,2,2,2,1,0,1 +0,2,0,2,1,2,2,0,0,1,2 +0,1,2,1,1,2,0,2,0,0,2 +1,1,0,1,0,2,1,2,1,1,0 +2,2,2,1,1,0,2,2,1,2,0 +2,2,1,1,1,2,1,2,1,1,0 +1,1,2,1,1,1,1,0,2,1,1 +0,0,2,1,2,0,1,0,0,0,1 +0,0,2,0,1,0,2,0,0,1,2 +0,1,0,1,1,0,2,1,0,1,0 +1,2,2,1,0,2,2,1,0,1,2 +0,0,1,2,1,1,1,2,2,1,2 +0,0,1,1,2,0,1,0,2,2,1 +2,0,0,2,1,0,2,0,0,2,1 +0,2,0,1,0,1,1,2,0,2,1 +0,1,2,2,1,0,1,0,2,2,1 +2,1,2,2,1,1,0,1,0,2,0 +1,2,2,2,1,2,2,0,2,1,0 +0,0,0,2,1,0,0,2,2,1,1 +1,0,0,0,1,0,0,2,2,1,1 +2,2,0,0,0,0,2,2,2,2,1 +2,1,2,1,2,2,1,0,1,2,1 +2,2,1,2,2,0,1,1,0,2,2 +2,1,0,2,2,2,1,1,0,1,1 +1,0,0,1,2,2,1,1,2,0,0 +1,2,2,0,2,0,1,1,0,0,2 +1,1,0,2,1,0,2,0,0,1,2 +1,2,0,1,1,0,1,1,0,2,0 +1,2,0,1,1,0,2,2,2,2,2 +1,2,1,2,2,1,1,2,2,0,1 +0,1,1,0,0,1,0,1,1,2,2 +0,0,0,1,0,0,0,0,1,0,2 +0,1,1,1,0,2,2,2,0,1,0 +1,0,1,0,1,1,2,0,2,0,2 +0,1,0,1,2,0,1,1,2,2,0 +2,0,2,1,2,1,0,2,2,0,2 +0,2,1,1,2,1,2,2,2,2,0 +0,1,0,2,2,0,0,0,2,2,2 +2,0,1,1,0,2,1,1,1,1,2 +2,2,1,1,1,2,1,1,2,2,0 +0,0,0,0,0,1,0,2,2,0,0 +0,0,0,2,2,1,0,2,0,1,1 +1,2,0,0,2,2,2,2,0,1,0 +1,1,2,0,2,2,1,0,1,0,0 +1,0,0,2,0,1,0,0,2,0,0 +2,1,2,1,0,1,2,1,1,2,2 +1,0,2,0,2,1,1,2,0,1,2 +1,0,2,0,2,0,0,0,2,2,1 +2,0,2,1,1,2,0,0,2,1,0 +1,0,1,0,2,0,2,2,1,0,2 +0,1,2,1,0,0,0,2,1,1,2 +0,0,1,0,2,1,0,0,1,0,2 +1,0,0,0,1,2,1,2,0,2,1 +0,2,1,0,0,1,1,1,0,0,0 +2,0,2,1,0,0,0,0,0,1,1 +1,0,0,1,1,2,2,1,2,1,0 +2,2,1,2,2,0,2,1,1,2,2 +0,2,2,1,2,0,1,1,2,0,1 +1,0,2,1,1,2,0,0,1,0,0 +1,0,1,1,0,1,1,2,1,1,1 +1,1,0,1,1,2,2,0,0,2,0 +1,0,2,2,2,1,1,1,1,2,1 +1,1,2,1,0,0,1,1,2,2,1 +0,1,0,0,2,1,0,1,2,0,2 +0,0,0,2,2,1,2,0,2,1,1 +2,0,2,1,1,1,0,0,0,0,1 +2,0,0,1,2,1,1,1,0,1,1 +1,0,1,0,0,0,0,1,1,0,0 +0,1,2,1,2,0,2,0,1,2,1 +0,0,2,1,0,0,2,1,1,2,1 +0,2,1,1,1,0,2,0,2,0,2 +2,1,1,2,0,0,0,1,1,0,0 +2,1,1,0,2,1,0,0,0,1,0 +0,2,1,0,0,1,2,2,2,0,2 +1,0,2,0,1,0,2,0,2,2,0 +1,1,1,1,2,2,1,0,1,0,0 +1,0,1,0,2,2,1,2,0,1,0 +2,0,2,2,1,2,1,0,2,0,2 +2,1,1,1,0,0,0,1,0,1,0 +1,1,1,1,2,1,2,1,0,0,2 +2,0,2,2,2,1,1,2,0,1,1 +0,1,1,2,2,1,2,2,1,0,0 +0,0,1,1,2,2,0,1,1,0,2 +2,2,2,1,0,0,1,1,2,2,1 +2,0,1,2,0,1,0,1,1,2,2 +0,1,0,2,2,1,2,1,1,1,0 +1,1,2,1,1,0,2,0,1,0,2 +2,1,0,0,1,2,2,2,2,1,0 
+2,2,0,0,1,1,1,1,1,0,0 +2,0,1,1,1,1,1,0,1,2,0 +1,2,2,1,2,1,2,2,2,0,1 +1,0,2,0,1,0,1,1,0,0,0 +2,1,0,1,0,1,0,1,2,2,2 +0,1,2,2,0,0,1,1,2,2,1 +1,2,1,1,1,1,1,2,0,1,0 +2,2,1,1,2,2,1,2,0,2,2 +2,2,0,2,0,1,1,0,0,0,2 +1,2,2,0,0,2,0,0,1,1,2 +2,1,2,2,1,0,0,2,1,0,1 +1,1,2,0,0,2,2,0,0,0,2 +0,2,0,0,2,0,2,0,1,2,2 +0,1,0,0,0,1,2,1,0,0,2 +1,2,0,2,2,0,2,2,1,1,1 +2,0,1,0,2,1,1,1,2,2,2 +0,0,1,0,2,0,0,1,2,1,2 +0,1,0,1,2,1,2,2,0,0,2 +2,2,2,2,0,1,1,1,1,0,2 +2,2,2,2,0,1,1,2,0,1,0 +1,2,1,1,0,1,0,0,2,0,2 +0,1,0,0,1,0,2,1,0,0,0 +2,2,1,1,2,2,2,2,0,2,2 +0,2,2,0,1,2,2,0,2,0,0 +0,1,0,1,2,1,1,1,2,2,2 +0,1,2,2,1,2,0,1,0,0,2 +1,0,1,0,0,2,1,1,0,2,1 +2,0,1,0,0,0,1,1,0,0,0 +2,2,2,1,0,2,1,2,0,0,2 +2,0,1,0,2,2,0,2,0,2,2 +0,0,0,1,2,2,2,0,0,1,2 +0,2,0,1,2,0,1,1,0,2,1 +0,1,2,0,2,0,1,1,2,0,1 +1,1,1,0,0,1,2,2,1,1,1 +2,1,2,0,0,1,0,0,0,2,2 +2,1,2,2,2,1,2,0,0,2,2 +0,0,0,2,0,1,1,1,0,0,1 +0,2,0,2,2,2,2,2,0,1,1 +2,2,2,0,1,1,1,0,1,0,0 +1,0,1,1,2,2,2,1,0,0,0 +2,1,2,2,1,1,0,0,1,1,2 +0,1,2,1,2,1,0,0,1,1,2 +1,2,2,0,1,2,2,0,1,1,0 +0,2,2,0,0,1,0,2,2,2,0 +1,2,2,2,1,2,2,2,0,1,0 +1,0,2,1,2,2,2,2,1,1,2 +2,0,2,0,1,1,1,2,1,2,1 +0,1,0,1,0,1,1,2,1,2,1 +2,2,0,1,2,2,2,2,0,2,2 +1,0,2,2,0,1,1,0,0,0,1 +0,0,0,1,2,0,2,1,2,0,2 +0,0,0,1,2,2,2,0,1,1,0 +0,1,0,1,1,0,0,1,1,0,1 +2,0,2,0,1,0,0,2,1,2,2 +1,0,1,2,2,1,0,0,1,2,0 +2,0,2,0,1,0,0,1,2,1,0 +1,0,1,0,0,1,0,0,2,1,0 +2,1,2,2,0,1,1,2,2,1,2 +2,1,2,1,1,0,1,0,1,2,2 +1,0,0,2,2,0,1,1,1,1,0 +1,0,2,1,2,0,2,2,2,2,2 +1,0,0,0,2,1,2,0,0,0,2 +1,0,0,2,2,2,0,1,2,0,0 +0,1,1,0,2,2,1,2,1,1,0 +1,2,2,2,2,0,1,0,2,2,2 +1,0,1,1,2,1,2,0,1,1,1 +1,1,0,2,2,1,1,1,2,2,1 +2,2,1,0,2,2,2,0,1,2,1 +1,0,0,0,1,2,0,1,2,0,1 +0,1,2,0,2,2,1,1,0,1,2 +1,2,1,2,2,0,2,0,0,0,2 +0,0,0,1,2,2,1,0,1,0,1 +2,0,1,0,1,2,0,0,2,1,1 +1,0,0,2,2,0,2,1,0,0,1 +1,0,1,1,0,2,2,2,1,2,0 +0,0,1,1,0,2,1,1,0,1,0 +2,2,1,0,2,0,0,1,0,1,1 +1,1,0,1,2,0,1,2,2,0,0 diff --git a/bird/postprocess/SA_optimization/get_csv.py b/bird/postprocess/SA_optimization/get_csv.py new file mode 100644 index 00000000..cc76fc14 --- /dev/null +++ b/bird/postprocess/SA_optimization/get_csv.py @@ -0,0 +1,39 @@ +import pickle5 as pkl +import os +import numpy as np +import csv + +def get_config_result(config_fold): + with open(os.path.join(config_fold, "results.pkl"), "rb") as f: + results = pkl.load(f) + with open(os.path.join(config_fold, "configs.pkl"), "rb") as f: + configs = pkl.load(f) + Xdata = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], np.int64) + count = 0 + # Save data into CSV files + xfname = "Xdata_"+config_fold+".csv" + yfname = "ydata_"+config_fold+".csv" + with open(xfname,'w',newline='') as csvfile: + writer = csv.writer(csvfile) + for sims in results: + b0 = configs[sims][0] + b1 = configs[sims][1] + b2 = configs[sims][2] + raw_data = np.concatenate((b0,b1,b2), axis = None) + writer.writerow(raw_data) + + with open(yfname,'w',newline='') as csvfile: + writer = csv.writer(csvfile) + for sims in results: + q0 = results[sims]["qoi"] + q1 = results[sims]["qoi_err"] + y_data = np.concatenate((q0,q1), axis = None) + writer.writerow(y_data) + +studies = { + "study_scaleup_0_4vvm_3000W" : r"0.0036$m^3$ 0.4vvm 0W" + } + + +for study in studies: + get_config_result(study) diff --git a/bird/postprocess/SA_optimization/get_optimal.py b/bird/postprocess/SA_optimization/get_optimal.py new file mode 100644 index 00000000..ca960fd8 --- /dev/null +++ b/bird/postprocess/SA_optimization/get_optimal.py @@ -0,0 +1,282 @@ +import numpy as np +import pandas as pd +import random +import optuna +from sklearn.ensemble import RandomForestRegressor +from sklearn.preprocessing import OneHotEncoder +from 
sklearn.model_selection import KFold, cross_val_score
+from sklearn.metrics import mean_squared_error
+from scipy.interpolate import RBFInterpolator
+from tensorflow.keras.models import Sequential
+from tensorflow.keras.layers import Dense
+import matplotlib.pyplot as plt
+import warnings
+
+warnings.filterwarnings("ignore")
+
+def tune_rbf(X,y):
+    # Tune the RBFInterpolator
+    # kernel - "multiquadric" (Can try other kernels)
+    def objective(trial):
+        epsilon = trial.suggest_float("epsilon",0.1,10.0, log=False)
+        try:
+            kf = KFold(n_splits=5,shuffle=True,random_state=42)
+            return np.mean([
+                mean_squared_error(
+                    y[test],
+                    RBFInterpolator(X[train],y[train],epsilon=epsilon,kernel="multiquadric")(X[test])
+                )
+                for train, test in kf.split(X)
+            ])
+        except:
+            return float("inf")
+    study = optuna.create_study(direction="minimize")
+    study.optimize(objective,n_trials=20)
+    return study.best_params
+
+def tune_rf(X,y):
+    # Tune the Random Forest
+    def objective(trial):
+        model = RandomForestRegressor(
+            n_estimators=trial.suggest_int("n_estimators",50,200),
+            max_depth = trial.suggest_int("max_depth",3,10),
+            min_samples_leaf = trial.suggest_int("min_samples_leaf",1,10),
+            random_state = 42
+        )
+        scores = cross_val_score(model, X, y, cv=5, scoring='neg_mean_squared_error')
+        return -np.mean(scores)
+    study = optuna.create_study(direction="minimize")
+    study.optimize(objective,n_trials=20)
+    return study.best_params
+
+def tune_nn(X_encoded,y):
+    # Tune the Neural Network
+    def objective(trial):
+        units = trial.suggest_int("n_units",16,128)
+        layers = trial.suggest_int("n_layers",1,3)
+        kf = KFold(n_splits=5,shuffle=True,random_state=42)
+        mses=[]
+        for train, test in kf.split(X_encoded):
+            model = Sequential()
+            model.add(Dense(units, activation='relu', input_shape=(X_encoded.shape[1],)))
+            for _ in range(layers-1):
+                model.add(Dense(units,activation='relu'))
+            model.add(Dense(1))
+            model.compile(optimizer='adam',loss='mse')
+            model.fit(X_encoded[train],y[train],epochs=100,batch_size=16,verbose=0)
+            mses.append(mean_squared_error(y[test],model.predict(X_encoded[test]).flatten()))
+        return np.mean(mses)
+    study = optuna.create_study(direction="minimize")
+    study.optimize(objective, n_trials=20)
+    return study.best_params
+
+
+class Surrogatewrapper:
+    # Wrapper that builds the surrogate model and predicts the QOI value for given X
+    def __init__(self,model_type,X,y,params):
+        # Inputs:
+        # model_type = 'rbf' for RBFInterpolator
+        #            = 'rf' for Random Forest
+        #            = 'nn' for Neural Network
+        # X,y = the raw data
+        # params = tuned parameters of the surrogate model
+
+        self.model_type = model_type
+        self.encoder = None
+
+        if model_type == 'rbf':
+            self.model = RBFInterpolator(X,y,kernel='multiquadric',epsilon=params["epsilon"])
+        elif model_type == 'rf':
+            self.model = RandomForestRegressor(**params, random_state=42)
+            self.model.fit(X,y)
+        elif model_type == 'nn':
+            self.encoder = OneHotEncoder(sparse_output=False)
+            X_encoded = self.encoder.fit_transform(X)
+            self.model = self._build_nn(X_encoded.shape[1],params["n_units"],params["n_layers"])
+            self.model.fit(X_encoded,y,epochs=100,batch_size=16,verbose=0)
+
+
+    def _build_nn(self,input_dim, units, layers):
+        # Builds the neural network
+        model = Sequential()
+        model.add(Dense(units, activation='relu',input_shape=(input_dim,)))
+        for _ in range(layers-1):
+            model.add(Dense(units,activation='relu'))
+        model.add(Dense(1))
+        model.compile(optimizer='adam', loss='mse')
+        return model
+
+    def predict(self,X):
+        X = X.reshape(1,-1)
+        if self.model_type == 'nn':
+            X = self.encoder.transform(X)
+            return float(self.model.predict(X)[0])
+        elif self.model_type == 'rf':
+            return float(self.model.predict(X)[0])
+        else:
+            return float(self.model(X)[0])
+
+def simulated_annealing_surrogate(surrogate,dim=12,max_iters=1000,temp=10.0,alpha=0.95):
+    # Runs the Simulated Annealing (SA) and reports the results
+    # Inputs:
+    #   surrogate = tuned surrogate model
+    #   dim = dimension of the problem
+    #   max_iters = maximum number of iterations for SA
+    #   temp = parameter of Simulated Annealing (can be changed or tuned for other problems)
+    #        = max temperature. It controls the exploration rate of SA
+    #   alpha = parameter of Simulated Annealing (can be changed or tuned for other problems)
+    #        = cooling rate. It determines how quickly temp decays.
+
+    # Outputs:
+    #   x_best = optimal solution
+    #   y_best = optimal objective function value
+    #   trace = optimization log. Updates at the end of each iteration.
+    #   trace_x = tracks how X changes during each iteration.
+
+    # generate random starting point
+    x_curr = np.random.randint(0,3,size=dim)
+
+    y_curr = surrogate.predict(x_curr)
+    x_best, y_best = x_curr.copy(), y_curr
+
+    trace = [(0,y_best)]
+    trace_x = [x_curr.copy()]
+
+    for i in range(max_iters):
+        # Optimization loop
+
+        x_new = x_curr.copy()
+        idx = random.randint(0,dim-1)
+        # perturb a random dimension to get new x
+        x_new[idx] = (x_new[idx] + random.choice([-1,1])) % 3
+        y_new = surrogate.predict(x_new)
+
+        delta = y_new - y_curr
+        if delta < 0 or np.random.rand() < np.exp(-delta / temp):
+            x_curr, y_curr = x_new, y_new
+            if y_curr < y_best:
+                x_best, y_best = x_new.copy(), y_new
+
+        trace.append((i,y_best))
+        trace_x.append(x_curr.copy())
+        temp *= alpha
+
+    return x_best, y_best, trace, trace_x
+
+def run_optimization(X,y,model_type = 'rbf',n_runs=10,max_iters = 1000, bootstrap_size=100):
+    # Bootstraps data, runs optimization and postprocesses the results.
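+    # Note: bootstrap samples are drawn below with
+    # rng.choice(len(X), size=bootstrap_size, replace=False), so bootstrap_size
+    # must not exceed the number of rows in X.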
+ # Inputs: + # X,y = the raw data + # model_type = 'rbf' for RBFInterpolator + # = 'rf' for Random Forest + # = 'nn' for Neural Network + # n_runs = number of bootstraps + # max_iters = maximum number of SA iterations + # bootstrap_size = size of bootstrap samples + + all_x = [] + all_y = [] + all_traces = np.zeros((n_runs, max_iters + 1)) + all_trace_x = [] + rng = np.random.default_rng(42) + # Bootstrap the data + bootstrap_idxs = [rng.choice(len(X), size = bootstrap_size, replace = False) for _ in range(n_runs)] + + i = 0 + for idxs in bootstrap_idxs: + # Tune the model for each bootstrap + X_sub, y_sub = X[idxs], y[idxs] + if model_type == "rbf": + params = tune_rbf(X_sub,y_sub) + elif model_type == "rf": + params = tune_rf(X_sub,y_sub) + elif model_type == "nn": + encoder = OneHotEncoder(sparse_output=False) + X_encoded = encoder.fit_transform(X_sub) + params = tune_nn(X_encoded,y_sub) + else: + raise ValueError("Invalid model_type") + + # build the surrogate model + surrogate = Surrogatewrapper(model_type,X_sub,y_sub,params) + + # run optimization + x_best, y_best, trace, trace_x = simulated_annealing_surrogate(surrogate, dim=X.shape[1], max_iters=max_iters) + + trace_y = [y for _, y in trace] + all_traces[i,:] = trace_y + all_x.append(x_best) + all_y.append(y_best) + all_trace_x.append(trace_x) + i = i + 1 + + # Compute the best y across all the bootstraps + best_index = np.argmin(all_y) + best_x = all_x[best_index] + best_y = all_y[best_index] + # Save the best solution + df = pd.DataFrame([{ + **{f"x{i}": best_x[i] for i in range(len(best_x))}, + "best_y": best_y + }]) + df.to_csv(f"best_bootstrap_solution_{model_type}_size_{bootstrap_size}.csv", index=False) + + + # Make the mean-CI plot for the objective function and save it + mean_trace = np.mean(-1*all_traces,axis=0) + std_trace = np.std(all_traces,axis=0) + lower_bound = mean_trace - 1.96* std_trace / np.sqrt(n_runs) + upper_bound = mean_trace + 1.96* std_trace / np.sqrt(n_runs) + iterations = np.arange(max_iters + 1) + + plt.figure(figsize=(8,4)) + plt.plot(iterations,mean_trace,label=f"Mean Convergence {model_type.upper()}", color = "blue") + plt.fill_between(iterations, lower_bound, upper_bound, color = "blue", alpha = 0.3, label = "95% CI") + plt.xlabel("Iteration") + plt.ylabel("Best Surrogate-Predicted Objective") + plt.title(f"Mean Convergence with 95% Confidence Interval ({model_type.upper()})") + plt.grid(True) + plt.legend() + plt.savefig(f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}.png",dpi=300) + plt.show() + + # Compute the distance of intermediate x from the optimal solution + optimal_x = np.ones(X.shape[1],dtype=int) + l1_traces = [] + for t in all_trace_x: + distances = [np.sum(np.abs(np.array(x) - optimal_x)) for x in t] + l1_traces.append(distances) + + ''' compute and make the mean-CI plot for the distance between the intermdeiate + solution and the optimal solution ''' + l1_traces = np.array(l1_traces) + mean_l1 = np.mean(l1_traces,axis=0) + std_l1 = np.std(l1_traces,axis=0) + lower = mean_l1 - 1.96 * std_l1 / np.sqrt(len(trace_x)) + upper = mean_l1 + 1.96 * std_l1 / np.sqrt(len(trace_x)) + iterations = np.arange(len(mean_l1)) + plt.figure(figsize=(8,4)) + plt.plot(iterations,mean_l1,label="Mean L1 Distance from Optimal",color = "darkred") + plt.fill_between(iterations,lower,upper,alpha=0.3,color="darkred", label="95% CI") + plt.xlabel("Iteration") + plt.ylabel("L1 Distance from Optimal") + plt.title("Convergence Toward Optimal Solution (L1 Norm)") + plt.grid(True) + plt.legend() + 
plt.tight_layout() + plt.savefig(f"Mean_L1_distance_{model_type}_size_{bootstrap_size}.png",dpi=300) + plt.show() + + + + +# Read data from the csv file. +X_raw_data = pd.read_csv('Xdata_study_scaleup_0_4vvm_3000W.csv') +y_raw_data = pd.read_csv('ydata_study_scaleup_0_4vvm_3000W.csv') + +X = X_raw_data.values +y = y_raw_data.iloc[:,:-1].values +y = y*-1 + +# The function will build, tune, and optimize the surrogate model and postprocess the results. +run_optimization(X,y,model_type="rbf",n_runs = 10,max_iters = 1000, bootstrap_size=150) \ No newline at end of file diff --git a/bird/postprocess/SA_optimization/get_optimal_with_constraint.py b/bird/postprocess/SA_optimization/get_optimal_with_constraint.py new file mode 100644 index 00000000..03279036 --- /dev/null +++ b/bird/postprocess/SA_optimization/get_optimal_with_constraint.py @@ -0,0 +1,263 @@ +import numpy as np +import pandas as pd +import random +import optuna +from sklearn.ensemble import RandomForestRegressor +from sklearn.preprocessing import OneHotEncoder +from sklearn.model_selection import KFold, cross_val_score +from sklearn.metrics import mean_squared_error +from scipy.interpolate import RBFInterpolator +from tensorflow.keras.models import Sequential +from tensorflow.keras.layers import Dense +import matplotlib.pyplot as plt + +def tune_rbf(X,y): + def objective(trial): + # Tune the RBFInterpolator + # kernel - "multiquadric" (Can try other kernels) + epsilon = trial.suggest_float("epsilon",0.1,10.0, log=False) + try: + kf = KFold(n_splits=5,shuffle=True,random_state=42) + return np.mean([ + mean_squared_error( + y[test], + RBFInterpolator(X[train],y[train],epsilon=epsilon,kernel="multiquadric")(X[test]) + ) + for train, test in kf.split(X) + ]) + except: + return float("inf") + study = optuna.create_study(direction="minimize") + study.optimize(objective,n_trials=20) + return study.best_params + +def tune_rf(X,y): + # Tune the Random Forest + def objective(trial): + model = RandomForestRegressor( + n_estimators=trial.suggest_int("n_estimators",50,200), + max_depth = trial.suggest_int("max_depth",3,10), + min_samples_leaf = trial.suggest_int("min_samples_leaf",1,10), + random_state = 42 + ) + scores = cross_val_score(model, X, y, cv=5, scoring='neg_mean_squared_error') + return -np.mean(scores) + study = optuna.create_study(direction="minimize") + study.optimize(objective,n_trials=20) + return study.best_params + +def tune_nn(X_encoded,y): + # Tune the Neural Network + def objective(trial): + units = trial.suggest_int("n_units",16,128) + layers = trial.suggest_int("n_layers",1,3) + kf = KFold(n_splits=5,shuffle=True,random_state=42) + mses=[] + for train, test in kf.split(X_encoded): + model = Sequential() + model.add(Dense(units, activation='relu', input_shape=(X_encoded.shape[1],))) + for _ in range(layers-1): + model.add(Dense(units,activation='relu')) + model.add(Dense(1)) + model.compile(optimizer='adam',loss='mse') + model.fit(X_encoded[train],y[train],epochs=100,batch_size=16,verbose=0) + mses.append(mean_squared_error(y[test],model.predict(X_encoded[test]).flatten())) + return np.mean(mses) + study = optuna.create_study(direction="minimize") + study.optimize(objective, n_trials=20) + return study.best_params + + +class Surrogatewrapper: + # Wrapper that builds the surrogate model and predicts the QOI value for given X + def __init__(self,model_type,X,y,params): + # Inputs: + # model_type = 'rbf' for RBFInterpolator + # = 'rf' for Random Forest + # = 'nn' for Neural Network + # X,y = the raw data + # params = tune 
parameter of the surrogate model + + self.model_type = model_type + self.encoder = None + + if model_type == 'rbf': + self.model = RBFInterpolator(X,y,kernel='multiquadric',epsilon=params["epsilon"]) + elif model_type == 'rf': + self.model = RandomForestRegressor(**params, random_state=42) + self.model.fit(X,y) + elif model_type == 'nn': + self.encoder = OneHotEncoder(sparse_output=False) + X_encoded = self.encoder.fit_transform(X) + self.model = self._build_nn(X_encoded.shape[1],params["n_units"],params["n_layers"]) + self.model.fit(X_encoded,y,epochs=100,batch_size=16,verbose=0) + + + def _build_nn(self,input_dim, units, layers): + # Builds the neural network + model = Sequential() + model.add(Dense(units, activation='relu',input_shape=(input_dim,))) + for _ in range(layers-1): + model.add(Dense(units,activation='relu')) + model.add(Dense(1)) + model.compile(optimizer='adam', loss='mse') + return model + + def predict(self,X): + X = X.reshape(1,-1) + if self.model_type == 'nn': + X = self.encoder.transform(X) + return float(self.model.predict(X)[0]) + elif self.model_type == 'rf': + return float(self.model.predict(X)[0]) + else: + return float(self.model(X)[0]) + +def simulated_annealing_surrogate(surrogate,dim=12,max_iters=1000,max_spargers=8,temp=10.0,alpha=0.95): + # Runs the Simulated Annealing (SA) and reports the resuts + # Inputs: + # surrogate = tuned surrogate model + # dim = dimension of the problem + # max_spargers = maximum number of spargers allowed + # max_iters = maximum number of iterations for SA + # temp = parameter of Simulated Annealing (can be changes for tuned for other problems) + # = max temperature. It controls the exploration rate of SA + # alpha = parameter of Simulated Annealing (can be changes for tuned for other problems) + # = cooling rate. It determines how quickly temp decays. + + # Outputs: + # x_best = optimal solution + # y_best = optimal objective function value + # trace = optimization log. Updates at the end of each iteration. + # trace_x = tracks how X changes during each iteration. + + def is_valid(x): + # Checks if the number of spargers <= max_spargers + return np.sum(x== 1) <= max_spargers + + while True: + # generate random starting point + x_curr = np.random.randint(0,3,size=dim) + if is_valid(x_curr): + break + + y_curr = surrogate.predict(x_curr) + x_best, y_best = x_curr.copy(), y_curr + + trace = [(0,y_best)] + + for i in range(max_iters): + # Optimization loop + + x_new = x_curr.copy() + idx = random.randint(0,dim-1) + # perturb a random dimension to get new x + x_new[idx] = (x_new[idx] + random.choice([-1,1])) % 3 + + if not is_valid(x_new): + trace.append((i,y_best)) + temp *= alpha + continue + + y_new = surrogate.predict(x_new) + + delta = y_new - y_curr + if delta < 0 or np.random.rand() < np.exp(-delta / temp): + x_curr, y_curr = x_new, y_new + if y_curr < y_best: + x_best, y_best = x_new.copy(), y_new + + trace.append((i,y_best)) + temp *= alpha + + return x_best, y_best, trace + +def run_optimization(X,y,model_type = 'rbf',max_spargers = 8,n_runs=10,max_iters = 1000, bootstrap_size=100): + # Bootstraps data, runs optimization and postprocesses the resutls. 
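+    # Note: max_spargers caps how many entries of a candidate x are equal to 1
+    # (i.e. active spargers); the cap is enforced by is_valid() inside
+    # simulated_annealing_surrogate, which rejects infeasible proposals.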
+ # Inputs: + # X,y = the raw data + # model_type = 'rbf' for RBFInterpolator + # = 'rf' for Random Forest + # = 'nn' for Neural Network + # n_runs = number of bootstraps + # max_iters = maximum number of SA iterations + # bootstrap_size = size of bootstrap samples + + all_x = [] + all_y = [] + all_traces = np.zeros((n_runs, max_iters + 1)) + rng = np.random.default_rng(42) + # Bootstrap the data + bootstrap_idxs = [rng.choice(len(X), size = bootstrap_size, replace = False) for _ in range(n_runs)] + + i = 0 + for idxs in bootstrap_idxs: + # Tune the model for each bootstrap + X_sub, y_sub = X[idxs], y[idxs] + if model_type == "rbf": + params = tune_rbf(X_sub,y_sub) + elif model_type == "rf": + params = tune_rf(X_sub,y_sub) + elif model_type == "nn": + encoder = OneHotEncoder(sparse_output=False) + X_encoded = encoder.fit_transform(X_sub) + params = tune_nn(X_encoded,y_sub) + else: + raise ValueError("Invalid model_type") + + # build the surrogate model + surrogate = Surrogatewrapper(model_type,X_sub,y_sub,params) + + # run optimization + x_best, y_best, trace = simulated_annealing_surrogate(surrogate, dim=X.shape[1], max_iters=max_iters, max_spargers=max_spargers) + + trace_y = [y for _, y in trace] + all_traces[i,:] = trace_y + all_x.append(x_best) + all_y.append(y_best) + i = i + 1 + + # Compute the best y across all the bootstraps + best_index = np.argmin(all_y) + best_x = all_x[best_index] + best_y = all_y[best_index] + # Save the best solution + df = pd.DataFrame([{ + **{f"x{i}": best_x[i] for i in range(len(best_x))}, + "best_y": best_y + }]) + df.to_csv(f"best_bootstrap_solution_{model_type}_size_{bootstrap_size}_max_spargers_{max_spargers}.csv", index=False) + + print("X = ", x_best) + print("surrogate-predicted y",y_best) + + # Make the mean-CI plot for the objective function and save it + mean_trace = np.mean(-1*all_traces,axis=0) + std_trace = np.std(all_traces,axis=0) + lower_bound = mean_trace - 1.96* std_trace / np.sqrt(n_runs) + upper_bound = mean_trace + 1.96* std_trace / np.sqrt(n_runs) + iterations = np.arange(max_iters + 1) + + plt.figure(figsize=(8,4)) + plt.plot(iterations,mean_trace,label=f"Mean Convergence {model_type.upper()}", color = "blue") + plt.fill_between(iterations, lower_bound, upper_bound, color = "blue", alpha = 0.3, label = "95% CI") + plt.xlabel("Iteration") + plt.ylabel("Best Surrogate-Predicted Objective") + plt.title(f"Mean Convergence with 95% Confidence Interval ({model_type.upper()})") + plt.grid(True) + plt.legend() + plt.savefig(f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}_max_spargers_{max_spargers}.png",dpi=300) + plt.show() + + + +# Read data from the csv file. +X_raw_data = pd.read_csv('Xdata_study_scaleup_0_4vvm_3000W.csv') +y_raw_data = pd.read_csv('ydata_study_scaleup_0_4vvm_3000W.csv') + +X = X_raw_data.values +y = y_raw_data.iloc[:,:-1].values +y = y*-1 + +# The function will build, tune, and optimize the surrogate model and postprocess the results. 
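+# Note: y was negated above so that the SA routine, which minimizes, effectively
+# maximizes the raw qoi; the convergence plot flips the sign back via -1*all_traces.
+# Illustrative call only (argument values are arbitrary, not from the study):
+# any of the three surrogate types and a different sparger budget can be used, e.g.
+# run_optimization(X, y, model_type="rf", max_spargers=4, n_runs=5,
+#                  max_iters=500, bootstrap_size=100)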
+run_optimization(X,y,model_type="rbf",max_spargers = 8, n_runs = 10,max_iters = 1000, bootstrap_size=100) \ No newline at end of file diff --git a/bird/postprocess/SA_optimization/results.pkl b/bird/postprocess/SA_optimization/results.pkl new file mode 100644 index 0000000000000000000000000000000000000000..10aa5fe88138e206188453a8ec77f9050aaa9765 GIT binary patch literal 5208 zcmXxoc{r3?7yxi7l`Pj*n6VFIhA4X_OR{#-DwRv65|Kn5NpwZlC`7vICav00NwnBv z$}*POZmC;WN=3JgwCLXNeD@uHcplHs`<`>=obP?lH%W4Zz8W(0PhtTF?W2BWkB^Sp zT;S!ZcA&_wr~h#FBZ_V(z`xmiCo{SPO78KYBRoJAAt?tPCUCbo)y*8J@)M#f+U3c&`}(LuzzipoAMEE&M)G#HD+ zjJBJ+incMaqxm})Mp|Hc2`YjCW7;|cKj!D48yHoCF~bY`Udhnb9`V^>jG2Xz0vLNi z*0hEp7Rf0Hl$T>)Qo#sGOv4 z80VMMRZMRVF|qylp)&o282N!2%0#`2OQF}-sTKH@m)?lcA5u$5>6?uQ6(p+O*k?+?2_*J=p*1>*pUB8G-Ny$|ojdo}^4-LGN^Qf=o{7Pz28`3p z8VvgP7%^dvaF=@VN{l?gEZ^F!@;dPkvEGu3TEjEk#$%FV>+ zJQ!o7!Y+duNvu`{{LMwMzYN&zzu``60iqX{udh)y@R zYY|3g!A#}Xw$4^z&~b&L+R|)yjBZP9Qj@Kgz7lPwziSw1e8I>9OsD@|QKvb5&N{pe zoQDoZVx$3PaxAsm`qeKaPSD5sQ8hUkqed|9lRX;0uAtX0u3Zv-FUTCDKxrKB=d;pV z$|SDmXI}B}O)6&$7>BjyYxLNCV&c>V>AQAP=Vd#X_Ry!QdFKuiVRT!0knWy*c3Xi-)Op{LqWF3!fN7|i6WRr&h?`q>oJW&Yz7a}pyTFjkS_^!SEnp4yfGW6moJx`y8&rQ#2Fp8AgzQiHbdtt=72Ieq+Q=)2trPPAi0n4g4 z&~Y99dUUbOX$MAbeE$Ts1L?>bUsD0Z%cK1vC9I zZ8XfpnwZEX@8BTR9r!nib*w&9nxP?w(GoDD9CMY!p8wanSqz_C1&mRU zG>)lx(Cu+IiR&x~ZuUr$!Dts4Yx}k9vPEZz^)R(O7tf$R>u4~hqe)0!`buIC9qNx> z^tg`ETrfuJhtMq*^N6+9`B=P;^}(nLOl96q=UtgIh~0i1f^JOe#wZtzy?ws@`OOky zB1W4)+4LYrzk#vNSuLpdrk^8$-__MAYpAoN3#L_iGx);(1QORW5HZYq%)rP1OmoWw zcH&$ABYVE6b{jpOr`J1PGBwW}FjiWXR<%BTJ_TQ7a%>B=F-ik7{35K&3fxTM zI#xaCo&0?(M&@Ab4-1}Wzw{;6vu{J@@`iI5se+lzd6!z_Ii0pQ8LrVAsLxsk%xq6b zmGweLVq%S1!ebn&&W{5#X4@~=(>g-OjfxvqnU`X;3QW&t0>4JEh*+!0<9=lLPmI=s znJ-(Cuut_dF;PsDpFb}bqi`^zeZ@c9gXlf#QSFScEv3$UveYi`JR8ALpvOBiwB(uE z4vZqejQtG*Om4{&>;7#hY)MuFMgd@!0_E6wHFRI;4VirKaW*y2O<>v?^6u-0J4hUl zyUjgGl82E47-`xEXqdYJZ|K)}Cs&t%y zHG9^&KdD+UMQR@{=QETp(er${p}+8CJ4Q3X7#{=)1qF1SX#FtISno~shs|Kxp1gD= zNiT_esO1=|IC2N0iD0Z7kIJ2WY>0LG#&DC*PQXYW%yh3tYk80rG2z~SE9~=ZG4cn~ zl_~f~DD#n6o6Re;2XiYi`c)dIp74p`mqSdDywr2E-WrT{fEf*Uz8?HJO02tK-eGcU-ISjt4JBE=9n9++@g_>zE=(xcp>>DC4jP8Q5=fvGV z^(Kj!Fnut$=J9Ndwn=lUZR_0as^;)8at1SAzhrHSQy-l(yEufqzyhP)U}mNriNj*L-%9%bGJaY{ zo!vxfoR#CLnjdss>ZxDhJincfQ6-qs)#qZjdb)>q7bdTXnB;yI> z&m1hRn1#_oFwT^mrCo!y#3XiQ@!D^bFe(DmDSu+Q-ETZGz6npkSDCsmEe6x`7dWw8 z8i{pk4llcEL+$NyFr!5VqRw$<#JY?JM!9G&M)lJ1G}A)7OX(gUJY+VbyIBzwS2=4lzObYVWTPsAuJ-G^hRh*V-%oCf0o^ zF6+`ms!p5(({biLy13{8F@BD0cw5>iMpj_VX89d6AB+)e|FTZ(c1<56J23rE)&Vt> zGKh8jYV&#c&VG!hf-yQ(mR8nXC)RqwcewL-HAd=ShTr(nwqJ{g^-THHnv|o2(K@iH zHaV)5GhB%^=Bh8dysR4|Q!u^h(G|C9nuzh!oV<5FqUus180$pT)`b?Bm_Rqipjw~m zW7ENmJ`~0;RVt%xfm@lHc@ai(VABt5uCH2il-MJV`{~GX>dwHC<_zF*6>R9<-#s&x zC#mnkC=^V$`R>NJ>GYZJT)Wi1@1p=CZ7|iWH22ld=^kG9t>K@O;naJ8H<&?8lI@-l z`gv%Jy|R%}XNA!jFrzNb*OId_WG>w$B`rOl_h2+0%rs~Ftz%>K9ZVd-2yEfK!-x%L zIc>@D%Jx`#4VOjBdlk|!+5pCQbYfECfCI6Io=my@%fDlE2Ta!~;#6D3WMXYml2M5_ z^`0{yjG6t5^Xs>C?P=9+egE#%A&la{R8L)-v#OC^-`#jY^2UB@A6&uo^1MeXU5}BR z?fF?H+8WgRLn@fTmv#3__G~1^{}wv7;u}?$Jis_nqHCJ6nZ!gBEj6E2q+m1y%*-L< z%HKzeiAgTjO+1rtgwa}QTybEkrA7!HR~g!zKT4gKWnfHQX85u1|KC5SZ3=Y}QqTJZ zF!rMIzD-8-_Y6Yk$Dz96R1eq-#;tVxu!bR~$II_M-^ZMZksFxa(3a*M8BWCbKNBOw zAzB#agPFGkWJQ!~5_{CcZ?QF|p6M(w4c#$~#k@RXO;gh1(=SpzV5M}t(ya229rQU9 zBrlHbIZ54LFqolGZp-kv|IgWxP1R~ArebswOz+5`Q@O`-dM+;&JF>^$#b}l^XLa`a zn2S}!#6owqh#&qK*?=*^9IZl}=>2O+u`UW0wqkS@%=pmh&wr1n_pc`;v|^5j3r6?A znAhIcv?kk=oE?p|-#6@|-eH+wrUirPu5;U zsZrm}906m`nEBmBfj)!66`MtWL{hcjD421uebi7z6B)0|zpTpO-DHf8gJ~>&x%`0o zWnxYC&s7+M)ZXfV>GaElzWo?ZtkqxslM3e(M!{f){pFn@WjxOyYHvfpO!O9(-qx-pCh$C7lA^gCqcAX| 
zmAh4BAJ9Fg+i!7Z>Qw69lL5vt8(kCrmhR!g{N0MqCJ!;P2Q!?a{iry>o8;uX-g)M_ bmx?O~)77r4$+|=Lqt2Z=yn#3t#+Ls9#_kjd literal 0 HcmV?d00001 diff --git a/bird/postprocess/SA_optimization/ydata_study_scaleup_0_4vvm_3000W.csv b/bird/postprocess/SA_optimization/ydata_study_scaleup_0_4vvm_3000W.csv new file mode 100644 index 00000000..e224b58a --- /dev/null +++ b/bird/postprocess/SA_optimization/ydata_study_scaleup_0_4vvm_3000W.csv @@ -0,0 +1,185 @@ +4.639249158318668,0.12626038361898534 +4.830205075462752,0.1345759778255486 +11.883263228093135,0.3264360683677917 +13.920765170099399,0.4105486654309187 +6.542159679067755,0.2034214300932224 +12.152375869450877,0.36239501906304267 +8.022225385816165,0.2748815895051835 +9.57833436748858,0.28111820963360595 +7.621157957714864,0.25567306815826935 +6.854083596204951,0.2298569939977962 +11.145584784788815,0.31811738493951314 +11.017594168736228,0.3184159068790033 +13.50973761231716,0.3752779584066442 +12.030553522901826,0.3626718350283127 +8.500853761159016,0.2514870108881235 +8.743216029351826,0.2670107363696459 +11.717721199440591,0.3128707825208673 +11.345565509701485,0.353472952974596 +8.988391479133291,0.2580096856927115 +8.543409441773761,0.262301422633598 +6.560127864354076,0.20837078993418331 +6.4149526976824855,0.19347669879343268 +13.741028936629446,0.37516250222124864 +10.260225187485267,0.30671415538616664 +12.38699282309954,0.33678185294994506 +15.459376105064136,0.447246924226177 +9.09803267916285,0.304227995563132 +6.740910374100037,0.21154083742443155 +13.191334348313264,0.34796851870506956 +13.70507196126807,0.38117142223133704 +11.518308973994909,0.35289543460220707 +11.515168788856977,0.3461668678795173 +7.224363681870076,0.23657610962538503 +10.805708473886197,0.3263647525674359 +12.932602535390258,0.370807747121449 +10.892569560758172,0.31241264469905766 +12.069083972134544,0.36751044492654417 +6.4823749997684805,0.19262760715138516 +11.231649637811778,0.3277463243939802 +4.270684191558357,0.10987581442252059 +13.76528529571703,0.38285304273583004 +10.152583557069674,0.3408235347109574 +10.986707238586106,0.324251636154978 +9.79517497061306,0.3029635961468728 +10.419543443002388,0.3170020475695413 +10.258773239532914,0.27979900920419293 +10.189745733831108,0.3250937746551829 +6.514063372968979,0.2268487917845758 +13.953941108502349,0.37603794998487483 +11.992362749779748,0.34762137147830285 +7.882404941588837,0.23711080920489294 +9.275719183296298,0.26904923614363635 +7.776457060294847,0.25905651438188215 +9.725980401340914,0.2985287367453095 +6.807759844786992,0.2048265466788003 +6.630803695687349,0.21680559865756013 +4.063931442420922,0.11272271250075441 +11.225122173481324,0.3453857489445164 +9.82654794320513,0.31766875974197384 +6.157926107439263,0.18678044797135201 +5.855484586386016,0.18450597017794995 +9.18587411892937,0.2978687770910763 +9.88460112835837,0.2705988868749546 +6.841601401913933,0.2050600340598003 +12.79640726757815,0.3623376347189863 +12.83899374588669,0.3975083213578766 +8.457051563045496,0.26991461104084247 +9.763108036495394,0.3275758151015267 +8.984052674188392,0.23708461040402135 +11.471274422120723,0.3049553719602422 +10.615101031292475,0.311189711506409 +13.247670835151544,0.36060925350524264 +10.074632957437625,0.31206315974184845 +7.549823041704327,0.21747657909403267 +13.660770312425418,0.3507548164008165 +9.258995020455252,0.29796230302465954 +9.338803482609384,0.3070023813054725 +10.168223206620057,0.289058067197555 +15.414172835849321,0.4436665921077268 +6.382847851314592,0.18876460404289166 
+12.541321134236878,0.3777538153682838 +14.625672661663916,0.38802297600654273 +9.2131453370491,0.2672227550883449 +6.020912999143897,0.18351282428727747 +9.594482823382547,0.3548268182647394 +9.434406063121779,0.32561538569508414 +11.27709117666853,0.3170389795034779 +11.85440229800048,0.3713104751517777 +8.143103526931212,0.24373125334484186 +5.987413503473131,0.19209801113080932 +10.66860645276813,0.3146498728341653 +10.973009201937053,0.326561903194073 +6.736162134712763,0.21831998403883163 +6.319124326915894,0.20386451019469645 +9.85518408546759,0.2547269254087896 +11.752111783293545,0.3360594969365309 +7.2505681489712845,0.22033395375408343 +11.538062664951463,0.32045394779240677 +9.480109510593579,0.2931065455457869 +7.24841607883414,0.23832775051957591 +10.347675591461396,0.28753868506914715 +10.686337772821627,0.30684839551276677 +9.822044747899417,0.32661245475083844 +9.447079333421648,0.3112984064131052 +11.114202667483596,0.3364211476686481 +9.532358320060213,0.31236106613200026 +11.688464102351732,0.35487546667211 +6.532609784598158,0.22168781874321142 +10.192019448061677,0.3054567325499729 +13.077747284183287,0.38903025246203415 +10.096158138823926,0.30867510872414256 +9.343519904040358,0.290375320672992 +11.40697959771358,0.31308878923916295 +12.020609433055474,0.3782750313181893 +7.172718740262101,0.24334049751097286 +7.584794001842911,0.2514622981514563 +9.152594292536907,0.2678778098535802 +12.045947232357136,0.33710722608802324 +4.379843967798498,0.10762641890440137 +3.9571319937219607,0.113635341004926 +9.604472609306962,0.27473135213632965 +8.418838881573317,0.25553219416942385 +10.210954922063188,0.323263474717489 +9.607306751578054,0.29177678625562137 +7.060706536012455,0.2464907666663334 +6.943395438649984,0.23411188612717468 +7.987496774296126,0.23715829335149538 +6.703187252555086,0.21613478186303836 +8.989863908497496,0.29754347680337007 +5.3003428235059005,0.15283433472600066 +7.54340245910743,0.24628416098103528 +9.813350149916667,0.3142972297214279 +9.471843550104339,0.3170324937170163 +14.278150609504612,0.4087077718493328 +9.677308352105648,0.28421158422409315 +7.4440856645917135,0.25581770286934824 +4.385277812944313,0.13265263272190378 +8.22363437693505,0.2223718632499199 +9.408385570236735,0.29099268450210747 +11.77627353707695,0.36591204701029667 +15.043583831144797,0.4168601817074705 +6.7135010881993855,0.20526445170131055 +7.3099809259113115,0.22216331625583294 +6.8867769079468415,0.2117247511316945 +4.409863387744625,0.11906173881006474 +7.641879896764433,0.23103459137397314 +8.483160007686088,0.24982929105677146 +12.2184349027241,0.34412720042998773 +13.108464684640968,0.3854827942762161 +12.301487123370132,0.3546389998626981 +6.778424897986802,0.20380859414550845 +11.153693300454865,0.33196148938956604 +12.848113577592285,0.383963743119625 +7.7318602570176695,0.24995391622250263 +13.889362645764818,0.41917795613750286 +5.029285733575436,0.1565712068835105 +12.815511254920658,0.32045351604623495 +8.916057019146086,0.2593018338761806 +7.649338687785831,0.2492080696611349 +12.8604460492114,0.3723917179012849 +6.601788303188531,0.18369133369166799 +10.288367893781292,0.30306427295098 +6.925703924819166,0.2403483369335239 +11.790867067900965,0.32527202858137033 +12.810837863572981,0.36388377313805176 +11.092170573659601,0.333338222107799 +10.189868448033742,0.2709142473792956 +5.078027062937434,0.15792477880845202 +7.223634361053522,0.21356319778744356 +8.948019498562504,0.24191457543885958 +13.222659710790236,0.3750710095241255 
+8.076671575057484,0.2209566383161624 +15.312111223230433,0.3871211907716363 +14.170973262775446,0.42197025792806736 +9.32461479951292,0.29381079716589753 +13.309929308155027,0.3625732862415955 +11.21961903186075,0.3243213919583699 +4.66559471266868,0.1327459953758173 +8.65681333512524,0.2849376420634901 +10.051156775736247,0.33207149084178794 +10.559178404056045,0.31681027963019587 +9.170860767688787,0.2863959162680961 +12.08392615370928,0.3370654696996921 +8.519728499754038,0.24416325998118774 +7.275149264066791,0.2572700464241981 From b74c87098aa2acbc6554e6e68f0e5f404e53f3bd Mon Sep 17 00:00:00 2001 From: Malik Date: Fri, 30 May 2025 10:07:05 -0600 Subject: [PATCH 03/38] fix format --- bird/postprocess/SA_optimization/README.md | 2 +- bird/postprocess/SA_optimization/get_csv.py | 23 +- .../SA_optimization/get_optimal.py | 324 +++++++++++------- .../get_optimal_with_constraint.py | 322 ++++++++++------- 4 files changed, 416 insertions(+), 255 deletions(-) diff --git a/bird/postprocess/SA_optimization/README.md b/bird/postprocess/SA_optimization/README.md index 37309e9b..3d79d9db 100644 --- a/bird/postprocess/SA_optimization/README.md +++ b/bird/postprocess/SA_optimization/README.md @@ -24,7 +24,7 @@ The SA optimizer operates on discrete feature values (0,1,2) with an option to r - max_spargers: maximum number of spargers (only in 'get_optimal_with_constraints.py') (default = 8) - n_runs: number of bootstrap runs (default = 10) - max_iters: maximum number of iterations of SA (default = 1000) - - bootstrap_size: sample size of each boostrap (default = 100) + - bootstrap_size: sample size of each bootstrap (default = 100) - For each bootstrap run, the model hyperparameters are tuned using 5-fold cross validation. - The simulated_annealing_surrogate(...) 
function runs the optimization: - If SA is too slow or fails to converge, you can change the following parameters: diff --git a/bird/postprocess/SA_optimization/get_csv.py b/bird/postprocess/SA_optimization/get_csv.py index cc76fc14..02dd1191 100644 --- a/bird/postprocess/SA_optimization/get_csv.py +++ b/bird/postprocess/SA_optimization/get_csv.py @@ -1,7 +1,9 @@ -import pickle5 as pkl +import csv import os + import numpy as np -import csv +import pickle5 as pkl + def get_config_result(config_fold): with open(os.path.join(config_fold, "results.pkl"), "rb") as f: @@ -11,28 +13,27 @@ def get_config_result(config_fold): Xdata = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], np.int64) count = 0 # Save data into CSV files - xfname = "Xdata_"+config_fold+".csv" - yfname = "ydata_"+config_fold+".csv" - with open(xfname,'w',newline='') as csvfile: + xfname = "Xdata_" + config_fold + ".csv" + yfname = "ydata_" + config_fold + ".csv" + with open(xfname, "w", newline="") as csvfile: writer = csv.writer(csvfile) for sims in results: b0 = configs[sims][0] b1 = configs[sims][1] b2 = configs[sims][2] - raw_data = np.concatenate((b0,b1,b2), axis = None) + raw_data = np.concatenate((b0, b1, b2), axis=None) writer.writerow(raw_data) - with open(yfname,'w',newline='') as csvfile: + with open(yfname, "w", newline="") as csvfile: writer = csv.writer(csvfile) for sims in results: q0 = results[sims]["qoi"] q1 = results[sims]["qoi_err"] - y_data = np.concatenate((q0,q1), axis = None) + y_data = np.concatenate((q0, q1), axis=None) writer.writerow(y_data) -studies = { - "study_scaleup_0_4vvm_3000W" : r"0.0036$m^3$ 0.4vvm 0W" - } + +studies = {"study_scaleup_0_4vvm_3000W": r"0.0036$m^3$ 0.4vvm 0W"} for study in studies: diff --git a/bird/postprocess/SA_optimization/get_optimal.py b/bird/postprocess/SA_optimization/get_optimal.py index ca960fd8..43c86bbb 100644 --- a/bird/postprocess/SA_optimization/get_optimal.py +++ b/bird/postprocess/SA_optimization/get_optimal.py @@ -1,79 +1,109 @@ -import numpy as np +import random +import warnings + +import matplotlib.pyplot as plt +import numpy as np +import optuna import pandas as pd -import random -import optuna +from scipy.interpolate import RBFInterpolator from sklearn.ensemble import RandomForestRegressor -from sklearn.preprocessing import OneHotEncoder -from sklearn.model_selection import KFold, cross_val_score from sklearn.metrics import mean_squared_error -from scipy.interpolate import RBFInterpolator -from tensorflow.keras.models import Sequential -from tensorflow.keras.layers import Dense -import matplotlib.pyplot as plt -import warnings +from sklearn.model_selection import KFold, cross_val_score +from sklearn.preprocessing import OneHotEncoder +from tensorflow.keras.layers import Dense +from tensorflow.keras.models import Sequential warningsl.filterwarnings("ignore") -def tune_rbf(X,y): + +def tune_rbf(X, y): # Tune the RBFInterpolator # kernel - "multiquadric" (Can try other kernels) def objective(trial): - epsilon = trial.suggest_float("epsilon",0.1,10.0, log=False) + epsilon = trial.suggest_float("epsilon", 0.1, 10.0, log=False) try: - kf = KFold(n_splits=5,shuffle=True,random_state=42) - return np.mean([ - mean_squared_error( - y[test], - RBFInterpolator(X[train],y[train],epsilon=epsilon,kernel="multiquadric")(X[test]) - ) - for train, test in kf.split(X) - ]) + kf = KFold(n_splits=5, shuffle=True, random_state=42) + return np.mean( + [ + mean_squared_error( + y[test], + RBFInterpolator( + X[train], + y[train], + epsilon=epsilon, + kernel="multiquadric", + 
)(X[test]), + ) + for train, test in kf.split(X) + ] + ) except: return float("inf") + study = optuna.create_study(direction="minimize") - study.optimize(objective,n_trials=20) + study.optimize(objective, n_trials=20) return study.best_params -def tune_rf(X,y): + +def tune_rf(X, y): # Tune the Random Forest def objective(trial): model = RandomForestRegressor( - n_estimators=trial.suggest_int("n_estimators",50,200), - max_depth = trial.suggest_int("max_depth",3,10), - min_samples_leaf = trial.suggest_int("min_samples_leaf",1,10), - random_state = 42 + n_estimators=trial.suggest_int("n_estimators", 50, 200), + max_depth=trial.suggest_int("max_depth", 3, 10), + min_samples_leaf=trial.suggest_int("min_samples_leaf", 1, 10), + random_state=42, + ) + scores = cross_val_score( + model, X, y, cv=5, scoring="neg_mean_squared_error" ) - scores = cross_val_score(model, X, y, cv=5, scoring='neg_mean_squared_error') return -np.mean(scores) + study = optuna.create_study(direction="minimize") - study.optimize(objective,n_trials=20) + study.optimize(objective, n_trials=20) return study.best_params -def tune_nn(X_encoded,y): + +def tune_nn(X_encoded, y): # Tune the Neural Network def objective(trial): - units = trial.suggest_int("n_units",16,128) - layers = trial.suggest_int("n_layers",1,3) - kf = KFold(n_splits=5,shuffle=True,random_state=42) - mses=[] + units = trial.suggest_int("n_units", 16, 128) + layers = trial.suggest_int("n_layers", 1, 3) + kf = KFold(n_splits=5, shuffle=True, random_state=42) + mses = [] for train, test in kf.split(X_encoded): model = Sequential() - model.add(Dense(units, activation='relu', input_shape=(X_encoded.shape[1],))) - for _ in range(layers-1): - model.add(Dense(units,activation='relu')) + model.add( + Dense( + units, activation="relu", input_shape=(X_encoded.shape[1],) + ) + ) + for _ in range(layers - 1): + model.add(Dense(units, activation="relu")) model.add(Dense(1)) - model.compile(optimizer='adam',loss='mse') - model.fit(X_encoded[train],y[train],epochs=100,batch_size=16,verbose=0) - mses.append(mean_squared_error(y[test],model.predict(X_encoded[test]).flatten())) + model.compile(optimizer="adam", loss="mse") + model.fit( + X_encoded[train], + y[train], + epochs=100, + batch_size=16, + verbose=0, + ) + mses.append( + mean_squared_error( + y[test], model.predict(X_encoded[test]).flatten() + ) + ) return np.mean(mses) + study = optuna.create_study(direction="minimize") study.optimize(objective, n_trials=20) - return study.best_params + return study.best_params class Surrogatewrapper: # Wrapper that builds the surrogate model and predicts the QOI value for given X - def __init__(self,model_type,X,y,params): + def __init__(self, model_type, X, y, params): # Inputs: # model_type = 'rbf' for RBFInterpolator # = 'rf' for Random Forest @@ -81,51 +111,57 @@ def __init__(self,model_type,X,y,params): # X,y = the raw data # params = tune parameter of the surrogate model - self.model_type = model_type - self.encoder = None + self.model_type = model_type + self.encoder = None - if model_type == 'rbf': - self.model = RBFInterpolator(X,y,kernel='multiquadric',epsilon=params["epsilon"]) - elif model_type == 'rf': + if model_type == "rbf": + self.model = RBFInterpolator( + X, y, kernel="multiquadric", epsilon=params["epsilon"] + ) + elif model_type == "rf": self.model = RandomForestRegressor(**params, random_state=42) - self.model.fit(X,y) - elif model_type == 'nn': + self.model.fit(X, y) + elif model_type == "nn": self.encoder = OneHotEncoder(sparse_output=False) X_encoded = 
self.encoder.fit_transform(X) - self.model = self._build_nn(X_encoded.shape[1],params["n_units"],params["n_layers"]) - self.model.fit(X_encoded,y,epochs=100,batch_size=16,verbose=0) + self.model = self._build_nn( + X_encoded.shape[1], params["n_units"], params["n_layers"] + ) + self.model.fit(X_encoded, y, epochs=100, batch_size=16, verbose=0) - - def _build_nn(self,input_dim, units, layers): + def _build_nn(self, input_dim, units, layers): # Builds the neural network model = Sequential() - model.add(Dense(units, activation='relu',input_shape=(input_dim,))) - for _ in range(layers-1): - model.add(Dense(units,activation='relu')) + model.add(Dense(units, activation="relu", input_shape=(input_dim,))) + for _ in range(layers - 1): + model.add(Dense(units, activation="relu")) model.add(Dense(1)) - model.compile(optimizer='adam', loss='mse') - return model - - def predict(self,X): - X = X.reshape(1,-1) - if self.model_type == 'nn': + model.compile(optimizer="adam", loss="mse") + return model + + def predict(self, X): + X = X.reshape(1, -1) + if self.model_type == "nn": X = self.encoder.transform(X) return float(self.model.predict(X)[0]) - elif self.model_type == 'rf': + elif self.model_type == "rf": return float(self.model.predict(X)[0]) else: return float(self.model(X)[0]) - -def simulated_annealing_surrogate(surrogate,dim=12,max_iters=1000,temp=10.0,alpha=0.95): - # Runs the Simulated Annealing (SA) and reports the resuts + + +def simulated_annealing_surrogate( + surrogate, dim=12, max_iters=1000, temp=10.0, alpha=0.95 +): + # Runs the Simulated Annealing (SA) and reports the results # Inputs: - # surrogate = tuned surrogate model + # surrogate = tuned surrogate model # dim = dimension of the problem # max_iters = maximum number of iterations for SA # temp = parameter of Simulated Annealing (can be changes for tuned for other problems) # = max temperature. It controls the exploration rate of SA # alpha = parameter of Simulated Annealing (can be changes for tuned for other problems) - # = cooling rate. It determines how quickly temp decays. + # = cooling rate. It determines how quickly temp decays. # Outputs: # x_best = optimal solution @@ -134,37 +170,40 @@ def simulated_annealing_surrogate(surrogate,dim=12,max_iters=1000,temp=10.0,alph # trace_x = tracks how X changes during each iteration. # generate random starting point - x_curr = np.random.randint(0,3,size=dim) - + x_curr = np.random.randint(0, 3, size=dim) + y_curr = surrogate.predict(x_curr) - x_best, y_best = x_curr.copy(), y_curr + x_best, y_best = x_curr.copy(), y_curr - trace = [(0,y_best)] + trace = [(0, y_best)] trace_x = [x_curr.copy()] for i in range(max_iters): # Optimization loop - + x_new = x_curr.copy() - idx = random.randint(0,dim-1) + idx = random.randint(0, dim - 1) # perturb a random dimension to get new x - x_new[idx] = (x_new[idx] + random.choice([-1,1])) % 3 + x_new[idx] = (x_new[idx] + random.choice([-1, 1])) % 3 y_new = surrogate.predict(x_new) delta = y_new - y_curr if delta < 0 or np.random.rand() < np.exp(-delta / temp): - x_curr, y_curr = x_new, y_new + x_curr, y_curr = x_new, y_new if y_curr < y_best: - x_best, y_best = x_new.copy(), y_new - - trace.append((i,y_best)) + x_best, y_best = x_new.copy(), y_new + + trace.append((i, y_best)) trace_x.append(x_curr.copy()) - temp *= alpha - + temp *= alpha + return x_best, y_best, trace, trace_x -def run_optimization(X,y,model_type = 'rbf',n_runs=10,max_iters = 1000, bootstrap_size=100): - # Bootstraps data, runs optimization and postprocesses the resutls. 
+ +def run_optimization( + X, y, model_type="rbf", n_runs=10, max_iters=1000, bootstrap_size=100 +): + # Bootstraps data, runs optimization and postprocesses the results. # Inputs: # X,y = the raw data # model_type = 'rbf' for RBFInterpolator @@ -180,31 +219,36 @@ def run_optimization(X,y,model_type = 'rbf',n_runs=10,max_iters = 1000, bootstra all_trace_x = [] rng = np.random.default_rng(42) # Bootstrap the data - bootstrap_idxs = [rng.choice(len(X), size = bootstrap_size, replace = False) for _ in range(n_runs)] + bootstrap_idxs = [ + rng.choice(len(X), size=bootstrap_size, replace=False) + for _ in range(n_runs) + ] i = 0 for idxs in bootstrap_idxs: # Tune the model for each bootstrap X_sub, y_sub = X[idxs], y[idxs] if model_type == "rbf": - params = tune_rbf(X_sub,y_sub) + params = tune_rbf(X_sub, y_sub) elif model_type == "rf": - params = tune_rf(X_sub,y_sub) + params = tune_rf(X_sub, y_sub) elif model_type == "nn": encoder = OneHotEncoder(sparse_output=False) X_encoded = encoder.fit_transform(X_sub) - params = tune_nn(X_encoded,y_sub) + params = tune_nn(X_encoded, y_sub) else: raise ValueError("Invalid model_type") # build the surrogate model - surrogate = Surrogatewrapper(model_type,X_sub,y_sub,params) + surrogate = Surrogatewrapper(model_type, X_sub, y_sub, params) # run optimization - x_best, y_best, trace, trace_x = simulated_annealing_surrogate(surrogate, dim=X.shape[1], max_iters=max_iters) - + x_best, y_best, trace, trace_x = simulated_annealing_surrogate( + surrogate, dim=X.shape[1], max_iters=max_iters + ) + trace_y = [y for _, y in trace] - all_traces[i,:] = trace_y + all_traces[i, :] = trace_y all_x.append(x_best) all_y.append(y_best) all_trace_x.append(trace_x) @@ -214,69 +258,101 @@ def run_optimization(X,y,model_type = 'rbf',n_runs=10,max_iters = 1000, bootstra best_index = np.argmin(all_y) best_x = all_x[best_index] best_y = all_y[best_index] - # Save the best solution - df = pd.DataFrame([{ - **{f"x{i}": best_x[i] for i in range(len(best_x))}, - "best_y": best_y - }]) - df.to_csv(f"best_bootstrap_solution_{model_type}_size_{bootstrap_size}.csv", index=False) - + # Save the best solution + df = pd.DataFrame( + [ + { + **{f"x{i}": best_x[i] for i in range(len(best_x))}, + "best_y": best_y, + } + ] + ) + df.to_csv( + f"best_bootstrap_solution_{model_type}_size_{bootstrap_size}.csv", + index=False, + ) # Make the mean-CI plot for the objective function and save it - mean_trace = np.mean(-1*all_traces,axis=0) - std_trace = np.std(all_traces,axis=0) - lower_bound = mean_trace - 1.96* std_trace / np.sqrt(n_runs) - upper_bound = mean_trace + 1.96* std_trace / np.sqrt(n_runs) + mean_trace = np.mean(-1 * all_traces, axis=0) + std_trace = np.std(all_traces, axis=0) + lower_bound = mean_trace - 1.96 * std_trace / np.sqrt(n_runs) + upper_bound = mean_trace + 1.96 * std_trace / np.sqrt(n_runs) iterations = np.arange(max_iters + 1) - plt.figure(figsize=(8,4)) - plt.plot(iterations,mean_trace,label=f"Mean Convergence {model_type.upper()}", color = "blue") - plt.fill_between(iterations, lower_bound, upper_bound, color = "blue", alpha = 0.3, label = "95% CI") + plt.figure(figsize=(8, 4)) + plt.plot( + iterations, + mean_trace, + label=f"Mean Convergence {model_type.upper()}", + color="blue", + ) + plt.fill_between( + iterations, + lower_bound, + upper_bound, + color="blue", + alpha=0.3, + label="95% CI", + ) plt.xlabel("Iteration") plt.ylabel("Best Surrogate-Predicted Objective") - plt.title(f"Mean Convergence with 95% Confidence Interval ({model_type.upper()})") + plt.title( + f"Mean 
Convergence with 95% Confidence Interval ({model_type.upper()})" + ) plt.grid(True) - plt.legend() - plt.savefig(f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}.png",dpi=300) + plt.legend() + plt.savefig( + f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}.png", + dpi=300, + ) plt.show() # Compute the distance of intermediate x from the optimal solution - optimal_x = np.ones(X.shape[1],dtype=int) + optimal_x = np.ones(X.shape[1], dtype=int) l1_traces = [] for t in all_trace_x: distances = [np.sum(np.abs(np.array(x) - optimal_x)) for x in t] l1_traces.append(distances) - - ''' compute and make the mean-CI plot for the distance between the intermdeiate - solution and the optimal solution ''' + + """ compute and make the mean-CI plot for the distance between the intermdeiate + solution and the optimal solution """ l1_traces = np.array(l1_traces) - mean_l1 = np.mean(l1_traces,axis=0) - std_l1 = np.std(l1_traces,axis=0) + mean_l1 = np.mean(l1_traces, axis=0) + std_l1 = np.std(l1_traces, axis=0) lower = mean_l1 - 1.96 * std_l1 / np.sqrt(len(trace_x)) upper = mean_l1 + 1.96 * std_l1 / np.sqrt(len(trace_x)) iterations = np.arange(len(mean_l1)) - plt.figure(figsize=(8,4)) - plt.plot(iterations,mean_l1,label="Mean L1 Distance from Optimal",color = "darkred") - plt.fill_between(iterations,lower,upper,alpha=0.3,color="darkred", label="95% CI") + plt.figure(figsize=(8, 4)) + plt.plot( + iterations, + mean_l1, + label="Mean L1 Distance from Optimal", + color="darkred", + ) + plt.fill_between( + iterations, lower, upper, alpha=0.3, color="darkred", label="95% CI" + ) plt.xlabel("Iteration") plt.ylabel("L1 Distance from Optimal") plt.title("Convergence Toward Optimal Solution (L1 Norm)") plt.grid(True) plt.legend() plt.tight_layout() - plt.savefig(f"Mean_L1_distance_{model_type}_size_{bootstrap_size}.png",dpi=300) + plt.savefig( + f"Mean_L1_distance_{model_type}_size_{bootstrap_size}.png", dpi=300 + ) plt.show() - - # Read data from the csv file. -X_raw_data = pd.read_csv('Xdata_study_scaleup_0_4vvm_3000W.csv') -y_raw_data = pd.read_csv('ydata_study_scaleup_0_4vvm_3000W.csv') +X_raw_data = pd.read_csv("Xdata_study_scaleup_0_4vvm_3000W.csv") +y_raw_data = pd.read_csv("ydata_study_scaleup_0_4vvm_3000W.csv") X = X_raw_data.values -y = y_raw_data.iloc[:,:-1].values -y = y*-1 +y = y_raw_data.iloc[:, :-1].values +y = y * -1 # The function will build, tune, and optimize the surrogate model and postprocess the results. 
-run_optimization(X,y,model_type="rbf",n_runs = 10,max_iters = 1000, bootstrap_size=150) \ No newline at end of file +run_optimization( + X, y, model_type="rbf", n_runs=10, max_iters=1000, bootstrap_size=150 +) diff --git a/bird/postprocess/SA_optimization/get_optimal_with_constraint.py b/bird/postprocess/SA_optimization/get_optimal_with_constraint.py index 03279036..b3371e41 100644 --- a/bird/postprocess/SA_optimization/get_optimal_with_constraint.py +++ b/bird/postprocess/SA_optimization/get_optimal_with_constraint.py @@ -1,129 +1,165 @@ -import numpy as np +import random + +import matplotlib.pyplot as plt +import numpy as np +import optuna import pandas as pd -import random -import optuna +from scipy.interpolate import RBFInterpolator from sklearn.ensemble import RandomForestRegressor -from sklearn.preprocessing import OneHotEncoder -from sklearn.model_selection import KFold, cross_val_score from sklearn.metrics import mean_squared_error -from scipy.interpolate import RBFInterpolator -from tensorflow.keras.models import Sequential -from tensorflow.keras.layers import Dense -import matplotlib.pyplot as plt +from sklearn.model_selection import KFold, cross_val_score +from sklearn.preprocessing import OneHotEncoder +from tensorflow.keras.layers import Dense +from tensorflow.keras.models import Sequential -def tune_rbf(X,y): + +def tune_rbf(X, y): def objective(trial): # Tune the RBFInterpolator # kernel - "multiquadric" (Can try other kernels) - epsilon = trial.suggest_float("epsilon",0.1,10.0, log=False) + epsilon = trial.suggest_float("epsilon", 0.1, 10.0, log=False) try: - kf = KFold(n_splits=5,shuffle=True,random_state=42) - return np.mean([ - mean_squared_error( - y[test], - RBFInterpolator(X[train],y[train],epsilon=epsilon,kernel="multiquadric")(X[test]) - ) - for train, test in kf.split(X) - ]) + kf = KFold(n_splits=5, shuffle=True, random_state=42) + return np.mean( + [ + mean_squared_error( + y[test], + RBFInterpolator( + X[train], + y[train], + epsilon=epsilon, + kernel="multiquadric", + )(X[test]), + ) + for train, test in kf.split(X) + ] + ) except: return float("inf") + study = optuna.create_study(direction="minimize") - study.optimize(objective,n_trials=20) + study.optimize(objective, n_trials=20) return study.best_params -def tune_rf(X,y): + +def tune_rf(X, y): # Tune the Random Forest def objective(trial): model = RandomForestRegressor( - n_estimators=trial.suggest_int("n_estimators",50,200), - max_depth = trial.suggest_int("max_depth",3,10), - min_samples_leaf = trial.suggest_int("min_samples_leaf",1,10), - random_state = 42 + n_estimators=trial.suggest_int("n_estimators", 50, 200), + max_depth=trial.suggest_int("max_depth", 3, 10), + min_samples_leaf=trial.suggest_int("min_samples_leaf", 1, 10), + random_state=42, + ) + scores = cross_val_score( + model, X, y, cv=5, scoring="neg_mean_squared_error" ) - scores = cross_val_score(model, X, y, cv=5, scoring='neg_mean_squared_error') return -np.mean(scores) + study = optuna.create_study(direction="minimize") - study.optimize(objective,n_trials=20) + study.optimize(objective, n_trials=20) return study.best_params -def tune_nn(X_encoded,y): + +def tune_nn(X_encoded, y): # Tune the Neural Network def objective(trial): - units = trial.suggest_int("n_units",16,128) - layers = trial.suggest_int("n_layers",1,3) - kf = KFold(n_splits=5,shuffle=True,random_state=42) - mses=[] + units = trial.suggest_int("n_units", 16, 128) + layers = trial.suggest_int("n_layers", 1, 3) + kf = KFold(n_splits=5, shuffle=True, random_state=42) + mses = [] 
for train, test in kf.split(X_encoded): model = Sequential() - model.add(Dense(units, activation='relu', input_shape=(X_encoded.shape[1],))) - for _ in range(layers-1): - model.add(Dense(units,activation='relu')) + model.add( + Dense( + units, activation="relu", input_shape=(X_encoded.shape[1],) + ) + ) + for _ in range(layers - 1): + model.add(Dense(units, activation="relu")) model.add(Dense(1)) - model.compile(optimizer='adam',loss='mse') - model.fit(X_encoded[train],y[train],epochs=100,batch_size=16,verbose=0) - mses.append(mean_squared_error(y[test],model.predict(X_encoded[test]).flatten())) + model.compile(optimizer="adam", loss="mse") + model.fit( + X_encoded[train], + y[train], + epochs=100, + batch_size=16, + verbose=0, + ) + mses.append( + mean_squared_error( + y[test], model.predict(X_encoded[test]).flatten() + ) + ) return np.mean(mses) + study = optuna.create_study(direction="minimize") study.optimize(objective, n_trials=20) - return study.best_params + return study.best_params class Surrogatewrapper: # Wrapper that builds the surrogate model and predicts the QOI value for given X - def __init__(self,model_type,X,y,params): + def __init__(self, model_type, X, y, params): # Inputs: # model_type = 'rbf' for RBFInterpolator # = 'rf' for Random Forest # = 'nn' for Neural Network # X,y = the raw data # params = tune parameter of the surrogate model - - self.model_type = model_type - self.encoder = None - if model_type == 'rbf': - self.model = RBFInterpolator(X,y,kernel='multiquadric',epsilon=params["epsilon"]) - elif model_type == 'rf': + self.model_type = model_type + self.encoder = None + + if model_type == "rbf": + self.model = RBFInterpolator( + X, y, kernel="multiquadric", epsilon=params["epsilon"] + ) + elif model_type == "rf": self.model = RandomForestRegressor(**params, random_state=42) - self.model.fit(X,y) - elif model_type == 'nn': + self.model.fit(X, y) + elif model_type == "nn": self.encoder = OneHotEncoder(sparse_output=False) X_encoded = self.encoder.fit_transform(X) - self.model = self._build_nn(X_encoded.shape[1],params["n_units"],params["n_layers"]) - self.model.fit(X_encoded,y,epochs=100,batch_size=16,verbose=0) + self.model = self._build_nn( + X_encoded.shape[1], params["n_units"], params["n_layers"] + ) + self.model.fit(X_encoded, y, epochs=100, batch_size=16, verbose=0) - - def _build_nn(self,input_dim, units, layers): + def _build_nn(self, input_dim, units, layers): # Builds the neural network model = Sequential() - model.add(Dense(units, activation='relu',input_shape=(input_dim,))) - for _ in range(layers-1): - model.add(Dense(units,activation='relu')) + model.add(Dense(units, activation="relu", input_shape=(input_dim,))) + for _ in range(layers - 1): + model.add(Dense(units, activation="relu")) model.add(Dense(1)) - model.compile(optimizer='adam', loss='mse') - return model - - def predict(self,X): - X = X.reshape(1,-1) - if self.model_type == 'nn': + model.compile(optimizer="adam", loss="mse") + return model + + def predict(self, X): + X = X.reshape(1, -1) + if self.model_type == "nn": X = self.encoder.transform(X) return float(self.model.predict(X)[0]) - elif self.model_type == 'rf': + elif self.model_type == "rf": return float(self.model.predict(X)[0]) else: return float(self.model(X)[0]) - -def simulated_annealing_surrogate(surrogate,dim=12,max_iters=1000,max_spargers=8,temp=10.0,alpha=0.95): - # Runs the Simulated Annealing (SA) and reports the resuts + + +def simulated_annealing_surrogate( + surrogate, dim=12, max_iters=1000, max_spargers=8, temp=10.0, 
alpha=0.95 +): + # Runs the Simulated Annealing (SA) and reports the results # Inputs: - # surrogate = tuned surrogate model + # surrogate = tuned surrogate model # dim = dimension of the problem # max_spargers = maximum number of spargers allowed # max_iters = maximum number of iterations for SA # temp = parameter of Simulated Annealing (can be changes for tuned for other problems) # = max temperature. It controls the exploration rate of SA # alpha = parameter of Simulated Annealing (can be changes for tuned for other problems) - # = cooling rate. It determines how quickly temp decays. + # = cooling rate. It determines how quickly temp decays. # Outputs: # x_best = optimal solution @@ -133,47 +169,56 @@ def simulated_annealing_surrogate(surrogate,dim=12,max_iters=1000,max_spargers=8 def is_valid(x): # Checks if the number of spargers <= max_spargers - return np.sum(x== 1) <= max_spargers - + return np.sum(x == 1) <= max_spargers + while True: # generate random starting point - x_curr = np.random.randint(0,3,size=dim) + x_curr = np.random.randint(0, 3, size=dim) if is_valid(x_curr): break y_curr = surrogate.predict(x_curr) - x_best, y_best = x_curr.copy(), y_curr + x_best, y_best = x_curr.copy(), y_curr - trace = [(0,y_best)] + trace = [(0, y_best)] for i in range(max_iters): # Optimization loop - + x_new = x_curr.copy() - idx = random.randint(0,dim-1) + idx = random.randint(0, dim - 1) # perturb a random dimension to get new x - x_new[idx] = (x_new[idx] + random.choice([-1,1])) % 3 - + x_new[idx] = (x_new[idx] + random.choice([-1, 1])) % 3 + if not is_valid(x_new): - trace.append((i,y_best)) + trace.append((i, y_best)) temp *= alpha continue - + y_new = surrogate.predict(x_new) delta = y_new - y_curr if delta < 0 or np.random.rand() < np.exp(-delta / temp): - x_curr, y_curr = x_new, y_new + x_curr, y_curr = x_new, y_new if y_curr < y_best: - x_best, y_best = x_new.copy(), y_new - - trace.append((i,y_best)) - temp *= alpha - + x_best, y_best = x_new.copy(), y_new + + trace.append((i, y_best)) + temp *= alpha + return x_best, y_best, trace -def run_optimization(X,y,model_type = 'rbf',max_spargers = 8,n_runs=10,max_iters = 1000, bootstrap_size=100): - # Bootstraps data, runs optimization and postprocesses the resutls. + +def run_optimization( + X, + y, + model_type="rbf", + max_spargers=8, + n_runs=10, + max_iters=1000, + bootstrap_size=100, +): + # Bootstraps data, runs optimization and postprocesses the results. 
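The function being reformatted here is a standard Metropolis scheme with geometric cooling on the discrete {0, 1, 2} design space, with candidates rejected whenever they exceed the sparger budget. A self-contained sketch of that loop, using a toy quadratic objective in place of the tuned surrogate (the objective, dimension, and iteration count below are illustrative assumptions):

```python
import random

import numpy as np

def toy_objective(x):
    # stand-in for surrogate.predict; minimized when every entry equals 1
    return float(np.sum((x - 1) ** 2))

dim, max_spargers, temp, alpha, max_iters = 11, 8, 10.0, 0.95, 200

def is_valid(x):
    # design value 1 encodes a sparger; allow at most max_spargers of them
    return np.sum(x == 1) <= max_spargers

while True:  # random restarts until the starting point satisfies the constraint
    x_curr = np.random.randint(0, 3, size=dim)
    if is_valid(x_curr):
        break
y_curr = toy_objective(x_curr)
x_best, y_best = x_curr.copy(), y_curr

for i in range(max_iters):
    x_new = x_curr.copy()
    idx = random.randint(0, dim - 1)
    x_new[idx] = (x_new[idx] + random.choice([-1, 1])) % 3  # perturb one variable
    if is_valid(x_new):
        y_new = toy_objective(x_new)
        delta = y_new - y_curr
        # Metropolis rule: always accept improvements, sometimes accept uphill moves
        if delta < 0 or np.random.rand() < np.exp(-delta / temp):
            x_curr, y_curr = x_new, y_new
            if y_curr < y_best:
                x_best, y_best = x_new.copy(), y_new
    temp *= alpha  # geometric cooling, applied whether or not the move was accepted

print(x_best, y_best)
```

Note that the temperature decays on every iteration, including constraint-violating or rejected moves, which matches how the original function advances its trace.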
# Inputs: # X,y = the raw data # model_type = 'rbf' for RBFInterpolator @@ -185,34 +230,42 @@ def run_optimization(X,y,model_type = 'rbf',max_spargers = 8,n_runs=10,max_iters all_x = [] all_y = [] - all_traces = np.zeros((n_runs, max_iters + 1)) + all_traces = np.zeros((n_runs, max_iters + 1)) rng = np.random.default_rng(42) # Bootstrap the data - bootstrap_idxs = [rng.choice(len(X), size = bootstrap_size, replace = False) for _ in range(n_runs)] - + bootstrap_idxs = [ + rng.choice(len(X), size=bootstrap_size, replace=False) + for _ in range(n_runs) + ] + i = 0 for idxs in bootstrap_idxs: # Tune the model for each bootstrap X_sub, y_sub = X[idxs], y[idxs] if model_type == "rbf": - params = tune_rbf(X_sub,y_sub) + params = tune_rbf(X_sub, y_sub) elif model_type == "rf": - params = tune_rf(X_sub,y_sub) + params = tune_rf(X_sub, y_sub) elif model_type == "nn": encoder = OneHotEncoder(sparse_output=False) X_encoded = encoder.fit_transform(X_sub) - params = tune_nn(X_encoded,y_sub) + params = tune_nn(X_encoded, y_sub) else: raise ValueError("Invalid model_type") # build the surrogate model - surrogate = Surrogatewrapper(model_type,X_sub,y_sub,params) + surrogate = Surrogatewrapper(model_type, X_sub, y_sub, params) # run optimization - x_best, y_best, trace = simulated_annealing_surrogate(surrogate, dim=X.shape[1], max_iters=max_iters, max_spargers=max_spargers) - + x_best, y_best, trace = simulated_annealing_surrogate( + surrogate, + dim=X.shape[1], + max_iters=max_iters, + max_spargers=max_spargers, + ) + trace_y = [y for _, y in trace] - all_traces[i,:] = trace_y + all_traces[i, :] = trace_y all_x.append(x_best) all_y.append(y_best) i = i + 1 @@ -221,43 +274,74 @@ def run_optimization(X,y,model_type = 'rbf',max_spargers = 8,n_runs=10,max_iters best_index = np.argmin(all_y) best_x = all_x[best_index] best_y = all_y[best_index] - # Save the best solution - df = pd.DataFrame([{ - **{f"x{i}": best_x[i] for i in range(len(best_x))}, - "best_y": best_y - }]) - df.to_csv(f"best_bootstrap_solution_{model_type}_size_{bootstrap_size}_max_spargers_{max_spargers}.csv", index=False) + # Save the best solution + df = pd.DataFrame( + [ + { + **{f"x{i}": best_x[i] for i in range(len(best_x))}, + "best_y": best_y, + } + ] + ) + df.to_csv( + f"best_bootstrap_solution_{model_type}_size_{bootstrap_size}_max_spargers_{max_spargers}.csv", + index=False, + ) print("X = ", x_best) - print("surrogate-predicted y",y_best) + print("surrogate-predicted y", y_best) # Make the mean-CI plot for the objective function and save it - mean_trace = np.mean(-1*all_traces,axis=0) - std_trace = np.std(all_traces,axis=0) - lower_bound = mean_trace - 1.96* std_trace / np.sqrt(n_runs) - upper_bound = mean_trace + 1.96* std_trace / np.sqrt(n_runs) + mean_trace = np.mean(-1 * all_traces, axis=0) + std_trace = np.std(all_traces, axis=0) + lower_bound = mean_trace - 1.96 * std_trace / np.sqrt(n_runs) + upper_bound = mean_trace + 1.96 * std_trace / np.sqrt(n_runs) iterations = np.arange(max_iters + 1) - plt.figure(figsize=(8,4)) - plt.plot(iterations,mean_trace,label=f"Mean Convergence {model_type.upper()}", color = "blue") - plt.fill_between(iterations, lower_bound, upper_bound, color = "blue", alpha = 0.3, label = "95% CI") + plt.figure(figsize=(8, 4)) + plt.plot( + iterations, + mean_trace, + label=f"Mean Convergence {model_type.upper()}", + color="blue", + ) + plt.fill_between( + iterations, + lower_bound, + upper_bound, + color="blue", + alpha=0.3, + label="95% CI", + ) plt.xlabel("Iteration") plt.ylabel("Best Surrogate-Predicted 
Objective") - plt.title(f"Mean Convergence with 95% Confidence Interval ({model_type.upper()})") + plt.title( + f"Mean Convergence with 95% Confidence Interval ({model_type.upper()})" + ) plt.grid(True) - plt.legend() - plt.savefig(f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}_max_spargers_{max_spargers}.png",dpi=300) + plt.legend() + plt.savefig( + f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}_max_spargers_{max_spargers}.png", + dpi=300, + ) plt.show() - # Read data from the csv file. -X_raw_data = pd.read_csv('Xdata_study_scaleup_0_4vvm_3000W.csv') -y_raw_data = pd.read_csv('ydata_study_scaleup_0_4vvm_3000W.csv') +X_raw_data = pd.read_csv("Xdata_study_scaleup_0_4vvm_3000W.csv") +y_raw_data = pd.read_csv("ydata_study_scaleup_0_4vvm_3000W.csv") X = X_raw_data.values -y = y_raw_data.iloc[:,:-1].values -y = y*-1 +y = y_raw_data.iloc[:, :-1].values +y = y * -1 # The function will build, tune, and optimize the surrogate model and postprocess the results. -run_optimization(X,y,model_type="rbf",max_spargers = 8, n_runs = 10,max_iters = 1000, bootstrap_size=100) \ No newline at end of file +run_optimization( + X, + y, + model_type="rbf", + max_spargers=8, + n_runs=10, + max_iters=1000, + bootstrap_size=100, +) From e1382f6b81cd332547646583079c1f52eb2f026e Mon Sep 17 00:00:00 2001 From: Malik Date: Fri, 30 May 2025 10:14:31 -0600 Subject: [PATCH 04/38] fix typos in readme --- bird/postprocess/SA_optimization/README.md | 32 +++++++++++----------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/bird/postprocess/SA_optimization/README.md b/bird/postprocess/SA_optimization/README.md index 3d79d9db..941e4a64 100644 --- a/bird/postprocess/SA_optimization/README.md +++ b/bird/postprocess/SA_optimization/README.md @@ -1,27 +1,27 @@ # Surrogate-Based Optimization with Simulated Annealing -This project implements a complete framework for surrogate-based optimization using Simulated Annealing (SA). It supports three surrogate models: +Implementation of surrogate-based design optimization with Simulated Annealing (SA). It supports three surrogate models: - Radial Basis Function Interpolator ('rbf') - Random Forest ('rf') - Neural Network ('nn') -The SA optimizer operates on discrete feature values (0,1,2) with an option to restrict the number of spargers (1) to a fixed value (max_saprgers). +The SA optimizer operates on discrete feature values (0,1,2) with an option to restrict the number of spargers (1) to a fixed value (max_spargers). -- Preprocessing the data ('get_csv.py') - - reads the 'configs.pkl' and 'results.pkl' files from a study - - saves the configuration in 'Xdata_{study_name}.csv' file - - save the qoi and qoi_error in 'ydata_{study_name}.csv' file +- Preprocessing the data (`get_csv.py`) + - reads the `configs.pkl` and `results.pkl` files from a study + - saves the configuration in `Xdata_{study_name}.csv` file + - save the qoi and qoi_error in `ydata_{study_name}.csv` file -- Surrogate modeling and optimization ('get_optimal.py'/'get_optimal_with_constraints.py') +- Surrogate modeling and optimization (`get_optimal.py` and `get_optimal_with_constraints.py`) - run_optimization(...) 
function sets up the surrogate-based optimization: - Inputs: - - X: read from 'Xdata_{study_name}.csv' file - - y: read from 'ydata_{study_name}.csv' file - - model_type: type of surrogate model (default = 'rbf') - - model_type = 'rbf': Radial Basis Function - - model_type = 'rf': Random Forest - - model_type = 'nn': Neural Network - - max_spargers: maximum number of spargers (only in 'get_optimal_with_constraints.py') (default = 8) + - X: read from `Xdata_{study_name}.csv` file + - y: read from `ydata_{study_name}.csv` file + - model_type: type of surrogate model (default = `rbf`) + - model_type = `rbf`: Radial Basis Function + - model_type = `rf`: Random Forest + - model_type = `nn`: Neural Network + - max_spargers: maximum number of spargers (only in `get_optimal_with_constraints.py`) (default = 8) - n_runs: number of bootstrap runs (default = 10) - max_iters: maximum number of iterations of SA (default = 1000) - bootstrap_size: sample size of each bootstrap (default = 100) @@ -30,10 +30,10 @@ The SA optimizer operates on discrete feature values (0,1,2) with an option to r - If SA is too slow or fails to converge, you can change the following parameters: - temp: maximum temperature of SA. Controls exploration. - alpha: the rate at which temperature changes every iteration. - - It returen the optimal solutions along with the optimization logs which are used to make plots in postprocessing. + - It returns the optimal solutions along with the optimization logs which are used to make plots in postprocessing. - Once the optimization is done the best solution is saved in a csv file and the following plots are made: - Mean-CI plot of the objective function (qoi) - - Mean-CI plot of the distance of an iterate from the optimal solution (this is not done in 'get_optimal_with_constraints.py'). + - Mean-CI plot of the distance of an iterate from the optimal solution (this is not done in `get_optimal_with_constraints.py`). 
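As a usage example matching the workflow above, the call below mirrors the driver section at the bottom of `get_optimal.py` (file names and the bootstrap size of 150 are taken from that script; `run_optimization` is defined inside the script itself, so this is meant to run in a session where that definition is in scope rather than through an importable API):

```python
import pandas as pd

# Load the study data written by get_csv.py
X = pd.read_csv("Xdata_study_scaleup_0_4vvm_3000W.csv").values
y = pd.read_csv("ydata_study_scaleup_0_4vvm_3000W.csv").iloc[:, :-1].values
y = y * -1  # the QoI is maximized, so the annealer minimizes its negative

# Tune the surrogate on bootstrap samples, run SA, save the best design and the plots
run_optimization(
    X, y, model_type="rbf", n_runs=10, max_iters=1000, bootstrap_size=150
)
```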
Python dependencies: - numpy From 00bdb3d452e4e1494908771dcd62159d92f88b16 Mon Sep 17 00:00:00 2001 From: Malik Date: Fri, 30 May 2025 10:33:34 -0600 Subject: [PATCH 05/38] typo in pickle import --- bird/postprocess/SA_optimization/get_csv.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bird/postprocess/SA_optimization/get_csv.py b/bird/postprocess/SA_optimization/get_csv.py index 02dd1191..5729ea74 100644 --- a/bird/postprocess/SA_optimization/get_csv.py +++ b/bird/postprocess/SA_optimization/get_csv.py @@ -2,7 +2,7 @@ import os import numpy as np -import pickle5 as pkl +import pickle as pkl def get_config_result(config_fold): From a310482f4aa10dc88ee0461661d4fa7172284928 Mon Sep 17 00:00:00 2001 From: Malik Date: Fri, 30 May 2025 11:49:13 -0600 Subject: [PATCH 06/38] add dependencies to setup --- bird/postprocess/SA_optimization/README.md | 22 ++++++------ bird/postprocess/SA_optimization/get_csv.py | 38 +++++++++++++++------ setup.py | 4 +++ 3 files changed, 43 insertions(+), 21 deletions(-) diff --git a/bird/postprocess/SA_optimization/README.md b/bird/postprocess/SA_optimization/README.md index 941e4a64..454a45e9 100644 --- a/bird/postprocess/SA_optimization/README.md +++ b/bird/postprocess/SA_optimization/README.md @@ -1,5 +1,17 @@ # Surrogate-Based Optimization with Simulated Annealing +## Install dependencies + +``` + conda create --name bird python=3.10 + conda activate bird + git clone https://github.com/NREL/BioReactorDesign.git + cd BioReactorDesign + pip install -e .[optim] +``` + +## Examples + Implementation of surrogate-based design optimization with Simulated Annealing (SA). It supports three surrogate models: - Radial Basis Function Interpolator ('rbf') - Random Forest ('rf') @@ -35,13 +47,3 @@ The SA optimizer operates on discrete feature values (0,1,2) with an option to r - Mean-CI plot of the objective function (qoi) - Mean-CI plot of the distance of an iterate from the optimal solution (this is not done in `get_optimal_with_constraints.py`). 
-Python dependencies: -- numpy -- pandas -- scikit-learn -- scipy -- optuna -- matplotlib -- warnings -- random -- tensorflow diff --git a/bird/postprocess/SA_optimization/get_csv.py b/bird/postprocess/SA_optimization/get_csv.py index 5729ea74..b3f2426e 100644 --- a/bird/postprocess/SA_optimization/get_csv.py +++ b/bird/postprocess/SA_optimization/get_csv.py @@ -5,16 +5,34 @@ import pickle as pkl -def get_config_result(config_fold): - with open(os.path.join(config_fold, "results.pkl"), "rb") as f: +def get_config_result(study_fold:str=".") -> None: + """ + Read the configs.pkl and results.pkl files from a study + Saves the configuration in Xdata_{study_fold}.csv file + Save the qoi and qoi_error in ydata_{study_fold}.csv file + + Parameters + ---------- + study_fold : str + Folder that contains the study results + + Returns + ---------- + None + + """ + # Read results + with open(os.path.join(study_fold, "results.pkl"), "rb") as f: results = pkl.load(f) - with open(os.path.join(config_fold, "configs.pkl"), "rb") as f: + with open(os.path.join(study_fold, "configs.pkl"), "rb") as f: configs = pkl.load(f) + Xdata = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], np.int64) count = 0 + # Save data into CSV files - xfname = "Xdata_" + config_fold + ".csv" - yfname = "ydata_" + config_fold + ".csv" + xfname = f"Xdata_{study_fold}.csv" + yfname = f"ydata_{study_fold}.csv" with open(xfname, "w", newline="") as csvfile: writer = csv.writer(csvfile) for sims in results: @@ -32,9 +50,7 @@ def get_config_result(config_fold): y_data = np.concatenate((q0, q1), axis=None) writer.writerow(y_data) - -studies = {"study_scaleup_0_4vvm_3000W": r"0.0036$m^3$ 0.4vvm 0W"} - - -for study in studies: - get_config_result(study) +if __name__ == "__main__": + studies = {"study_scaleup_0_4vvm_3000W": r"0.0036$m^3$ 0.4vvm 0W"} + for study in studies: + get_config_result(study) diff --git a/setup.py b/setup.py index 4eaa7bd2..49263f60 100644 --- a/setup.py +++ b/setup.py @@ -50,6 +50,10 @@ "scikit-learn", "tf2jax", ], + "optim": [ + "optuna", + "pandas", + ], }, project_urls={ "Documentation": "https://nrel.github.io/BioReactorDesign/", From 3dbb89281afe17a264052e2164952825dbbf9608 Mon Sep 17 00:00:00 2001 From: Malik Date: Fri, 30 May 2025 14:41:36 -0600 Subject: [PATCH 07/38] add doc and type hinting --- bird/postprocess/SA_optimization/get_csv.py | 9 +- .../SA_optimization/get_optimal.py | 334 ++++++++++++++---- 2 files changed, 265 insertions(+), 78 deletions(-) diff --git a/bird/postprocess/SA_optimization/get_csv.py b/bird/postprocess/SA_optimization/get_csv.py index b3f2426e..1876d485 100644 --- a/bird/postprocess/SA_optimization/get_csv.py +++ b/bird/postprocess/SA_optimization/get_csv.py @@ -1,11 +1,11 @@ import csv import os +import pickle as pkl import numpy as np -import pickle as pkl -def get_config_result(study_fold:str=".") -> None: +def get_config_result(study_fold: str = ".") -> None: """ Read the configs.pkl and results.pkl files from a study Saves the configuration in Xdata_{study_fold}.csv file @@ -15,10 +15,10 @@ def get_config_result(study_fold:str=".") -> None: ---------- study_fold : str Folder that contains the study results - + Returns ---------- - None + None """ # Read results @@ -50,6 +50,7 @@ def get_config_result(study_fold:str=".") -> None: y_data = np.concatenate((q0, q1), axis=None) writer.writerow(y_data) + if __name__ == "__main__": studies = {"study_scaleup_0_4vvm_3000W": r"0.0036$m^3$ 0.4vvm 0W"} for study in studies: diff --git a/bird/postprocess/SA_optimization/get_optimal.py 
b/bird/postprocess/SA_optimization/get_optimal.py index 43c86bbb..b418a231 100644 --- a/bird/postprocess/SA_optimization/get_optimal.py +++ b/bird/postprocess/SA_optimization/get_optimal.py @@ -1,23 +1,70 @@ import random import warnings -import matplotlib.pyplot as plt import numpy as np import optuna import pandas as pd +from prettyPlot.plotting import * from scipy.interpolate import RBFInterpolator from sklearn.ensemble import RandomForestRegressor from sklearn.metrics import mean_squared_error from sklearn.model_selection import KFold, cross_val_score from sklearn.preprocessing import OneHotEncoder from tensorflow.keras.layers import Dense +from tensorflow.keras.models import Model as tfModel from tensorflow.keras.models import Sequential -warningsl.filterwarnings("ignore") - - -def tune_rbf(X, y): - # Tune the RBFInterpolator +warnings.filterwarnings("ignore") + + +def check_data_shape(X: np.ndarray, y: np.ndarray) -> None: + """ + Tune the shape parameter (epsilon) of the multiquadratic RBF + + Parameters + ---------- + X : np.ndarray + Design configuration + Dimension N by d, where N is the number of simulations, + and d is the number of design variables + y: np.ndarray + Array of quantity of interest + Dimension N by 1, where N is the number of simulations + """ + + # Same number of samples + assert X.shape[0] == y.shape[0] + # Arrays are 2 dimensional + assert len(X.shape) == 2 + assert len(y.shape) == 2 + # only 1 QoI + assert y.shape[1] == 1 + print(f"INFO: {X.shape[0]} sim with {X.shape[1]} design variables") + + +def tune_rbf(X: np.ndarray, y: np.ndarray) -> dict: + """ + Tune the shape parameter (epsilon) of the multiquadratic RBF + + Parameters + ---------- + X : np.ndarray + Design configuration + Dimension N by d, where N is the number of simulations, + and d is the number of design variables + y: np.ndarray + Array of quantity of interest + Dimension N by 1, where N is the number of simulations + Returns + ---------- + params: dict + dictionary of optimal parameters + + """ + + check_data_shape(X=X, y=y) + + # Tune the RBFInterpolator with optuna # kernel - "multiquadric" (Can try other kernels) def objective(trial): epsilon = trial.suggest_float("epsilon", 0.1, 10.0, log=False) @@ -45,7 +92,32 @@ def objective(trial): return study.best_params -def tune_rf(X, y): +def tune_rf(X: np.ndarray, y: np.ndarray): + """ + Tune the number of trees (n_estimators) + tree depth (max_depth) + number of samples in a leaf (min_samples_leaf) + of the RandomForestRegressor + + Parameters + ---------- + X : np.ndarray + Design configuration + Dimension N by d, where N is the number of simulations, + and d is the number of design variables + y: np.ndarray + Array of quantity of interest + Dimension N by 1, where N is the number of simulations + + Returns + ---------- + params: dict + dictionary of optimal parameters + + """ + + check_data_shape(X=X, y=y) + # Tune the Random Forest def objective(trial): model = RandomForestRegressor( @@ -64,7 +136,31 @@ def objective(trial): return study.best_params -def tune_nn(X_encoded, y): +def tune_nn(X_encoded: np.ndarray, y: np.ndarray) -> dict: + """ + Tune the number of neurons (n_units) + number of layers (n_layers) + + in a leaf (min_samples_leaf) of the RandomForestRegressor + + Parameters + ---------- + X_encoded : np.ndarray + Design configuration + Dimension N by d, where N is the number of simulations, + and d is the number of design variables + y: np.ndarray + Array of quantity of interest + Dimension N by 1, where N is the number of 
simulations + + Returns + ---------- + params: dict + dictionary of optimal parameters + + """ + check_data_shape(X=X_encoded, y=y) + # Tune the Neural Network def objective(trial): units = trial.suggest_int("n_units", 16, 128) @@ -101,36 +197,72 @@ def objective(trial): return study.best_params -class Surrogatewrapper: - # Wrapper that builds the surrogate model and predicts the QOI value for given X - def __init__(self, model_type, X, y, params): - # Inputs: - # model_type = 'rbf' for RBFInterpolator - # = 'rf' for Random Forest - # = 'nn' for Neural Network - # X,y = the raw data - # params = tune parameter of the surrogate model - +class Surrogate_wrapper: + """ + Wrapper that builds the surrogate model and predicts the QOI value for given X + """ + + def __init__( + self, model_type: str, X: np.ndarray, y: np.ndarray, params: dict + ): + """ + Create the surroagte wrapper + + Parameters + ---------- + model_type : str + 'rbf' for RBFInterpolator + 'rf' for Random Forest + 'nn' for Neural Network + + X: np.ndarray + Raw design configuration + Dimension N by d, where N is the number of simulations, + and d is the number of design variables + y: np.ndarray + Raw array of quantity of interest + Dimension N by 1, where N is the number of simulations + params: dict + dictionary of surrogate parameters + + """ self.model_type = model_type self.encoder = None - if model_type == "rbf": + if model_type.lower() == "rbf": self.model = RBFInterpolator( X, y, kernel="multiquadric", epsilon=params["epsilon"] ) - elif model_type == "rf": + elif model_type.lower() == "rf": self.model = RandomForestRegressor(**params, random_state=42) self.model.fit(X, y) - elif model_type == "nn": + elif model_type.lower() == "nn": self.encoder = OneHotEncoder(sparse_output=False) X_encoded = self.encoder.fit_transform(X) self.model = self._build_nn( X_encoded.shape[1], params["n_units"], params["n_layers"] ) self.model.fit(X_encoded, y, epochs=100, batch_size=16, verbose=0) - - def _build_nn(self, input_dim, units, layers): - # Builds the neural network + else: + raise NotImplementedError + + def _build_nn(self, input_dim: int, units: int, layers: int) -> tfModel: + """ + Builds the neural net + + Parameters + ---------- + input_dim : int + number of features + units: int + number of neurons per layer + layers: int + number of hidden layers + + Returns + ---------- + model: tfModel + """ model = Sequential() model.add(Dense(units, activation="relu", input_shape=(input_dim,))) for _ in range(layers - 1): @@ -151,23 +283,41 @@ def predict(self, X): def simulated_annealing_surrogate( - surrogate, dim=12, max_iters=1000, temp=10.0, alpha=0.95 -): - # Runs the Simulated Annealing (SA) and reports the results - # Inputs: - # surrogate = tuned surrogate model - # dim = dimension of the problem - # max_iters = maximum number of iterations for SA - # temp = parameter of Simulated Annealing (can be changes for tuned for other problems) - # = max temperature. It controls the exploration rate of SA - # alpha = parameter of Simulated Annealing (can be changes for tuned for other problems) - # = cooling rate. It determines how quickly temp decays. - - # Outputs: - # x_best = optimal solution - # y_best = optimal objective function value - # trace = optimization log. Updates at the end of each iteration. - # trace_x = tracks how X changes during each iteration. 
+ surrogate: Surrogate_wrapper, + dim: int = 12, + max_iters: int = 1000, + temp: float = 10.0, + alpha: float = 0.95, +) -> tuple[np.ndarray, float, list[tuple], list[np.ndarray]]: + """ + Runs the Simulated Annealing (SA) and reports the results + + Parameters + ---------- + surrogate : Surrogate_wrapper + tuned surrogate model + dim: int + dimension of the problem + max_iters: int + maximum number of iterations for SA + temp: float + parameter of Simulated Annealing (can be changed or tuned for other problems) + max temperature. It controls the exploration rate of SA + alpha: + parameter of Simulated Annealing (can be changed or tuned for other problems) + cooling rate. It determines how quickly temp decays + + Returns + ---------- + x_best: np.ndarray + optimal solution + y_best: float + optimal objective function value + trace: list[tuple] + optimization log. Updates at the end of each iteration. + trace_x: list[np.ndarray] + tracks how X changes during each iteration. + """ # generate random starting point x_curr = np.random.randint(0, 3, size=dim) @@ -196,22 +346,45 @@ def simulated_annealing_surrogate( trace.append((i, y_best)) trace_x.append(x_curr.copy()) temp *= alpha - return x_best, y_best, trace, trace_x def run_optimization( - X, y, model_type="rbf", n_runs=10, max_iters=1000, bootstrap_size=100 + X: np.ndarray, + y: np.ndarray, + model_type: str = "rbf", + n_runs: int = 10, + max_iters: int = 1000, + bootstrap_size: int = 100, ): - # Bootstraps data, runs optimization and postprocesses the results. - # Inputs: - # X,y = the raw data - # model_type = 'rbf' for RBFInterpolator - # = 'rf' for Random Forest - # = 'nn' for Neural Network - # n_runs = number of bootstraps - # max_iters = maximum number of SA iterations - # bootstrap_size = size of bootstrap samples + """ + Bootstraps data, runs optimization and postprocesses the results. 
+ + Parameters + ---------- + X: np.ndarray + Raw design configuration + Dimension N by d, where N is the number of simulations, + and d is the number of design variables + y: np.ndarray + Raw array of quantity of interest + Dimension N by 1, where N is the number of simulations + + model_type : str + 'rbf' for RBFInterpolator + 'rf' for Random Forest + 'nn' for Neural Network + + n_runs :int + number of bootstraps + max_iters: int + maximum number of SA iterations + bootstrap_size : int + size of bootstrap samples + + Returns + ---------- + """ all_x = [] all_y = [] @@ -240,7 +413,7 @@ def run_optimization( raise ValueError("Invalid model_type") # build the surrogate model - surrogate = Surrogatewrapper(model_type, X_sub, y_sub, params) + surrogate = Surrogate_wrapper(model_type, X_sub, y_sub, params) # run optimization x_best, y_best, trace, trace_x = simulated_annealing_surrogate( @@ -294,17 +467,22 @@ def run_optimization( alpha=0.3, label="95% CI", ) - plt.xlabel("Iteration") - plt.ylabel("Best Surrogate-Predicted Objective") - plt.title( - f"Mean Convergence with 95% Confidence Interval ({model_type.upper()})" + pretty_labels( + "Iteration", + "Best Surrogate-Predicted Objective", + fontsize=16, + title=f"Mean Convergence with 95% Confidence Interval ({model_type.upper()})", + grid=True, + fontname="Times", ) - plt.grid(True) - plt.legend() + pretty_legend(fontsize=16, fontname="Times") plt.savefig( f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}.png", dpi=300, ) + plt.savefig( + f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}.pdf", + ) plt.show() # Compute the distance of intermediate x from the optimal solution @@ -332,27 +510,35 @@ def run_optimization( plt.fill_between( iterations, lower, upper, alpha=0.3, color="darkred", label="95% CI" ) - plt.xlabel("Iteration") - plt.ylabel("L1 Distance from Optimal") - plt.title("Convergence Toward Optimal Solution (L1 Norm)") - plt.grid(True) - plt.legend() + pretty_labels( + "Iteration", + "L1 Distance from Optimal", + fontsize=16, + title=f"Convergence Toward Optimal Solution (L1 Norm)", + grid=True, + fontname="Times", + ) + pretty_legend(fontsize=16, fontname="Times") plt.tight_layout() plt.savefig( f"Mean_L1_distance_{model_type}_size_{bootstrap_size}.png", dpi=300 ) + plt.savefig(f"Mean_L1_distance_{model_type}_size_{bootstrap_size}.pdf") plt.show() -# Read data from the csv file. -X_raw_data = pd.read_csv("Xdata_study_scaleup_0_4vvm_3000W.csv") -y_raw_data = pd.read_csv("ydata_study_scaleup_0_4vvm_3000W.csv") +if __name__ == "__main__": + + # Read data from the csv file. + X_raw_data = pd.read_csv("Xdata_study_scaleup_0_4vvm_3000W.csv") + y_raw_data = pd.read_csv("ydata_study_scaleup_0_4vvm_3000W.csv") -X = X_raw_data.values -y = y_raw_data.iloc[:, :-1].values -y = y * -1 + X = X_raw_data.values + y = y_raw_data.iloc[:, :-1].values + # We want to maximize, so we minimize the opposite + y = y * -1 -# The function will build, tune, and optimize the surrogate model and postprocess the results. -run_optimization( - X, y, model_type="rbf", n_runs=10, max_iters=1000, bootstrap_size=150 -) + # The function will build, tune, and optimize the surrogate model and postprocess the results. 
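The tuning helpers documented in this patch (`tune_rbf` shown above, with `tune_rf` and `tune_nn` following the same shape) all wrap an Optuna study around 5-fold cross-validation. A self-contained sketch of that pattern on synthetic data (the toy `X`, `y`, and trial count are assumptions for illustration; the real scripts pass the bootstrap sample `X_sub`, `y_sub` and use 20 trials):

```python
import numpy as np
import optuna
from scipy.interpolate import RBFInterpolator
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import KFold

# synthetic stand-in for one bootstrap sample of the study data
rng = np.random.default_rng(0)
X = rng.random((60, 11))
y = np.sin(X.sum(axis=1, keepdims=True))

def objective(trial):
    # one candidate epsilon per trial, scored by 5-fold cross-validated MSE
    epsilon = trial.suggest_float("epsilon", 0.1, 10.0)
    kf = KFold(n_splits=5, shuffle=True, random_state=42)
    mses = []
    for train, test in kf.split(X):
        model = RBFInterpolator(
            X[train], y[train], epsilon=epsilon, kernel="multiquadric"
        )
        mses.append(mean_squared_error(y[test], model(X[test])))
    return float(np.mean(mses))

study = optuna.create_study(direction="minimize")
study.optimize(objective, n_trials=10)
print(study.best_params)
```

Because every bootstrap sample gets its own study, the tuning cost per run is the number of trials times five surrogate fits, which is worth keeping in mind before raising the trial count or the number of bootstrap runs.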
+ run_optimization( + X, y, model_type="rbf", n_runs=10, max_iters=1000, bootstrap_size=150 + ) From 5c0c719fa978d1881d99fc0ca3b2b1f365fd757c Mon Sep 17 00:00:00 2001 From: Malik Date: Fri, 30 May 2025 14:51:39 -0600 Subject: [PATCH 08/38] split surrogate and optimization --- .../SA_optimization/get_optimal.py | 275 +---------------- bird/postprocess/SA_optimization/surrogate.py | 282 ++++++++++++++++++ 2 files changed, 283 insertions(+), 274 deletions(-) create mode 100644 bird/postprocess/SA_optimization/surrogate.py diff --git a/bird/postprocess/SA_optimization/get_optimal.py b/bird/postprocess/SA_optimization/get_optimal.py index b418a231..da0851e5 100644 --- a/bird/postprocess/SA_optimization/get_optimal.py +++ b/bird/postprocess/SA_optimization/get_optimal.py @@ -2,286 +2,13 @@ import warnings import numpy as np -import optuna import pandas as pd from prettyPlot.plotting import * -from scipy.interpolate import RBFInterpolator -from sklearn.ensemble import RandomForestRegressor -from sklearn.metrics import mean_squared_error -from sklearn.model_selection import KFold, cross_val_score from sklearn.preprocessing import OneHotEncoder -from tensorflow.keras.layers import Dense -from tensorflow.keras.models import Model as tfModel -from tensorflow.keras.models import Sequential - +from bird.postprocess.SA_optimization.surrogate import tune_rf, tune_rbf, tune_nn, Surrogate_wrapper warnings.filterwarnings("ignore") -def check_data_shape(X: np.ndarray, y: np.ndarray) -> None: - """ - Tune the shape parameter (epsilon) of the multiquadratic RBF - - Parameters - ---------- - X : np.ndarray - Design configuration - Dimension N by d, where N is the number of simulations, - and d is the number of design variables - y: np.ndarray - Array of quantity of interest - Dimension N by 1, where N is the number of simulations - """ - - # Same number of samples - assert X.shape[0] == y.shape[0] - # Arrays are 2 dimensional - assert len(X.shape) == 2 - assert len(y.shape) == 2 - # only 1 QoI - assert y.shape[1] == 1 - print(f"INFO: {X.shape[0]} sim with {X.shape[1]} design variables") - - -def tune_rbf(X: np.ndarray, y: np.ndarray) -> dict: - """ - Tune the shape parameter (epsilon) of the multiquadratic RBF - - Parameters - ---------- - X : np.ndarray - Design configuration - Dimension N by d, where N is the number of simulations, - and d is the number of design variables - y: np.ndarray - Array of quantity of interest - Dimension N by 1, where N is the number of simulations - Returns - ---------- - params: dict - dictionary of optimal parameters - - """ - - check_data_shape(X=X, y=y) - - # Tune the RBFInterpolator with optuna - # kernel - "multiquadric" (Can try other kernels) - def objective(trial): - epsilon = trial.suggest_float("epsilon", 0.1, 10.0, log=False) - try: - kf = KFold(n_splits=5, shuffle=True, random_state=42) - return np.mean( - [ - mean_squared_error( - y[test], - RBFInterpolator( - X[train], - y[train], - epsilon=epsilon, - kernel="multiquadric", - )(X[test]), - ) - for train, test in kf.split(X) - ] - ) - except: - return float("inf") - - study = optuna.create_study(direction="minimize") - study.optimize(objective, n_trials=20) - return study.best_params - - -def tune_rf(X: np.ndarray, y: np.ndarray): - """ - Tune the number of trees (n_estimators) - tree depth (max_depth) - number of samples in a leaf (min_samples_leaf) - of the RandomForestRegressor - - Parameters - ---------- - X : np.ndarray - Design configuration - Dimension N by d, where N is the number of simulations, - and d is 
the number of design variables - y: np.ndarray - Array of quantity of interest - Dimension N by 1, where N is the number of simulations - - Returns - ---------- - params: dict - dictionary of optimal parameters - - """ - - check_data_shape(X=X, y=y) - - # Tune the Random Forest - def objective(trial): - model = RandomForestRegressor( - n_estimators=trial.suggest_int("n_estimators", 50, 200), - max_depth=trial.suggest_int("max_depth", 3, 10), - min_samples_leaf=trial.suggest_int("min_samples_leaf", 1, 10), - random_state=42, - ) - scores = cross_val_score( - model, X, y, cv=5, scoring="neg_mean_squared_error" - ) - return -np.mean(scores) - - study = optuna.create_study(direction="minimize") - study.optimize(objective, n_trials=20) - return study.best_params - - -def tune_nn(X_encoded: np.ndarray, y: np.ndarray) -> dict: - """ - Tune the number of neurons (n_units) - number of layers (n_layers) - - in a leaf (min_samples_leaf) of the RandomForestRegressor - - Parameters - ---------- - X_encoded : np.ndarray - Design configuration - Dimension N by d, where N is the number of simulations, - and d is the number of design variables - y: np.ndarray - Array of quantity of interest - Dimension N by 1, where N is the number of simulations - - Returns - ---------- - params: dict - dictionary of optimal parameters - - """ - check_data_shape(X=X_encoded, y=y) - - # Tune the Neural Network - def objective(trial): - units = trial.suggest_int("n_units", 16, 128) - layers = trial.suggest_int("n_layers", 1, 3) - kf = KFold(n_splits=5, shuffle=True, random_state=42) - mses = [] - for train, test in kf.split(X_encoded): - model = Sequential() - model.add( - Dense( - units, activation="relu", input_shape=(X_encoded.shape[1],) - ) - ) - for _ in range(layers - 1): - model.add(Dense(units, activation="relu")) - model.add(Dense(1)) - model.compile(optimizer="adam", loss="mse") - model.fit( - X_encoded[train], - y[train], - epochs=100, - batch_size=16, - verbose=0, - ) - mses.append( - mean_squared_error( - y[test], model.predict(X_encoded[test]).flatten() - ) - ) - return np.mean(mses) - - study = optuna.create_study(direction="minimize") - study.optimize(objective, n_trials=20) - return study.best_params - - -class Surrogate_wrapper: - """ - Wrapper that builds the surrogate model and predicts the QOI value for given X - """ - - def __init__( - self, model_type: str, X: np.ndarray, y: np.ndarray, params: dict - ): - """ - Create the surroagte wrapper - - Parameters - ---------- - model_type : str - 'rbf' for RBFInterpolator - 'rf' for Random Forest - 'nn' for Neural Network - - X: np.ndarray - Raw design configuration - Dimension N by d, where N is the number of simulations, - and d is the number of design variables - y: np.ndarray - Raw array of quantity of interest - Dimension N by 1, where N is the number of simulations - params: dict - dictionary of surrogate parameters - - """ - self.model_type = model_type - self.encoder = None - - if model_type.lower() == "rbf": - self.model = RBFInterpolator( - X, y, kernel="multiquadric", epsilon=params["epsilon"] - ) - elif model_type.lower() == "rf": - self.model = RandomForestRegressor(**params, random_state=42) - self.model.fit(X, y) - elif model_type.lower() == "nn": - self.encoder = OneHotEncoder(sparse_output=False) - X_encoded = self.encoder.fit_transform(X) - self.model = self._build_nn( - X_encoded.shape[1], params["n_units"], params["n_layers"] - ) - self.model.fit(X_encoded, y, epochs=100, batch_size=16, verbose=0) - else: - raise NotImplementedError - - 
def _build_nn(self, input_dim: int, units: int, layers: int) -> tfModel: - """ - Builds the neural net - - Parameters - ---------- - input_dim : int - number of features - units: int - number of neurons per layer - layers: int - number of hidden layers - - Returns - ---------- - model: tfModel - """ - model = Sequential() - model.add(Dense(units, activation="relu", input_shape=(input_dim,))) - for _ in range(layers - 1): - model.add(Dense(units, activation="relu")) - model.add(Dense(1)) - model.compile(optimizer="adam", loss="mse") - return model - - def predict(self, X): - X = X.reshape(1, -1) - if self.model_type == "nn": - X = self.encoder.transform(X) - return float(self.model.predict(X)[0]) - elif self.model_type == "rf": - return float(self.model.predict(X)[0]) - else: - return float(self.model(X)[0]) - - def simulated_annealing_surrogate( surrogate: Surrogate_wrapper, dim: int = 12, diff --git a/bird/postprocess/SA_optimization/surrogate.py b/bird/postprocess/SA_optimization/surrogate.py new file mode 100644 index 00000000..79a3f9c9 --- /dev/null +++ b/bird/postprocess/SA_optimization/surrogate.py @@ -0,0 +1,282 @@ +import random +import warnings + +import numpy as np +import optuna +import pandas as pd +from scipy.interpolate import RBFInterpolator +from sklearn.ensemble import RandomForestRegressor +from sklearn.metrics import mean_squared_error +from sklearn.model_selection import KFold, cross_val_score +from sklearn.preprocessing import OneHotEncoder +from tensorflow.keras.layers import Dense +from tensorflow.keras.models import Model as tfModel +from tensorflow.keras.models import Sequential + +warnings.filterwarnings("ignore") + + +def check_data_shape(X: np.ndarray, y: np.ndarray) -> None: + """ + Tune the shape parameter (epsilon) of the multiquadratic RBF + + Parameters + ---------- + X : np.ndarray + Design configuration + Dimension N by d, where N is the number of simulations, + and d is the number of design variables + y: np.ndarray + Array of quantity of interest + Dimension N by 1, where N is the number of simulations + """ + + # Same number of samples + assert X.shape[0] == y.shape[0] + # Arrays are 2 dimensional + assert len(X.shape) == 2 + assert len(y.shape) == 2 + # only 1 QoI + assert y.shape[1] == 1 + print(f"INFO: {X.shape[0]} sim with {X.shape[1]} design variables") + + +def tune_rbf(X: np.ndarray, y: np.ndarray) -> dict: + """ + Tune the shape parameter (epsilon) of the multiquadratic RBF + + Parameters + ---------- + X : np.ndarray + Design configuration + Dimension N by d, where N is the number of simulations, + and d is the number of design variables + y: np.ndarray + Array of quantity of interest + Dimension N by 1, where N is the number of simulations + Returns + ---------- + params: dict + dictionary of optimal parameters + + """ + + check_data_shape(X=X, y=y) + + # Tune the RBFInterpolator with optuna + # kernel - "multiquadric" (Can try other kernels) + def objective(trial): + epsilon = trial.suggest_float("epsilon", 0.1, 10.0, log=False) + try: + kf = KFold(n_splits=5, shuffle=True, random_state=42) + return np.mean( + [ + mean_squared_error( + y[test], + RBFInterpolator( + X[train], + y[train], + epsilon=epsilon, + kernel="multiquadric", + )(X[test]), + ) + for train, test in kf.split(X) + ] + ) + except: + return float("inf") + + study = optuna.create_study(direction="minimize") + study.optimize(objective, n_trials=20) + return study.best_params + + +def tune_rf(X: np.ndarray, y: np.ndarray): + """ + Tune the number of trees (n_estimators) + tree 
depth (max_depth) + number of samples in a leaf (min_samples_leaf) + of the RandomForestRegressor + + Parameters + ---------- + X : np.ndarray + Design configuration + Dimension N by d, where N is the number of simulations, + and d is the number of design variables + y: np.ndarray + Array of quantity of interest + Dimension N by 1, where N is the number of simulations + + Returns + ---------- + params: dict + dictionary of optimal parameters + + """ + + check_data_shape(X=X, y=y) + + # Tune the Random Forest + def objective(trial): + model = RandomForestRegressor( + n_estimators=trial.suggest_int("n_estimators", 50, 200), + max_depth=trial.suggest_int("max_depth", 3, 10), + min_samples_leaf=trial.suggest_int("min_samples_leaf", 1, 10), + random_state=42, + ) + scores = cross_val_score( + model, X, y, cv=5, scoring="neg_mean_squared_error" + ) + return -np.mean(scores) + + study = optuna.create_study(direction="minimize") + study.optimize(objective, n_trials=20) + return study.best_params + + +def tune_nn(X_encoded: np.ndarray, y: np.ndarray) -> dict: + """ + Tune the number of neurons (n_units) + number of layers (n_layers) + + in a leaf (min_samples_leaf) of the RandomForestRegressor + + Parameters + ---------- + X_encoded : np.ndarray + Design configuration + Dimension N by d, where N is the number of simulations, + and d is the number of design variables + y: np.ndarray + Array of quantity of interest + Dimension N by 1, where N is the number of simulations + + Returns + ---------- + params: dict + dictionary of optimal parameters + + """ + check_data_shape(X=X_encoded, y=y) + + # Tune the Neural Network + def objective(trial): + units = trial.suggest_int("n_units", 16, 128) + layers = trial.suggest_int("n_layers", 1, 3) + kf = KFold(n_splits=5, shuffle=True, random_state=42) + mses = [] + for train, test in kf.split(X_encoded): + model = Sequential() + model.add( + Dense( + units, activation="relu", input_shape=(X_encoded.shape[1],) + ) + ) + for _ in range(layers - 1): + model.add(Dense(units, activation="relu")) + model.add(Dense(1)) + model.compile(optimizer="adam", loss="mse") + model.fit( + X_encoded[train], + y[train], + epochs=100, + batch_size=16, + verbose=0, + ) + mses.append( + mean_squared_error( + y[test], model.predict(X_encoded[test]).flatten() + ) + ) + return np.mean(mses) + + study = optuna.create_study(direction="minimize") + study.optimize(objective, n_trials=20) + return study.best_params + + +class Surrogate_wrapper: + """ + Wrapper that builds the surrogate model and predicts the QOI value for given X + """ + + def __init__( + self, model_type: str, X: np.ndarray, y: np.ndarray, params: dict + ): + """ + Create the surroagte wrapper + + Parameters + ---------- + model_type : str + 'rbf' for RBFInterpolator + 'rf' for Random Forest + 'nn' for Neural Network + + X: np.ndarray + Raw design configuration + Dimension N by d, where N is the number of simulations, + and d is the number of design variables + y: np.ndarray + Raw array of quantity of interest + Dimension N by 1, where N is the number of simulations + params: dict + dictionary of surrogate parameters + + """ + self.model_type = model_type + self.encoder = None + + if model_type.lower() == "rbf": + self.model = RBFInterpolator( + X, y, kernel="multiquadric", epsilon=params["epsilon"] + ) + elif model_type.lower() == "rf": + self.model = RandomForestRegressor(**params, random_state=42) + self.model.fit(X, y) + elif model_type.lower() == "nn": + self.encoder = OneHotEncoder(sparse_output=False) + X_encoded = 
self.encoder.fit_transform(X) + self.model = self._build_nn( + X_encoded.shape[1], params["n_units"], params["n_layers"] + ) + self.model.fit(X_encoded, y, epochs=100, batch_size=16, verbose=0) + else: + raise NotImplementedError + + def _build_nn(self, input_dim: int, units: int, layers: int) -> tfModel: + """ + Builds the neural net + + Parameters + ---------- + input_dim : int + number of features + units: int + number of neurons per layer + layers: int + number of hidden layers + + Returns + ---------- + model: tfModel + """ + model = Sequential() + model.add(Dense(units, activation="relu", input_shape=(input_dim,))) + for _ in range(layers - 1): + model.add(Dense(units, activation="relu")) + model.add(Dense(1)) + model.compile(optimizer="adam", loss="mse") + return model + + def predict(self, X): + X = X.reshape(1, -1) + if self.model_type == "nn": + X = self.encoder.transform(X) + return float(self.model.predict(X)[0]) + elif self.model_type == "rf": + return float(self.model.predict(X)[0]) + else: + return float(self.model(X)[0]) + From 188e7b6971bb6c4eca342bba4465f742eb863890 Mon Sep 17 00:00:00 2001 From: Malik Date: Fri, 30 May 2025 15:01:32 -0600 Subject: [PATCH 09/38] doc string type hinting and reuse surrogate functions --- .../SA_optimization/get_optimal.py | 9 +- .../get_optimal_with_constraint.py | 311 +++++++----------- bird/postprocess/SA_optimization/surrogate.py | 1 - 3 files changed, 120 insertions(+), 201 deletions(-) diff --git a/bird/postprocess/SA_optimization/get_optimal.py b/bird/postprocess/SA_optimization/get_optimal.py index da0851e5..b667fbb9 100644 --- a/bird/postprocess/SA_optimization/get_optimal.py +++ b/bird/postprocess/SA_optimization/get_optimal.py @@ -5,7 +5,14 @@ import pandas as pd from prettyPlot.plotting import * from sklearn.preprocessing import OneHotEncoder -from bird.postprocess.SA_optimization.surrogate import tune_rf, tune_rbf, tune_nn, Surrogate_wrapper + +from bird.postprocess.SA_optimization.surrogate import ( + Surrogate_wrapper, + tune_nn, + tune_rbf, + tune_rf, +) + warnings.filterwarnings("ignore") diff --git a/bird/postprocess/SA_optimization/get_optimal_with_constraint.py b/bird/postprocess/SA_optimization/get_optimal_with_constraint.py index b3371e41..4bfc7b02 100644 --- a/bird/postprocess/SA_optimization/get_optimal_with_constraint.py +++ b/bird/postprocess/SA_optimization/get_optimal_with_constraint.py @@ -1,171 +1,56 @@ import random -import matplotlib.pyplot as plt import numpy as np import optuna import pandas as pd -from scipy.interpolate import RBFInterpolator -from sklearn.ensemble import RandomForestRegressor -from sklearn.metrics import mean_squared_error -from sklearn.model_selection import KFold, cross_val_score +from prettyPlot.plotting import * from sklearn.preprocessing import OneHotEncoder -from tensorflow.keras.layers import Dense -from tensorflow.keras.models import Sequential - - -def tune_rbf(X, y): - def objective(trial): - # Tune the RBFInterpolator - # kernel - "multiquadric" (Can try other kernels) - epsilon = trial.suggest_float("epsilon", 0.1, 10.0, log=False) - try: - kf = KFold(n_splits=5, shuffle=True, random_state=42) - return np.mean( - [ - mean_squared_error( - y[test], - RBFInterpolator( - X[train], - y[train], - epsilon=epsilon, - kernel="multiquadric", - )(X[test]), - ) - for train, test in kf.split(X) - ] - ) - except: - return float("inf") - - study = optuna.create_study(direction="minimize") - study.optimize(objective, n_trials=20) - return study.best_params - - -def tune_rf(X, y): - # Tune the 
Random Forest - def objective(trial): - model = RandomForestRegressor( - n_estimators=trial.suggest_int("n_estimators", 50, 200), - max_depth=trial.suggest_int("max_depth", 3, 10), - min_samples_leaf=trial.suggest_int("min_samples_leaf", 1, 10), - random_state=42, - ) - scores = cross_val_score( - model, X, y, cv=5, scoring="neg_mean_squared_error" - ) - return -np.mean(scores) - - study = optuna.create_study(direction="minimize") - study.optimize(objective, n_trials=20) - return study.best_params - - -def tune_nn(X_encoded, y): - # Tune the Neural Network - def objective(trial): - units = trial.suggest_int("n_units", 16, 128) - layers = trial.suggest_int("n_layers", 1, 3) - kf = KFold(n_splits=5, shuffle=True, random_state=42) - mses = [] - for train, test in kf.split(X_encoded): - model = Sequential() - model.add( - Dense( - units, activation="relu", input_shape=(X_encoded.shape[1],) - ) - ) - for _ in range(layers - 1): - model.add(Dense(units, activation="relu")) - model.add(Dense(1)) - model.compile(optimizer="adam", loss="mse") - model.fit( - X_encoded[train], - y[train], - epochs=100, - batch_size=16, - verbose=0, - ) - mses.append( - mean_squared_error( - y[test], model.predict(X_encoded[test]).flatten() - ) - ) - return np.mean(mses) - - study = optuna.create_study(direction="minimize") - study.optimize(objective, n_trials=20) - return study.best_params - - -class Surrogatewrapper: - # Wrapper that builds the surrogate model and predicts the QOI value for given X - def __init__(self, model_type, X, y, params): - # Inputs: - # model_type = 'rbf' for RBFInterpolator - # = 'rf' for Random Forest - # = 'nn' for Neural Network - # X,y = the raw data - # params = tune parameter of the surrogate model - - self.model_type = model_type - self.encoder = None - if model_type == "rbf": - self.model = RBFInterpolator( - X, y, kernel="multiquadric", epsilon=params["epsilon"] - ) - elif model_type == "rf": - self.model = RandomForestRegressor(**params, random_state=42) - self.model.fit(X, y) - elif model_type == "nn": - self.encoder = OneHotEncoder(sparse_output=False) - X_encoded = self.encoder.fit_transform(X) - self.model = self._build_nn( - X_encoded.shape[1], params["n_units"], params["n_layers"] - ) - self.model.fit(X_encoded, y, epochs=100, batch_size=16, verbose=0) - - def _build_nn(self, input_dim, units, layers): - # Builds the neural network - model = Sequential() - model.add(Dense(units, activation="relu", input_shape=(input_dim,))) - for _ in range(layers - 1): - model.add(Dense(units, activation="relu")) - model.add(Dense(1)) - model.compile(optimizer="adam", loss="mse") - return model - - def predict(self, X): - X = X.reshape(1, -1) - if self.model_type == "nn": - X = self.encoder.transform(X) - return float(self.model.predict(X)[0]) - elif self.model_type == "rf": - return float(self.model.predict(X)[0]) - else: - return float(self.model(X)[0]) +from bird.postprocess.SA_optimization.surrogate import ( + Surrogate_wrapper, + tune_nn, + tune_rbf, + tune_rf, +) def simulated_annealing_surrogate( - surrogate, dim=12, max_iters=1000, max_spargers=8, temp=10.0, alpha=0.95 -): - # Runs the Simulated Annealing (SA) and reports the results - # Inputs: - # surrogate = tuned surrogate model - # dim = dimension of the problem - # max_spargers = maximum number of spargers allowed - # max_iters = maximum number of iterations for SA - # temp = parameter of Simulated Annealing (can be changes for tuned for other problems) - # = max temperature. 
It controls the exploration rate of SA - # alpha = parameter of Simulated Annealing (can be changes for tuned for other problems) - # = cooling rate. It determines how quickly temp decays. - - # Outputs: - # x_best = optimal solution - # y_best = optimal objective function value - # trace = optimization log. Updates at the end of each iteration. - # trace_x = tracks how X changes during each iteration. + surrogate: Surrogate_wrapper, + dim: int = 12, + max_iters: int = 1000, + max_spargers: int = 8, + temp: float = 10.0, + alpha: float = 0.95, +) -> tuple[np.ndarray, float, list[tuple]]: + """ + Runs the Simulated Annealing (SA) constrained by the number of spargers and reports the results + + Parameters + ---------- + surrogate : Surrogate_wrapper + tuned surrogate model + dim: int + dimension of the problem + max_iters: int + maximum number of iterations for SA + max_spargers: int + maximum number of spargers (this is a constraint of the optimization problem) + temp: float + parameter of Simulated Annealing (can be changed or tuned for other problems) + max temperature. It controls the exploration rate of SA + alpha: + parameter of Simulated Annealing (can be changed or tuned for other problems) + cooling rate. It determines how quickly temp decays + + Returns + ---------- + x_best: np.ndarray + optimal solution + y_best: float + optimal objective function value + trace: list[tuple] + optimization log. Updates at the end of each iteration. + """ def is_valid(x): # Checks if the number of spargers <= max_spargers @@ -210,23 +95,44 @@ def is_valid(x): def run_optimization( - X, - y, - model_type="rbf", - max_spargers=8, - n_runs=10, - max_iters=1000, - bootstrap_size=100, + X: np.ndarray, + y: np.ndarray, + model_type: str = "rbf", + max_spargers: int = 8, + n_runs: int = 10, + max_iters: int = 1000, + bootstrap_size: int = 100, ): - # Bootstraps data, runs optimization and postprocesses the results. - # Inputs: - # X,y = the raw data - # model_type = 'rbf' for RBFInterpolator - # = 'rf' for Random Forest - # = 'nn' for Neural Network - # n_runs = number of bootstraps - # max_iters = maximum number of SA iterations - # bootstrap_size = size of bootstrap samples + """ + Bootstraps data, runs optimization and postprocesses the results. 
+ + Parameters + ---------- + X: np.ndarray + Raw design configuration + Dimension N by d, where N is the number of simulations, + and d is the number of design variables + y: np.ndarray + Raw array of quantity of interest + Dimension N by 1, where N is the number of simulations + + model_type : str + 'rbf' for RBFInterpolator + 'rf' for Random Forest + 'nn' for Neural Network + + max_spargers: int + maximum number of spargers (this is a constraint of the optimization problem) + n_runs :int + number of bootstraps + max_iters: int + maximum number of SA iterations + bootstrap_size : int + size of bootstrap samples + + Returns + ---------- + """ all_x = [] all_y = [] @@ -254,7 +160,7 @@ def run_optimization( raise ValueError("Invalid model_type") # build the surrogate model - surrogate = Surrogatewrapper(model_type, X_sub, y_sub, params) + surrogate = Surrogate_wrapper(model_type, X_sub, y_sub, params) # run optimization x_best, y_best, trace = simulated_annealing_surrogate( @@ -313,35 +219,42 @@ def run_optimization( alpha=0.3, label="95% CI", ) - plt.xlabel("Iteration") - plt.ylabel("Best Surrogate-Predicted Objective") - plt.title( - f"Mean Convergence with 95% Confidence Interval ({model_type.upper()})" + pretty_labels( + "Iteration", + "Best Surrogate-Predicted Objective", + fontsize=16, + title=f"Mean Convergence with 95% Confidence Interval ({model_type.upper()})", + grid=True, + fontname="Times", ) - plt.grid(True) - plt.legend() + pretty_legend(fontsize=16, fontname="Times") plt.savefig( f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}_max_spargers_{max_spargers}.png", dpi=300, ) + plt.savefig( + f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}_max_spargers_{max_spargers}.pdf", + ) plt.show() -# Read data from the csv file. -X_raw_data = pd.read_csv("Xdata_study_scaleup_0_4vvm_3000W.csv") -y_raw_data = pd.read_csv("ydata_study_scaleup_0_4vvm_3000W.csv") - -X = X_raw_data.values -y = y_raw_data.iloc[:, :-1].values -y = y * -1 - -# The function will build, tune, and optimize the surrogate model and postprocess the results. -run_optimization( - X, - y, - model_type="rbf", - max_spargers=8, - n_runs=10, - max_iters=1000, - bootstrap_size=100, -) +if __name__ == "__main__": + # Read data from the csv file. + X_raw_data = pd.read_csv("Xdata_study_scaleup_0_4vvm_3000W.csv") + y_raw_data = pd.read_csv("ydata_study_scaleup_0_4vvm_3000W.csv") + + X = X_raw_data.values + y = y_raw_data.iloc[:, :-1].values + # We want to maximize, so we minimize the opposite + y = y * -1 + + # The function will build, tune, and optimize the surrogate model and postprocess the results. 
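# Hedged standalone sketch (not code from this patch) of the Metropolis
# acceptance and geometric cooling step that the `temp` and `alpha`
# parameters documented above control. Assumes a minimization problem and
# generic `objective` / `neighbor` callables supplied by the caller; both
# names are illustrative, not taken from the repository.
import math
import random


def sa_step(x, y, objective, neighbor, temp):
    """One Metropolis step: always accept downhill moves, accept uphill
    moves with probability exp(-(y_new - y) / temp)."""
    x_new = neighbor(x)
    y_new = objective(x_new)
    if y_new < y or random.random() < math.exp(-(y_new - y) / temp):
        return x_new, y_new
    return x, y


def sa_loop(x0, objective, neighbor, max_iters=1000, temp=10.0, alpha=0.95):
    """Cooling loop sketch: temp decays by a factor alpha each iteration,
    so a larger temp means more exploration and alpha closer to 1 means a
    slower decay."""
    x, y = x0, objective(x0)
    x_best, y_best = x, y
    for _ in range(max_iters):
        x, y = sa_step(x, y, objective, neighbor, temp)
        if y < y_best:
            x_best, y_best = x, y
        temp *= alpha
    return x_best, y_best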
+ run_optimization( + X, + y, + model_type="rbf", + max_spargers=8, + n_runs=10, + max_iters=1000, + bootstrap_size=100, + ) diff --git a/bird/postprocess/SA_optimization/surrogate.py b/bird/postprocess/SA_optimization/surrogate.py index 79a3f9c9..1dfec6f9 100644 --- a/bird/postprocess/SA_optimization/surrogate.py +++ b/bird/postprocess/SA_optimization/surrogate.py @@ -279,4 +279,3 @@ def predict(self, X): return float(self.model.predict(X)[0]) else: return float(self.model(X)[0]) - From 753582eb01482fcaa92f2e432c0a1a1d9ee1c4ae Mon Sep 17 00:00:00 2001 From: Malik Date: Fri, 30 May 2025 15:01:56 -0600 Subject: [PATCH 10/38] ignore pdf from codespelling --- .github/linters/.codespellrc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/linters/.codespellrc b/.github/linters/.codespellrc index a8ac1050..11311016 100644 --- a/.github/linters/.codespellrc +++ b/.github/linters/.codespellrc @@ -1,3 +1,3 @@ [codespell] -skip = None, .git,OFsolvers,tutorial_cases,experimental_cases,build,_build,__pycache__,data_conditional_mean,Figures,assets +skip = None, .git,OFsolvers,*.pdf, tutorial_cases,experimental_cases,build,_build,__pycache__,data_conditional_mean,Figures,assets ignore-words = .github/linters/.codespell-ignore-words From a9644c69bbde674580abc925c00faaeb0d327ef7 Mon Sep 17 00:00:00 2001 From: Malik Date: Thu, 5 Jun 2025 12:37:29 -0600 Subject: [PATCH 11/38] save optim results to folder, and remove tmp files --- .../Xdata_study_scaleup_0_4vvm_3000W.csv | 185 ------------------ bird/postprocess/SA_optimization/get_csv.py | 10 +- .../SA_optimization/get_optimal.py | 81 ++++++-- .../get_optimal_with_constraint.py | 61 +++--- bird/postprocess/SA_optimization/results.pkl | Bin 5208 -> 0 bytes bird/postprocess/SA_optimization/temp | 1 - .../ydata_study_scaleup_0_4vvm_3000W.csv | 185 ------------------ 7 files changed, 109 insertions(+), 414 deletions(-) delete mode 100644 bird/postprocess/SA_optimization/Xdata_study_scaleup_0_4vvm_3000W.csv delete mode 100644 bird/postprocess/SA_optimization/results.pkl delete mode 100644 bird/postprocess/SA_optimization/temp delete mode 100644 bird/postprocess/SA_optimization/ydata_study_scaleup_0_4vvm_3000W.csv diff --git a/bird/postprocess/SA_optimization/Xdata_study_scaleup_0_4vvm_3000W.csv b/bird/postprocess/SA_optimization/Xdata_study_scaleup_0_4vvm_3000W.csv deleted file mode 100644 index 771f8ccf..00000000 --- a/bird/postprocess/SA_optimization/Xdata_study_scaleup_0_4vvm_3000W.csv +++ /dev/null @@ -1,185 +0,0 @@ -0,1,0,1,0,0,2,2,2,2,0 -0,2,2,2,2,2,0,1,0,0,0 -2,1,0,1,2,0,0,1,1,0,1 -0,1,1,2,0,1,1,2,1,1,0 -0,2,2,2,2,1,1,1,0,2,0 -0,1,2,2,1,1,0,2,2,1,0 -2,0,2,1,0,2,1,1,1,2,2 -0,2,1,1,0,0,2,0,0,1,0 -0,0,0,1,1,1,1,0,2,2,2 -0,2,0,2,1,0,2,2,1,2,1 -2,2,1,1,0,2,2,2,0,1,1 -0,1,0,1,2,0,2,1,1,1,1 -2,1,1,0,0,1,2,1,1,2,1 -1,2,0,1,1,2,2,2,2,1,0 -1,1,0,0,0,2,2,0,2,1,2 -1,1,0,1,0,0,2,0,2,0,1 -0,1,2,2,2,0,1,2,2,1,0 -2,1,2,0,1,1,2,0,1,0,0 -0,1,1,0,0,0,2,2,2,2,1 -2,2,0,1,0,2,2,2,1,0,1 -0,2,0,2,1,2,2,0,0,1,2 -0,1,2,1,1,2,0,2,0,0,2 -1,1,0,1,0,2,1,2,1,1,0 -2,2,2,1,1,0,2,2,1,2,0 -2,2,1,1,1,2,1,2,1,1,0 -1,1,2,1,1,1,1,0,2,1,1 -0,0,2,1,2,0,1,0,0,0,1 -0,0,2,0,1,0,2,0,0,1,2 -0,1,0,1,1,0,2,1,0,1,0 -1,2,2,1,0,2,2,1,0,1,2 -0,0,1,2,1,1,1,2,2,1,2 -0,0,1,1,2,0,1,0,2,2,1 -2,0,0,2,1,0,2,0,0,2,1 -0,2,0,1,0,1,1,2,0,2,1 -0,1,2,2,1,0,1,0,2,2,1 -2,1,2,2,1,1,0,1,0,2,0 -1,2,2,2,1,2,2,0,2,1,0 -0,0,0,2,1,0,0,2,2,1,1 -1,0,0,0,1,0,0,2,2,1,1 -2,2,0,0,0,0,2,2,2,2,1 -2,1,2,1,2,2,1,0,1,2,1 -2,2,1,2,2,0,1,1,0,2,2 -2,1,0,2,2,2,1,1,0,1,1 -1,0,0,1,2,2,1,1,2,0,0 -1,2,2,0,2,0,1,1,0,0,2 
-1,1,0,2,1,0,2,0,0,1,2 -1,2,0,1,1,0,1,1,0,2,0 -1,2,0,1,1,0,2,2,2,2,2 -1,2,1,2,2,1,1,2,2,0,1 -0,1,1,0,0,1,0,1,1,2,2 -0,0,0,1,0,0,0,0,1,0,2 -0,1,1,1,0,2,2,2,0,1,0 -1,0,1,0,1,1,2,0,2,0,2 -0,1,0,1,2,0,1,1,2,2,0 -2,0,2,1,2,1,0,2,2,0,2 -0,2,1,1,2,1,2,2,2,2,0 -0,1,0,2,2,0,0,0,2,2,2 -2,0,1,1,0,2,1,1,1,1,2 -2,2,1,1,1,2,1,1,2,2,0 -0,0,0,0,0,1,0,2,2,0,0 -0,0,0,2,2,1,0,2,0,1,1 -1,2,0,0,2,2,2,2,0,1,0 -1,1,2,0,2,2,1,0,1,0,0 -1,0,0,2,0,1,0,0,2,0,0 -2,1,2,1,0,1,2,1,1,2,2 -1,0,2,0,2,1,1,2,0,1,2 -1,0,2,0,2,0,0,0,2,2,1 -2,0,2,1,1,2,0,0,2,1,0 -1,0,1,0,2,0,2,2,1,0,2 -0,1,2,1,0,0,0,2,1,1,2 -0,0,1,0,2,1,0,0,1,0,2 -1,0,0,0,1,2,1,2,0,2,1 -0,2,1,0,0,1,1,1,0,0,0 -2,0,2,1,0,0,0,0,0,1,1 -1,0,0,1,1,2,2,1,2,1,0 -2,2,1,2,2,0,2,1,1,2,2 -0,2,2,1,2,0,1,1,2,0,1 -1,0,2,1,1,2,0,0,1,0,0 -1,0,1,1,0,1,1,2,1,1,1 -1,1,0,1,1,2,2,0,0,2,0 -1,0,2,2,2,1,1,1,1,2,1 -1,1,2,1,0,0,1,1,2,2,1 -0,1,0,0,2,1,0,1,2,0,2 -0,0,0,2,2,1,2,0,2,1,1 -2,0,2,1,1,1,0,0,0,0,1 -2,0,0,1,2,1,1,1,0,1,1 -1,0,1,0,0,0,0,1,1,0,0 -0,1,2,1,2,0,2,0,1,2,1 -0,0,2,1,0,0,2,1,1,2,1 -0,2,1,1,1,0,2,0,2,0,2 -2,1,1,2,0,0,0,1,1,0,0 -2,1,1,0,2,1,0,0,0,1,0 -0,2,1,0,0,1,2,2,2,0,2 -1,0,2,0,1,0,2,0,2,2,0 -1,1,1,1,2,2,1,0,1,0,0 -1,0,1,0,2,2,1,2,0,1,0 -2,0,2,2,1,2,1,0,2,0,2 -2,1,1,1,0,0,0,1,0,1,0 -1,1,1,1,2,1,2,1,0,0,2 -2,0,2,2,2,1,1,2,0,1,1 -0,1,1,2,2,1,2,2,1,0,0 -0,0,1,1,2,2,0,1,1,0,2 -2,2,2,1,0,0,1,1,2,2,1 -2,0,1,2,0,1,0,1,1,2,2 -0,1,0,2,2,1,2,1,1,1,0 -1,1,2,1,1,0,2,0,1,0,2 -2,1,0,0,1,2,2,2,2,1,0 -2,2,0,0,1,1,1,1,1,0,0 -2,0,1,1,1,1,1,0,1,2,0 -1,2,2,1,2,1,2,2,2,0,1 -1,0,2,0,1,0,1,1,0,0,0 -2,1,0,1,0,1,0,1,2,2,2 -0,1,2,2,0,0,1,1,2,2,1 -1,2,1,1,1,1,1,2,0,1,0 -2,2,1,1,2,2,1,2,0,2,2 -2,2,0,2,0,1,1,0,0,0,2 -1,2,2,0,0,2,0,0,1,1,2 -2,1,2,2,1,0,0,2,1,0,1 -1,1,2,0,0,2,2,0,0,0,2 -0,2,0,0,2,0,2,0,1,2,2 -0,1,0,0,0,1,2,1,0,0,2 -1,2,0,2,2,0,2,2,1,1,1 -2,0,1,0,2,1,1,1,2,2,2 -0,0,1,0,2,0,0,1,2,1,2 -0,1,0,1,2,1,2,2,0,0,2 -2,2,2,2,0,1,1,1,1,0,2 -2,2,2,2,0,1,1,2,0,1,0 -1,2,1,1,0,1,0,0,2,0,2 -0,1,0,0,1,0,2,1,0,0,0 -2,2,1,1,2,2,2,2,0,2,2 -0,2,2,0,1,2,2,0,2,0,0 -0,1,0,1,2,1,1,1,2,2,2 -0,1,2,2,1,2,0,1,0,0,2 -1,0,1,0,0,2,1,1,0,2,1 -2,0,1,0,0,0,1,1,0,0,0 -2,2,2,1,0,2,1,2,0,0,2 -2,0,1,0,2,2,0,2,0,2,2 -0,0,0,1,2,2,2,0,0,1,2 -0,2,0,1,2,0,1,1,0,2,1 -0,1,2,0,2,0,1,1,2,0,1 -1,1,1,0,0,1,2,2,1,1,1 -2,1,2,0,0,1,0,0,0,2,2 -2,1,2,2,2,1,2,0,0,2,2 -0,0,0,2,0,1,1,1,0,0,1 -0,2,0,2,2,2,2,2,0,1,1 -2,2,2,0,1,1,1,0,1,0,0 -1,0,1,1,2,2,2,1,0,0,0 -2,1,2,2,1,1,0,0,1,1,2 -0,1,2,1,2,1,0,0,1,1,2 -1,2,2,0,1,2,2,0,1,1,0 -0,2,2,0,0,1,0,2,2,2,0 -1,2,2,2,1,2,2,2,0,1,0 -1,0,2,1,2,2,2,2,1,1,2 -2,0,2,0,1,1,1,2,1,2,1 -0,1,0,1,0,1,1,2,1,2,1 -2,2,0,1,2,2,2,2,0,2,2 -1,0,2,2,0,1,1,0,0,0,1 -0,0,0,1,2,0,2,1,2,0,2 -0,0,0,1,2,2,2,0,1,1,0 -0,1,0,1,1,0,0,1,1,0,1 -2,0,2,0,1,0,0,2,1,2,2 -1,0,1,2,2,1,0,0,1,2,0 -2,0,2,0,1,0,0,1,2,1,0 -1,0,1,0,0,1,0,0,2,1,0 -2,1,2,2,0,1,1,2,2,1,2 -2,1,2,1,1,0,1,0,1,2,2 -1,0,0,2,2,0,1,1,1,1,0 -1,0,2,1,2,0,2,2,2,2,2 -1,0,0,0,2,1,2,0,0,0,2 -1,0,0,2,2,2,0,1,2,0,0 -0,1,1,0,2,2,1,2,1,1,0 -1,2,2,2,2,0,1,0,2,2,2 -1,0,1,1,2,1,2,0,1,1,1 -1,1,0,2,2,1,1,1,2,2,1 -2,2,1,0,2,2,2,0,1,2,1 -1,0,0,0,1,2,0,1,2,0,1 -0,1,2,0,2,2,1,1,0,1,2 -1,2,1,2,2,0,2,0,0,0,2 -0,0,0,1,2,2,1,0,1,0,1 -2,0,1,0,1,2,0,0,2,1,1 -1,0,0,2,2,0,2,1,0,0,1 -1,0,1,1,0,2,2,2,1,2,0 -0,0,1,1,0,2,1,1,0,1,0 -2,2,1,0,2,0,0,1,0,1,1 -1,1,0,1,2,0,1,2,2,0,0 diff --git a/bird/postprocess/SA_optimization/get_csv.py b/bird/postprocess/SA_optimization/get_csv.py index 1876d485..71040989 100644 --- a/bird/postprocess/SA_optimization/get_csv.py +++ b/bird/postprocess/SA_optimization/get_csv.py @@ -31,8 +31,8 @@ def get_config_result(study_fold: str = ".") -> None: count = 0 # Save data into 
CSV files - xfname = f"Xdata_{study_fold}.csv" - yfname = f"ydata_{study_fold}.csv" + xfname = os.path.join(study_fold, f"Xdata.csv") + yfname = os.path.join(study_fold, f"ydata.csv") with open(xfname, "w", newline="") as csvfile: writer = csv.writer(csvfile) for sims in results: @@ -52,6 +52,10 @@ def get_config_result(study_fold: str = ".") -> None: if __name__ == "__main__": - studies = {"study_scaleup_0_4vvm_3000W": r"0.0036$m^3$ 0.4vvm 0W"} + studies = { + "study_scaleup_0_4vvm_3000W": r"608$m^3$ 0.4vvm 3000W", + "study_scaleup_0_1vvm_6000W": r"608$m^3$ 0.1vvm 6000W", + "study_0_4vvm_1W": r"0.00361$m^3$ 0.4vvm 1W", + } for study in studies: get_config_result(study) diff --git a/bird/postprocess/SA_optimization/get_optimal.py b/bird/postprocess/SA_optimization/get_optimal.py index b667fbb9..0fdf2137 100644 --- a/bird/postprocess/SA_optimization/get_optimal.py +++ b/bird/postprocess/SA_optimization/get_optimal.py @@ -1,3 +1,4 @@ +import os import random import warnings @@ -90,6 +91,7 @@ def run_optimization( n_runs: int = 10, max_iters: int = 1000, bootstrap_size: int = 100, + out_folder: str = ".", ): """ Bootstraps data, runs optimization and postprocesses the results. @@ -115,6 +117,8 @@ def run_optimization( maximum number of SA iterations bootstrap_size : int size of bootstrap samples + out_folder: str + folder where to output the results Returns ---------- @@ -175,7 +179,10 @@ def run_optimization( ] ) df.to_csv( - f"best_bootstrap_solution_{model_type}_size_{bootstrap_size}.csv", + os.path.join( + out_folder, + f"best_bootstrap_solution_{model_type}_size_{bootstrap_size}.csv", + ), index=False, ) @@ -211,11 +218,17 @@ def run_optimization( ) pretty_legend(fontsize=16, fontname="Times") plt.savefig( - f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}.png", + os.path.join( + out_folder, + f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}.png", + ), dpi=300, ) plt.savefig( - f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}.pdf", + os.path.join( + out_folder, + f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}.pdf", + ), ) plt.show() @@ -255,24 +268,56 @@ def run_optimization( pretty_legend(fontsize=16, fontname="Times") plt.tight_layout() plt.savefig( - f"Mean_L1_distance_{model_type}_size_{bootstrap_size}.png", dpi=300 + os.path.join( + out_folder, + f"Mean_L1_distance_{model_type}_size_{bootstrap_size}.png", + ), + dpi=300, + ) + plt.savefig( + os.path.join( + out_folder, + f"Mean_L1_distance_{model_type}_size_{bootstrap_size}.pdf", + ) ) - plt.savefig(f"Mean_L1_distance_{model_type}_size_{bootstrap_size}.pdf") plt.show() if __name__ == "__main__": - # Read data from the csv file. - X_raw_data = pd.read_csv("Xdata_study_scaleup_0_4vvm_3000W.csv") - y_raw_data = pd.read_csv("ydata_study_scaleup_0_4vvm_3000W.csv") - - X = X_raw_data.values - y = y_raw_data.iloc[:, :-1].values - # We want to maximize, so we minimize the opposite - y = y * -1 - - # The function will build, tune, and optimize the surrogate model and postprocess the results. - run_optimization( - X, y, model_type="rbf", n_runs=10, max_iters=1000, bootstrap_size=150 - ) + # studies = ["study_scaleup_0_4vvm_3000W", "study_scaleup_0_1vvm_6000W", "study_0_4vvm_1W"] + studies = ["study_scaleup_0_1vvm_6000W", "study_0_4vvm_1W"] + # studies = ["study_scaleup_0_4vvm_3000W"] + + for study in studies: + # Read data from the csv file. 
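# Hedged standalone sketch (not code from this patch) of how the n_runs
# bootstrap convergence traces produced by the call below are reduced to
# the mean curve and 95% confidence band used in the plots
# (mean +/- 1.96 * std / sqrt(n_runs)). The (n_runs, max_iters + 1) trace
# array shape is an assumption based on the plotting code in this file.
import numpy as np


def mean_ci(traces: np.ndarray):
    """traces: array of shape (n_runs, max_iters + 1) holding the
    best-so-far objective value per iteration, one row per bootstrap run."""
    n_runs = traces.shape[0]
    mean_trace = traces.mean(axis=0)
    std_trace = traces.std(axis=0)
    half_width = 1.96 * std_trace / np.sqrt(n_runs)
    return mean_trace, mean_trace - half_width, mean_trace + half_width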
+ X_raw_data = pd.read_csv(os.path.join(study, "Xdata.csv")) + y_raw_data = pd.read_csv(os.path.join(study, "ydata.csv")) + + X = X_raw_data.values + y = y_raw_data.iloc[:, :-1].values + # We want to maximize, so we minimize the opposite + y = y * -1 + + # The function will build, tune, and optimize the surrogate model and postprocess the results. + run_optimization( + X, + y, + model_type="rbf", + n_runs=10, + max_iters=1000, + bootstrap_size=150, + out_folder=study, + ) + run_optimization( + X, + y, + model_type="rf", + n_runs=10, + max_iters=1000, + bootstrap_size=150, + out_folder=study, + ) + # run_optimization( + # X, y, model_type="nn", n_runs=10, max_iters=1000, bootstrap_size=150, out_folder=study + # ) diff --git a/bird/postprocess/SA_optimization/get_optimal_with_constraint.py b/bird/postprocess/SA_optimization/get_optimal_with_constraint.py index 4bfc7b02..b4ec9b52 100644 --- a/bird/postprocess/SA_optimization/get_optimal_with_constraint.py +++ b/bird/postprocess/SA_optimization/get_optimal_with_constraint.py @@ -1,3 +1,4 @@ +import os import random import numpy as np @@ -102,6 +103,7 @@ def run_optimization( n_runs: int = 10, max_iters: int = 1000, bootstrap_size: int = 100, + out_folder: str = ".", ): """ Bootstraps data, runs optimization and postprocesses the results. @@ -129,6 +131,8 @@ def run_optimization( maximum number of SA iterations bootstrap_size : int size of bootstrap samples + out_folder: str + folder where to output the results Returns ---------- @@ -190,7 +194,10 @@ def run_optimization( ] ) df.to_csv( - f"best_bootstrap_solution_{model_type}_size_{bootstrap_size}_max_spargers_{max_spargers}.csv", + os.path.join( + out_folder, + f"best_bootstrap_solution_{model_type}_size_{bootstrap_size}_max_spargers_{max_spargers}.csv", + ), index=False, ) @@ -229,32 +236,42 @@ def run_optimization( ) pretty_legend(fontsize=16, fontname="Times") plt.savefig( - f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}_max_spargers_{max_spargers}.png", + os.path.join( + out_folder, + f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}_max_spargers_{max_spargers}.png", + ), dpi=300, ) plt.savefig( - f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}_max_spargers_{max_spargers}.pdf", + os.path.join( + out_folder, + f"Mean_Convergence_plot_{model_type}_size_{bootstrap_size}_max_spargers_{max_spargers}.pdf", + ), ) plt.show() if __name__ == "__main__": - # Read data from the csv file. - X_raw_data = pd.read_csv("Xdata_study_scaleup_0_4vvm_3000W.csv") - y_raw_data = pd.read_csv("ydata_study_scaleup_0_4vvm_3000W.csv") - - X = X_raw_data.values - y = y_raw_data.iloc[:, :-1].values - # We want to maximize, so we minimize the opposite - y = y * -1 - - # The function will build, tune, and optimize the surrogate model and postprocess the results. - run_optimization( - X, - y, - model_type="rbf", - max_spargers=8, - n_runs=10, - max_iters=1000, - bootstrap_size=100, - ) + studies = ["study_scaleup_0_4vvm_3000W"] + + for study in studies: + for nsparg in [1, 2, 3, 4, 5, 6, 7, 8]: + X_raw_data = pd.read_csv(os.path.join(study, "Xdata.csv")) + y_raw_data = pd.read_csv(os.path.join(study, "ydata.csv")) + + X = X_raw_data.values + y = y_raw_data.iloc[:, :-1].values + # We want to maximize, so we minimize the opposite + y = y * -1 + + # The function will build, tune, and optimize the surrogate model and postprocess the results. 
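# Hedged standalone sketch (not code from this patch) of the feasibility
# check behind max_spargers: assuming a value of 1 in the discrete
# configuration vector marks a sparger, a candidate is valid when its count
# of ones does not exceed max_spargers. The example vector is hypothetical
# and uses the default problem dimension of 12 from the SA function above.
import numpy as np


def within_sparger_budget(x: np.ndarray, max_spargers: int) -> bool:
    # number of design slots assumed to hold a sparger (value == 1)
    return int(np.sum(x == 1)) <= max_spargers


x_candidate = np.array([0, 1, 2, 1, 0, 1, 2, 0, 1, 0, 2, 1])
print(within_sparger_budget(x_candidate, max_spargers=8))  # True: 5 <= 8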
+ run_optimization( + X, + y, + model_type="rbf", + max_spargers=nsparg, + n_runs=10, + max_iters=1000, + bootstrap_size=150, + out_folder=study, + ) diff --git a/bird/postprocess/SA_optimization/results.pkl b/bird/postprocess/SA_optimization/results.pkl deleted file mode 100644 index 10aa5fe88138e206188453a8ec77f9050aaa9765..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5208 zcmXxoc{r3?7yxi7l`Pj*n6VFIhA4X_OR{#-DwRv65|Kn5NpwZlC`7vICav00NwnBv z$}*POZmC;WN=3JgwCLXNeD@uHcplHs`<`>=obP?lH%W4Zz8W(0PhtTF?W2BWkB^Sp zT;S!ZcA&_wr~h#FBZ_V(z`xmiCo{SPO78KYBRoJAAt?tPCUCbo)y*8J@)M#f+U3c&`}(LuzzipoAMEE&M)G#HD+ zjJBJ+incMaqxm})Mp|Hc2`YjCW7;|cKj!D48yHoCF~bY`Udhnb9`V^>jG2Xz0vLNi z*0hEp7Rf0Hl$T>)Qo#sGOv4 z80VMMRZMRVF|qylp)&o282N!2%0#`2OQF}-sTKH@m)?lcA5u$5>6?uQ6(p+O*k?+?2_*J=p*1>*pUB8G-Ny$|ojdo}^4-LGN^Qf=o{7Pz28`3p z8VvgP7%^dvaF=@VN{l?gEZ^F!@;dPkvEGu3TEjEk#$%FV>+ zJQ!o7!Y+duNvu`{{LMwMzYN&zzu``60iqX{udh)y@R zYY|3g!A#}Xw$4^z&~b&L+R|)yjBZP9Qj@Kgz7lPwziSw1e8I>9OsD@|QKvb5&N{pe zoQDoZVx$3PaxAsm`qeKaPSD5sQ8hUkqed|9lRX;0uAtX0u3Zv-FUTCDKxrKB=d;pV z$|SDmXI}B}O)6&$7>BjyYxLNCV&c>V>AQAP=Vd#X_Ry!QdFKuiVRT!0knWy*c3Xi-)Op{LqWF3!fN7|i6WRr&h?`q>oJW&Yz7a}pyTFjkS_^!SEnp4yfGW6moJx`y8&rQ#2Fp8AgzQiHbdtt=72Ieq+Q=)2trPPAi0n4g4 z&~Y99dUUbOX$MAbeE$Ts1L?>bUsD0Z%cK1vC9I zZ8XfpnwZEX@8BTR9r!nib*w&9nxP?w(GoDD9CMY!p8wanSqz_C1&mRU zG>)lx(Cu+IiR&x~ZuUr$!Dts4Yx}k9vPEZz^)R(O7tf$R>u4~hqe)0!`buIC9qNx> z^tg`ETrfuJhtMq*^N6+9`B=P;^}(nLOl96q=UtgIh~0i1f^JOe#wZtzy?ws@`OOky zB1W4)+4LYrzk#vNSuLpdrk^8$-__MAYpAoN3#L_iGx);(1QORW5HZYq%)rP1OmoWw zcH&$ABYVE6b{jpOr`J1PGBwW}FjiWXR<%BTJ_TQ7a%>B=F-ik7{35K&3fxTM zI#xaCo&0?(M&@Ab4-1}Wzw{;6vu{J@@`iI5se+lzd6!z_Ii0pQ8LrVAsLxsk%xq6b zmGweLVq%S1!ebn&&W{5#X4@~=(>g-OjfxvqnU`X;3QW&t0>4JEh*+!0<9=lLPmI=s znJ-(Cuut_dF;PsDpFb}bqi`^zeZ@c9gXlf#QSFScEv3$UveYi`JR8ALpvOBiwB(uE z4vZqejQtG*Om4{&>;7#hY)MuFMgd@!0_E6wHFRI;4VirKaW*y2O<>v?^6u-0J4hUl zyUjgGl82E47-`xEXqdYJZ|K)}Cs&t%y zHG9^&KdD+UMQR@{=QETp(er${p}+8CJ4Q3X7#{=)1qF1SX#FtISno~shs|Kxp1gD= zNiT_esO1=|IC2N0iD0Z7kIJ2WY>0LG#&DC*PQXYW%yh3tYk80rG2z~SE9~=ZG4cn~ zl_~f~DD#n6o6Re;2XiYi`c)dIp74p`mqSdDywr2E-WrT{fEf*Uz8?HJO02tK-eGcU-ISjt4JBE=9n9++@g_>zE=(xcp>>DC4jP8Q5=fvGV z^(Kj!Fnut$=J9Ndwn=lUZR_0as^;)8at1SAzhrHSQy-l(yEufqzyhP)U}mNriNj*L-%9%bGJaY{ zo!vxfoR#CLnjdss>ZxDhJincfQ6-qs)#qZjdb)>q7bdTXnB;yI> z&m1hRn1#_oFwT^mrCo!y#3XiQ@!D^bFe(DmDSu+Q-ETZGz6npkSDCsmEe6x`7dWw8 z8i{pk4llcEL+$NyFr!5VqRw$<#JY?JM!9G&M)lJ1G}A)7OX(gUJY+VbyIBzwS2=4lzObYVWTPsAuJ-G^hRh*V-%oCf0o^ zF6+`ms!p5(({biLy13{8F@BD0cw5>iMpj_VX89d6AB+)e|FTZ(c1<56J23rE)&Vt> zGKh8jYV&#c&VG!hf-yQ(mR8nXC)RqwcewL-HAd=ShTr(nwqJ{g^-THHnv|o2(K@iH zHaV)5GhB%^=Bh8dysR4|Q!u^h(G|C9nuzh!oV<5FqUus180$pT)`b?Bm_Rqipjw~m zW7ENmJ`~0;RVt%xfm@lHc@ai(VABt5uCH2il-MJV`{~GX>dwHC<_zF*6>R9<-#s&x zC#mnkC=^V$`R>NJ>GYZJT)Wi1@1p=CZ7|iWH22ld=^kG9t>K@O;naJ8H<&?8lI@-l z`gv%Jy|R%}XNA!jFrzNb*OId_WG>w$B`rOl_h2+0%rs~Ftz%>K9ZVd-2yEfK!-x%L zIc>@D%Jx`#4VOjBdlk|!+5pCQbYfECfCI6Io=my@%fDlE2Ta!~;#6D3WMXYml2M5_ z^`0{yjG6t5^Xs>C?P=9+egE#%A&la{R8L)-v#OC^-`#jY^2UB@A6&uo^1MeXU5}BR z?fF?H+8WgRLn@fTmv#3__G~1^{}wv7;u}?$Jis_nqHCJ6nZ!gBEj6E2q+m1y%*-L< z%HKzeiAgTjO+1rtgwa}QTybEkrA7!HR~g!zKT4gKWnfHQX85u1|KC5SZ3=Y}QqTJZ zF!rMIzD-8-_Y6Yk$Dz96R1eq-#;tVxu!bR~$II_M-^ZMZksFxa(3a*M8BWCbKNBOw zAzB#agPFGkWJQ!~5_{CcZ?QF|p6M(w4c#$~#k@RXO;gh1(=SpzV5M}t(ya229rQU9 zBrlHbIZ54LFqolGZp-kv|IgWxP1R~ArebswOz+5`Q@O`-dM+;&JF>^$#b}l^XLa`a zn2S}!#6owqh#&qK*?=*^9IZl}=>2O+u`UW0wqkS@%=pmh&wr1n_pc`;v|^5j3r6?A znAhIcv?kk=oE?p|-#6@|-eH+wrUirPu5;U zsZrm}906m`nEBmBfj)!66`MtWL{hcjD421uebi7z6B)0|zpTpO-DHf8gJ~>&x%`0o 
zWnxYC&s7+M)ZXfV>GaElzWo?ZtkqxslM3e(M!{f){pFn@WjxOyYHvfpO!O9(-qx-pCh$C7lA^gCqcAX| zmAh4BAJ9Fg+i!7Z>Qw69lL5vt8(kCrmhR!g{N0MqCJ!;P2Q!?a{iry>o8;uX-g)M_ bmx?O~)77r4$+|=Lqt2Z=yn#3t#+Ls9#_kjd diff --git a/bird/postprocess/SA_optimization/temp b/bird/postprocess/SA_optimization/temp deleted file mode 100644 index 78981922..00000000 --- a/bird/postprocess/SA_optimization/temp +++ /dev/null @@ -1 +0,0 @@ -a diff --git a/bird/postprocess/SA_optimization/ydata_study_scaleup_0_4vvm_3000W.csv b/bird/postprocess/SA_optimization/ydata_study_scaleup_0_4vvm_3000W.csv deleted file mode 100644 index e224b58a..00000000 --- a/bird/postprocess/SA_optimization/ydata_study_scaleup_0_4vvm_3000W.csv +++ /dev/null @@ -1,185 +0,0 @@ -4.639249158318668,0.12626038361898534 -4.830205075462752,0.1345759778255486 -11.883263228093135,0.3264360683677917 -13.920765170099399,0.4105486654309187 -6.542159679067755,0.2034214300932224 -12.152375869450877,0.36239501906304267 -8.022225385816165,0.2748815895051835 -9.57833436748858,0.28111820963360595 -7.621157957714864,0.25567306815826935 -6.854083596204951,0.2298569939977962 -11.145584784788815,0.31811738493951314 -11.017594168736228,0.3184159068790033 -13.50973761231716,0.3752779584066442 -12.030553522901826,0.3626718350283127 -8.500853761159016,0.2514870108881235 -8.743216029351826,0.2670107363696459 -11.717721199440591,0.3128707825208673 -11.345565509701485,0.353472952974596 -8.988391479133291,0.2580096856927115 -8.543409441773761,0.262301422633598 -6.560127864354076,0.20837078993418331 -6.4149526976824855,0.19347669879343268 -13.741028936629446,0.37516250222124864 -10.260225187485267,0.30671415538616664 -12.38699282309954,0.33678185294994506 -15.459376105064136,0.447246924226177 -9.09803267916285,0.304227995563132 -6.740910374100037,0.21154083742443155 -13.191334348313264,0.34796851870506956 -13.70507196126807,0.38117142223133704 -11.518308973994909,0.35289543460220707 -11.515168788856977,0.3461668678795173 -7.224363681870076,0.23657610962538503 -10.805708473886197,0.3263647525674359 -12.932602535390258,0.370807747121449 -10.892569560758172,0.31241264469905766 -12.069083972134544,0.36751044492654417 -6.4823749997684805,0.19262760715138516 -11.231649637811778,0.3277463243939802 -4.270684191558357,0.10987581442252059 -13.76528529571703,0.38285304273583004 -10.152583557069674,0.3408235347109574 -10.986707238586106,0.324251636154978 -9.79517497061306,0.3029635961468728 -10.419543443002388,0.3170020475695413 -10.258773239532914,0.27979900920419293 -10.189745733831108,0.3250937746551829 -6.514063372968979,0.2268487917845758 -13.953941108502349,0.37603794998487483 -11.992362749779748,0.34762137147830285 -7.882404941588837,0.23711080920489294 -9.275719183296298,0.26904923614363635 -7.776457060294847,0.25905651438188215 -9.725980401340914,0.2985287367453095 -6.807759844786992,0.2048265466788003 -6.630803695687349,0.21680559865756013 -4.063931442420922,0.11272271250075441 -11.225122173481324,0.3453857489445164 -9.82654794320513,0.31766875974197384 -6.157926107439263,0.18678044797135201 -5.855484586386016,0.18450597017794995 -9.18587411892937,0.2978687770910763 -9.88460112835837,0.2705988868749546 -6.841601401913933,0.2050600340598003 -12.79640726757815,0.3623376347189863 -12.83899374588669,0.3975083213578766 -8.457051563045496,0.26991461104084247 -9.763108036495394,0.3275758151015267 -8.984052674188392,0.23708461040402135 -11.471274422120723,0.3049553719602422 -10.615101031292475,0.311189711506409 -13.247670835151544,0.36060925350524264 -10.074632957437625,0.31206315974184845 
-7.549823041704327,0.21747657909403267 -13.660770312425418,0.3507548164008165 -9.258995020455252,0.29796230302465954 -9.338803482609384,0.3070023813054725 -10.168223206620057,0.289058067197555 -15.414172835849321,0.4436665921077268 -6.382847851314592,0.18876460404289166 -12.541321134236878,0.3777538153682838 -14.625672661663916,0.38802297600654273 -9.2131453370491,0.2672227550883449 -6.020912999143897,0.18351282428727747 -9.594482823382547,0.3548268182647394 -9.434406063121779,0.32561538569508414 -11.27709117666853,0.3170389795034779 -11.85440229800048,0.3713104751517777 -8.143103526931212,0.24373125334484186 -5.987413503473131,0.19209801113080932 -10.66860645276813,0.3146498728341653 -10.973009201937053,0.326561903194073 -6.736162134712763,0.21831998403883163 -6.319124326915894,0.20386451019469645 -9.85518408546759,0.2547269254087896 -11.752111783293545,0.3360594969365309 -7.2505681489712845,0.22033395375408343 -11.538062664951463,0.32045394779240677 -9.480109510593579,0.2931065455457869 -7.24841607883414,0.23832775051957591 -10.347675591461396,0.28753868506914715 -10.686337772821627,0.30684839551276677 -9.822044747899417,0.32661245475083844 -9.447079333421648,0.3112984064131052 -11.114202667483596,0.3364211476686481 -9.532358320060213,0.31236106613200026 -11.688464102351732,0.35487546667211 -6.532609784598158,0.22168781874321142 -10.192019448061677,0.3054567325499729 -13.077747284183287,0.38903025246203415 -10.096158138823926,0.30867510872414256 -9.343519904040358,0.290375320672992 -11.40697959771358,0.31308878923916295 -12.020609433055474,0.3782750313181893 -7.172718740262101,0.24334049751097286 -7.584794001842911,0.2514622981514563 -9.152594292536907,0.2678778098535802 -12.045947232357136,0.33710722608802324 -4.379843967798498,0.10762641890440137 -3.9571319937219607,0.113635341004926 -9.604472609306962,0.27473135213632965 -8.418838881573317,0.25553219416942385 -10.210954922063188,0.323263474717489 -9.607306751578054,0.29177678625562137 -7.060706536012455,0.2464907666663334 -6.943395438649984,0.23411188612717468 -7.987496774296126,0.23715829335149538 -6.703187252555086,0.21613478186303836 -8.989863908497496,0.29754347680337007 -5.3003428235059005,0.15283433472600066 -7.54340245910743,0.24628416098103528 -9.813350149916667,0.3142972297214279 -9.471843550104339,0.3170324937170163 -14.278150609504612,0.4087077718493328 -9.677308352105648,0.28421158422409315 -7.4440856645917135,0.25581770286934824 -4.385277812944313,0.13265263272190378 -8.22363437693505,0.2223718632499199 -9.408385570236735,0.29099268450210747 -11.77627353707695,0.36591204701029667 -15.043583831144797,0.4168601817074705 -6.7135010881993855,0.20526445170131055 -7.3099809259113115,0.22216331625583294 -6.8867769079468415,0.2117247511316945 -4.409863387744625,0.11906173881006474 -7.641879896764433,0.23103459137397314 -8.483160007686088,0.24982929105677146 -12.2184349027241,0.34412720042998773 -13.108464684640968,0.3854827942762161 -12.301487123370132,0.3546389998626981 -6.778424897986802,0.20380859414550845 -11.153693300454865,0.33196148938956604 -12.848113577592285,0.383963743119625 -7.7318602570176695,0.24995391622250263 -13.889362645764818,0.41917795613750286 -5.029285733575436,0.1565712068835105 -12.815511254920658,0.32045351604623495 -8.916057019146086,0.2593018338761806 -7.649338687785831,0.2492080696611349 -12.8604460492114,0.3723917179012849 -6.601788303188531,0.18369133369166799 -10.288367893781292,0.30306427295098 -6.925703924819166,0.2403483369335239 -11.790867067900965,0.32527202858137033 
-12.810837863572981,0.36388377313805176 -11.092170573659601,0.333338222107799 -10.189868448033742,0.2709142473792956 -5.078027062937434,0.15792477880845202 -7.223634361053522,0.21356319778744356 -8.948019498562504,0.24191457543885958 -13.222659710790236,0.3750710095241255 -8.076671575057484,0.2209566383161624 -15.312111223230433,0.3871211907716363 -14.170973262775446,0.42197025792806736 -9.32461479951292,0.29381079716589753 -13.309929308155027,0.3625732862415955 -11.21961903186075,0.3243213919583699 -4.66559471266868,0.1327459953758173 -8.65681333512524,0.2849376420634901 -10.051156775736247,0.33207149084178794 -10.559178404056045,0.31681027963019587 -9.170860767688787,0.2863959162680961 -12.08392615370928,0.3370654696996921 -8.519728499754038,0.24416325998118774 -7.275149264066791,0.2572700464241981 From 73245febea4d164df8a89c174749419e96d2b4a4 Mon Sep 17 00:00:00 2001 From: Malik Date: Mon, 9 Jun 2025 08:31:26 -0600 Subject: [PATCH 12/38] add script to generate paper figures --- .../SA_optimization/get_optimal.py | 30 +++++++++---------- .../SA_optimization/plot_qoi_vs_sparg.py | 13 ++++++++ 2 files changed, 28 insertions(+), 15 deletions(-) create mode 100644 bird/postprocess/SA_optimization/plot_qoi_vs_sparg.py diff --git a/bird/postprocess/SA_optimization/get_optimal.py b/bird/postprocess/SA_optimization/get_optimal.py index 0fdf2137..2aafa6f4 100644 --- a/bird/postprocess/SA_optimization/get_optimal.py +++ b/bird/postprocess/SA_optimization/get_optimal.py @@ -193,7 +193,7 @@ def run_optimization( upper_bound = mean_trace + 1.96 * std_trace / np.sqrt(n_runs) iterations = np.arange(max_iters + 1) - plt.figure(figsize=(8, 4)) + plt.figure(figsize=(8, 6)) plt.plot( iterations, mean_trace, @@ -210,13 +210,13 @@ def run_optimization( ) pretty_labels( "Iteration", - "Best Surrogate-Predicted Objective", - fontsize=16, + r"Predicted QOI [kg$^2$/kWh$^2$]", + fontsize=20, title=f"Mean Convergence with 95% Confidence Interval ({model_type.upper()})", grid=True, fontname="Times", ) - pretty_legend(fontsize=16, fontname="Times") + pretty_legend(fontsize=20, fontname="Times") plt.savefig( os.path.join( out_folder, @@ -286,8 +286,8 @@ def run_optimization( if __name__ == "__main__": # studies = ["study_scaleup_0_4vvm_3000W", "study_scaleup_0_1vvm_6000W", "study_0_4vvm_1W"] - studies = ["study_scaleup_0_1vvm_6000W", "study_0_4vvm_1W"] - # studies = ["study_scaleup_0_4vvm_3000W"] + # studies = ["study_scaleup_0_1vvm_6000W", "study_0_4vvm_1W"] + studies = ["study_scaleup_0_4vvm_3000W", "study_0_4vvm_1W"] for study in studies: # Read data from the csv file. 
@@ -309,15 +309,15 @@ def run_optimization( bootstrap_size=150, out_folder=study, ) - run_optimization( - X, - y, - model_type="rf", - n_runs=10, - max_iters=1000, - bootstrap_size=150, - out_folder=study, - ) + # run_optimization( + # X, + # y, + # model_type="rf", + # n_runs=10, + # max_iters=1000, + # bootstrap_size=150, + # out_folder=study, + # ) # run_optimization( # X, y, model_type="nn", n_runs=10, max_iters=1000, bootstrap_size=150, out_folder=study # ) diff --git a/bird/postprocess/SA_optimization/plot_qoi_vs_sparg.py b/bird/postprocess/SA_optimization/plot_qoi_vs_sparg.py new file mode 100644 index 00000000..b05b85fc --- /dev/null +++ b/bird/postprocess/SA_optimization/plot_qoi_vs_sparg.py @@ -0,0 +1,13 @@ +import numpy as np +from prettyPlot.plotting import * + +nsparg = [1, 2, 3, 4, 5, 6, 7, 8] +qoi_opt = [13.485, 14.276, 14.635, 15.692, 16.072, 17.657, 19.349, 20.712] + +fig = plt.figure() +plt.plot(nsparg, qoi_opt, linewidth=3, color="k") +pretty_labels( + r"N$_{\rm sparg}$", r"Optimal QOI [kg$^2$/kWh$^2$]", 20, fontname="Times" +) +plt.savefig("marginal_gain.png") +plt.savefig("marginal_gain.pdf") From 0b290671d342f13a1bf7303930476c2dcd626de9 Mon Sep 17 00:00:00 2001 From: Malik Date: Mon, 16 Jun 2025 09:23:27 -0600 Subject: [PATCH 13/38] remove tabs --- .../base_tank/tank_par.yaml | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/bird/meshing/stirred_tank_mesh_templates/base_tank/tank_par.yaml b/bird/meshing/stirred_tank_mesh_templates/base_tank/tank_par.yaml index 3afc6376..69b86f12 100644 --- a/bird/meshing/stirred_tank_mesh_templates/base_tank/tank_par.yaml +++ b/bird/meshing/stirred_tank_mesh_templates/base_tank/tank_par.yaml @@ -1,23 +1,23 @@ geometry: - Dt: 0.26 # Tank diameter [m] - Da: 0.0866667 # Impleller tip Diameter [m] - H: 0.39 # Height of the reactor [m] - nimpellers: 1 - C: [0.0866667] # height of the center of impellers [m] - W: 0.026 # impeller blade width [m] - L: 0.013 # impeller blade length (beyond the hub) [m] - Lin: 0.013 # impeller blade length (inside the hub) - J: 0.026 # Baffle width - Wh: 0.0026 # Hub height (Width) - polyrad: 0.00866667 # Stem radius (R_shaft) - Z0: 0.0 # bottom of reactor - nbaffles: 6 # number of baffles and impeller fins + Dt: 0.26 # Tank diameter [m] + Da: 0.0866667 # Impleller tip Diameter [m] + H: 0.39 # Height of the reactor [m] + nimpellers: 1 + C: [0.0866667] # height of the center of impellers [m] + W: 0.026 # impeller blade width [m] + L: 0.013 # impeller blade length (beyond the hub) [m] + Lin: 0.013 # impeller blade length (inside the hub) + J: 0.026 # Baffle width + Wh: 0.0026 # Hub height (Width) + polyrad: 0.00866667 # Stem radius (R_shaft) + Z0: 0.0 # bottom of reactor + nbaffles: 6 # number of baffles and impeller fins mesh: - nr: 120 # mesh points per unit radial length - nz: 240 # mesh points per unit axial length - Npoly: 4 # mesh points in the polygon at the axis - Na: 6 # mesh points in the azimuthal direction + nr: 120 # mesh points per unit radial length + nz: 240 # mesh points per unit axial length + Npoly: 4 # mesh points in the polygon at the axis + Na: 6 # mesh points in the azimuthal direction From 5af30ba8c795a344f316a51590494b43df5dd0d0 Mon Sep 17 00:00:00 2001 From: Malik Date: Mon, 16 Jun 2025 09:23:49 -0600 Subject: [PATCH 14/38] add a check for tabs in yaml files --- bird/meshing/_mesh_tools.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/bird/meshing/_mesh_tools.py b/bird/meshing/_mesh_tools.py index 61618b2f..4d6044f8 
100644 --- a/bird/meshing/_mesh_tools.py +++ b/bird/meshing/_mesh_tools.py @@ -1,4 +1,5 @@ import json +import os import sys from pathlib import Path @@ -12,7 +13,31 @@ def parseJsonFile(input_filename): return inpt +def check_for_tabs_in_yaml(file_path: str) -> None: + """ + Checks if a YAML file contains any tab characters. + Raises a ValueError if tabs found. + + Parameters + ---------- + file_path: str + path to yaml filename + """ + + with open(file_path, "r", encoding="utf-8") as f: + lines = f.readlines() + for iline, line in enumerate(lines): + if "\t" in line: + raise ValueError( + f"Tab character found on line {iline} of '{file_path}'. " + "YAML files must use spaces for indentation." + ) + + def parseYAMLFile(input_filename): + if not os.path.exists(input_filename): + raise FileNotFoundError(input_filename) + check_for_tabs_in_yaml(input_filename) yaml = YAML(typ="safe") inpt = yaml.load(Path(input_filename)) return inpt From c4a09ec180554393ca00c4199380f3f25eda05cf Mon Sep 17 00:00:00 2001 From: Malik Date: Mon, 16 Jun 2025 09:24:21 -0600 Subject: [PATCH 15/38] use the yaml parse function instead of rewriting it --- bird/meshing/_stirred_tank_reactor.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/bird/meshing/_stirred_tank_reactor.py b/bird/meshing/_stirred_tank_reactor.py index dc3f2db5..265c8b41 100644 --- a/bird/meshing/_stirred_tank_reactor.py +++ b/bird/meshing/_stirred_tank_reactor.py @@ -2,7 +2,8 @@ from pathlib import Path import numpy as np -from ruamel.yaml import YAML + +from bird.meshing._mesh_tools import parseYAMLFile class StirredTankReactor: @@ -133,11 +134,6 @@ def __init__( def from_file(cls, yamlfile): if ".yaml" not in yamlfile: yamlfile += ".yaml" - if os.path.exists(yamlfile): - yamlpath = Path(yamlfile) - else: - raise FileNotFoundError(yamlfile) - yaml = YAML(typ="safe") - in_dict = yaml.load(yamlpath) + in_dict = parseYAMLFile(yamlfile) react_dict = {**in_dict["geometry"], **in_dict["mesh"]} return cls(**react_dict) From 7b1bfff4d2d9315264c8af3a7ab8c3d90708dae8 Mon Sep 17 00:00:00 2001 From: Malik Date: Tue, 17 Jun 2025 15:21:08 -0600 Subject: [PATCH 16/38] update the ci to test python 3.13 --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 852dc071..f56135d9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -52,7 +52,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.10', '3.11', '3.12'] + python-version: ['3.10', '3.11', '3.12', '3.13'] os: ['ubuntu-latest', 'macos-latest'] defaults: run: From 9e0db3bcbd48e2c99ff90bf1b65ce44b3885d80d Mon Sep 17 00:00:00 2001 From: Malik Date: Tue, 17 Jun 2025 15:25:03 -0600 Subject: [PATCH 17/38] remove intermediate python tests --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f56135d9..55a639ba 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -52,7 +52,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ['3.10', '3.11', '3.12', '3.13'] + python-version: ['3.10', '3.13'] os: ['ubuntu-latest', 'macos-latest'] defaults: run: From c5e1fa608e2be440da3255fdf3dc199665d3d4d8 Mon Sep 17 00:00:00 2001 From: Malik Date: Mon, 7 Jul 2025 19:06:20 -0600 Subject: [PATCH 18/38] document ofio --- bird/postprocess/conditional_mean.py | 2 +- bird/utilities/bubble_col_util.py | 2 +- bird/utilities/ofio.py | 498 +++++++++++++++--- 
.../bubble_column_pbe_20L/writeGlobalVars.py | 6 +- tests/io/test_case.py | 26 + tests/io/test_readOF.py | 110 ++++ tests/postprocess/test_cond_mean.py | 10 +- tests/postprocess/test_early_pred.py | 10 +- tests/postprocess/test_kla.py | 5 +- .../bubble_column_20L/writeGlobalVars.py | 6 +- .../loop_reactor_mixing/read_history.py | 30 +- .../loop_reactor_mixing/writeGlobalVars.py | 6 +- .../loop_reactor_reacting/writeGlobalVars.py | 6 +- 13 files changed, 618 insertions(+), 99 deletions(-) create mode 100644 tests/io/test_case.py create mode 100644 tests/io/test_readOF.py diff --git a/bird/postprocess/conditional_mean.py b/bird/postprocess/conditional_mean.py index 19918882..eb268146 100644 --- a/bird/postprocess/conditional_mean.py +++ b/bird/postprocess/conditional_mean.py @@ -88,7 +88,7 @@ def compute_cond_mean( diff=diff, ) else: - field_tmp = readOFScal(filename, nCells) + field_tmp = readOFScal(filename, nCells)["field"] vert_axis, field_cond_tmp = conditionalAverage( cellCentres[:, vert_ind], field_tmp, nbin=n_bins ) diff --git a/bird/utilities/bubble_col_util.py b/bird/utilities/bubble_col_util.py index b2d21795..068a42af 100644 --- a/bird/utilities/bubble_col_util.py +++ b/bird/utilities/bubble_col_util.py @@ -5,7 +5,7 @@ def readFromDict(val_dict, key, read_func=None, path=None, nCells=None): if key not in val_dict: - field = read_func(path, nCells) + field = read_func(path, nCells)["field"] val_dict[key] = field else: field = val_dict[key] diff --git a/bird/utilities/ofio.py b/bird/utilities/ofio.py index a8cd223f..43367689 100644 --- a/bird/utilities/ofio.py +++ b/bird/utilities/ofio.py @@ -4,86 +4,404 @@ import numpy as np -def readMesh(file): - A = np.loadtxt(file, usecols=(1, 2, 3)) - return A +def readMesh(filename: str) -> np.ndarray: + """ + Reads cell center location from meshCellCentres_X.obj + Parameters + ---------- + filename: str + meshCellCentres_X.obj filename -def readOFScal(file, nCells, nHeader=None): - # Check that the field is not a uniform field - try: - f = open(file, "r") - for i in range(20): + returns + ---------- + cell_centers: np.ndarray + Array (N,3) representing the cell centers (N is number of cells) + + """ + cell_centers = np.loadtxt(filename, usecols=(1, 2, 3)) + return cell_centers + + +def ofvec2arr(vec: str) -> np.ndarray: + """ + Converts a vector written as a string into a numpy array + + Parameters + ---------- + vec: str + Vector written as string + Must start with "(" + Must end with ")" + + returns + ---------- + vec_array: np.ndarray + Array (3,) representing the vector + + + """ + vec = vec.strip() + assert vec[0] == "(" + assert vec[-1] == ")" + + vec_list = vec[1:-1].split() + vec_array = np.array([float(entry) for entry in vec_list]) + return vec_array + + +def is_comment(line: str) -> bool: + """ + Checks if line is a comment + + Parameters + ---------- + line: str + Line of file + + returns + ---------- + is_comment: bool + True if line is a comment + False if line is not a comment + """ + is_comment = False + + sline = line.strip() + if sline.startswith("//"): + is_comment = True + elif sline.startswith("/*"): + is_comment = True + return is_comment + + +def read_meta_data(filename: str, mode: str | None = None) -> dict: + """ + Read meta data from field outputted by OpenFOAM in ASCII format + + Parameters + ---------- + filename: str + Field filename + mode: str | None + If "scalar", expects a scalar field + If "vector", expects a vector field + If None, obtained from field header + + returns + ---------- + meta_data: dict + 
Dictionary that contain info about the scalar field + ============= ===================================================== + Key Description (type) + ============= ===================================================== + name Name of the field (*str*) + n_cells Number of computational cells (*int*) + uniform Whether the field is uniform (*bool*) + uniform_value Uniform value if uniform field (*float* | *np.ndarray*) + type "vector" or "scalar" (*str*) + ============= ===================================================== + """ + meta_data = {} + meta_data["type"] = mode + + # Read meta data + with open(filename, "r") as f: + header_done = False + iline = 0 + while not header_done: line = f.readline() - f.close() - lineList = line.split() - if len(lineList) == 3 and lineList[1] == "uniform": - # Uniform field - val = float(lineList[2][:-1]) - Array = val * np.ones(nCells) + if not is_comment(line): + # Get field type + if (line.strip().startswith("class")) and (";" in line): + sline = line.strip().split() + field_type = sline[1][:-1] + if field_type == "volVectorField": + field_type = "vector" + elif field_type == "volScalarField": + field_type = "scalar" + else: + raise NotImplementedError + if mode is not None: + assert field_type == mode + meta_data["type"] = field_type + # Get field name + if (line.strip().startswith("object")) and (";" in line): + sline = line.strip().split() + field_name = sline[1][:-1] + meta_data["name"] = field_name + + # Check if uniform + if line.strip().startswith("internalField"): + if "nonuniform" in line: + meta_data["uniform"] = False + iline += 1 + # read until no comments + comment = True + while comment: + count_line = f.readline().strip() + if not is_comment(count_line): + comment = False + try: + n_cells = int(count_line) + meta_data["n_cells"] = n_cells + header_done = True + except ValueError: + raise ValueError( + f"Expected integer number of cells on line {iline}, got: '{count_line}'" + ) + elif "uniform" in line: + meta_data["uniform"] = True + sline = line.split() + if meta_data["type"] == "scalar": + unif_value = float(sline[-1].strip(";")) + elif meta_data["type"] == "vector": + unif_value = ofvec2arr(sline[-1].strip(";")) + else: + raise NotImplementedError( + f"Mode {mode} is unknown" + ) + meta_data["uniform_value"] = unif_value + header_done = True + + if len(line) == 0 and (not header_done): + raise ValueError( + f"File {filename} ends before meta-data found" + ) + + return meta_data + + +def readOFScal( + filename: str, + n_cells: int | None = None, + n_header: int | None = None, + meta_data: dict | None = None, +) -> dict: + """ + Read a scalar field outputted by OpenFOAM in ASCII format + + Parameters + ---------- + filename: str + Field filename + n_cells : int | None + Number of computational cells in the domain + n_header : int | None + Number of header lines + meta_data : dict | None + meta data dictionary + If None, it is read from filename + + returns + ---------- + data: dict + Dictionary that contain info about the scalar field + ======== ===================================================== + Key Description (type) + ======== ===================================================== + field For nonuniform fields, array of size the number of cells (*np.ndarray*). + For uniform fields with a specified n_cells, + array of size the number of cells (*np.ndarray*). 
+ For uniform fields, a scalar value (*float*) + name Name of the scalar field (*str*) + n_cells Number of computational cells (*int*) + n_header Number of header lines (*int*) + ======== ===================================================== + """ + field = None + + if meta_data is None: + # Read meta data + meta_data = read_meta_data(filename, mode="scalar") + + # Set field + if meta_data["uniform"]: + if n_cells is None: + field = meta_data["uniform_value"] else: - # Not a uniform field - f = open(file, "r") - if nHeader is None: - # Find header - lines = f.readlines() - for iline, line in enumerate(lines[:-2]): - if str(nCells) in lines[iline] and "(" in lines[iline + 1]: + field = meta_data["uniform_value"] * np.ones(n_cells) + else: + n_cells = meta_data["n_cells"] + + # Get header size + if n_header is None: + oldline = "" + newline = "" + iline = 0 + eof = False + with open(filename, "r") as f: + while iline < 100 and (not eof): + line = f.readline() + if len(line) == 0: + eof = True + oldline = newline + newline = line + if str(n_cells) in oldline and "(" in newline: + n_header = iline + 1 break - nHeader = iline + 2 - f.close() - Array = np.loadtxt(file, skiprows=nHeader, max_rows=nCells) - except Exception as err: - print("Issue when reading %s" % file) - print(err) - sys.exit() - return Array + iline += 1 + else: + raise ValueError( + "Could not find a sequence {n_cells} and '(' in file ({filename})" + ) + # Rapid field read + try: + field = np.loadtxt(filename, skiprows=n_header, max_rows=n_cells) + except Exception as err: + print(f"Issue when reading {filename}") + print(err) + sys.exit() -def ofvec2arr(vec): - vec_list = vec[1:-1].split() - vec_float = [float(entry) for entry in vec_list] - return np.array(vec_float) + return { + "field": field, + "name": meta_data["name"], + "n_cells": n_cells, + "n_header": n_header, + } -def readOFVec(file, nCells, nHeader=None): - # Check that the field is not a uniform field - try: - f = open(file, "r") - for i in range(20): - line = f.readline() - f.close() - lineList = line.split() - if len(lineList) == 3 and lineList[1] == "uniform": - # Uniform field - raise NotImplementedError - val = ofvec2arr(lineList[2][:-1]) - Array = val * np.ones(nCells) +def readOFVec( + filename: str, + n_cells: int | None = None, + n_header: int | None = None, + meta_data: dict | None = None, +) -> dict: + """ + Read a vector field outputted by OpenFOAM in ASCII format + + Parameters + ---------- + filename: str + Vector field filename + n_cells : int | None + Number of computational cells in the domain + n_header : int | None + Number of header lines + meta_data : dict | None + meta data dictionary + If None, it is read from filename + + returns + ---------- + data: dict + Dictionary that contain info about the scalar field + ======== ===================================================== + Key Description (type) + ======== ===================================================== + field For nonuniform fields, array of size the number of cells by 3 (*np.ndarray*). + For uniform fields with a specified n_cells, + array of size the number of cells by 3 (*np.ndarray*). 
+ For uniform fields, a scalar value (*float*) + name Name of the field (*str*) + n_cells Number of computational cells (*int*) + n_header Number of header lines (*int*) + ======== ===================================================== + """ + field = None + + if meta_data is None: + # Read meta data + meta_data = read_meta_data(filename, mode="vector") + + # Set field + if meta_data["uniform"]: + if n_cells is None: + field = meta_data["uniform_value"] else: - # Not a uniform field - f = open(file, "r") - if nHeader is None: - # Find header - lines = f.readlines() - for iline, line in enumerate(lines[:-2]): - if str(nCells) in lines[iline] and "(" in lines[iline + 1]: + field = meta_data["uniform_value"] * np.ones((n_cells, 3)) + else: + n_cells = meta_data["n_cells"] + if n_header is None: + oldline = "" + newline = "" + iline = 0 + eof = False + with open(filename, "r") as f: + while iline < 100 and (not eof): + line = f.readline() + if len(line) == 0: + eof = True + oldline = newline + newline = line + if str(n_cells) in oldline and "(" in newline: + n_header = iline + 1 break - nHeader = iline + 2 - f.close() - Array = np.loadtxt( - file, dtype=tuple, skiprows=nHeader, max_rows=nCells + iline += 1 + else: + raise ValueError( + "Could not find a sequence {n_cells} and '(' in file ({filename})" + ) + + try: + field = np.loadtxt( + filename, dtype=tuple, skiprows=n_header, max_rows=n_cells ) - for i in range(nCells): - Array[i, 0] = float(Array[i, 0][1:]) - Array[i, 1] = float(Array[i, 1]) - Array[i, 2] = float(Array[i, 2][:-1]) - Array = np.array(Array).astype(float) - except Exception as err: - print("Issue when reading %s" % file) - print(err) - sys.exit() + for i in range(n_cells): + field[i, 0] = float(field[i, 0][1:]) + field[i, 1] = float(field[i, 1]) + field[i, 2] = float(field[i, 2][:-1]) + field = np.array(field).astype(float) - return Array + except Exception as err: + print(f"Issue when reading {filename}") + print(err) + sys.exit() + + return { + "field": field, + "name": meta_data["name"], + "n_cells": n_cells, + "n_header": n_header, + } + + +def readOF( + filename: str, + n_cells: int | None = None, + n_header: int | None = None, + meta_data: dict | None = None, +) -> dict: + """ + Read an OpenFOAM field outputted in ASCII format + + Parameters + ---------- + filename: str + Field filename + n_cells : int | None + Number of computational cells in the domain + n_header : int | None + Number of header lines + meta_data : dict | None + meta data dictionary + If None, it is read from filename + + + returns + ---------- + data: dict + Dictionary that contain info about the scalar field + ======== ===================================================== + Key Description (type) + ======== ===================================================== + field For nonuniform fields, array of size the number of cells (*np.ndarray*). + For uniform fields with a specified n_cells, + array of size the number of cells (*np.ndarray*). 
+ For uniform fields, a scalar value (*float*) + name Name of the field (*str*) + n_cells Number of computational cells (*int*) + n_header Number of header lines (*int*) + ======== ===================================================== + """ + if meta_data is None: + # Read meta data + meta_data = read_meta_data(filename) + if meta_data["type"] == "scalar": + return readOFScal(filename=filename, meta_data=meta_data) + if meta_data["type"] == "vector": + return readOFVec(filename=filename, meta_data=meta_data) def readSizeGroups(file): @@ -129,7 +447,29 @@ def readSizeGroups(file): return sizeGroup, binGroup -def getCaseTimes(casePath, remove_zero=False): +def getCaseTimes( + casePath: str, remove_zero: bool = False, verbose: bool = True +) -> tuple: + """ + Get list of all time folders from an OpenFOAM case + + Parameters + ---------- + casePath: str + Path to case folder + remove_zero : bool + Whether to remove zero from the time folder list + verbose : bool + Whether to print what time folders are included + + returns + ---------- + time_float_sorted: list[float] + List of time folder values in ascending order + time_str_sorted: list[str] + List of time folder names in ascending order + + """ # Read Time times_tmp = os.listdir(casePath) # remove non floats @@ -140,7 +480,8 @@ def getCaseTimes(casePath, remove_zero=False): if abs(a) < 1e-12: _ = times_tmp.pop(i) except ValueError: - print(f"{entry} not a time folder, removing") + if verbose: + print(f"{entry} not a time folder, removing") a = times_tmp.pop(i) # print('removed ', a) time_float = [float(entry) for entry in times_tmp] @@ -152,8 +493,23 @@ def getCaseTimes(casePath, remove_zero=False): return time_float_sorted, time_str_sorted -def getMeshTime(casePath): +def getMeshTime(casePath: str) -> str: + """ + Get the time at which the mesh was printed + + Parameters + ---------- + casePath: str + Path to case folder + + returns + ---------- + time_mesh: str + The name of the time at which "meshFaceCentresXXX" was created + """ + files_tmp = os.listdir(casePath) for entry in files_tmp: if entry.startswith("meshFaceCentres"): - return entry[16:-4] + time_mesh = entry[16:-4] + return time_mesh diff --git a/experimental_cases/disengagement/bubble_column_pbe_20L/writeGlobalVars.py b/experimental_cases/disengagement/bubble_column_pbe_20L/writeGlobalVars.py index 0594eccc..3445bdb8 100644 --- a/experimental_cases/disengagement/bubble_column_pbe_20L/writeGlobalVars.py +++ b/experimental_cases/disengagement/bubble_column_pbe_20L/writeGlobalVars.py @@ -34,10 +34,12 @@ def readInletArea(): def getLiqVol(): cellCentres = readMesh(os.path.join(".", f"meshCellCentres_0.obj")) - volume_field = readOFScal(os.path.join("0", "V"), len(cellCentres)) + volume_field = readOFScal(os.path.join("0", "V"), len(cellCentres))[ + "field" + ] alpha_field = readOFScal( os.path.join("0", "alpha.liquid"), len(cellCentres) - ) + )["field"] return np.sum(volume_field * alpha_field) diff --git a/tests/io/test_case.py b/tests/io/test_case.py new file mode 100644 index 00000000..95c23416 --- /dev/null +++ b/tests/io/test_case.py @@ -0,0 +1,26 @@ +import os +from pathlib import Path + +import numpy as np +from prettyPlot.plotting import plt, pretty_labels + +from bird.utilities.ofio import getCaseTimes + + +def test_case_time(): + caseFolder = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "postprocess", + "data_conditional_mean", + ) + # Read non uniform field + time_float, time_str = getCaseTimes(caseFolder) + assert 
np.linalg.norm(np.array(time_float) - np.array([1, 79, 80])) < 1e-6 + assert time_str == ["1", "79", "80"] + + +if __name__ == "__main__": + test_case_time() diff --git a/tests/io/test_readOF.py b/tests/io/test_readOF.py new file mode 100644 index 00000000..efa398ab --- /dev/null +++ b/tests/io/test_readOF.py @@ -0,0 +1,110 @@ +import os +from pathlib import Path + +import numpy as np +from prettyPlot.plotting import plt, pretty_labels + +from bird.utilities.ofio import readOF, readOFScal, readOFVec + + +def test_read_nonunif_scal(): + caseFolder = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "postprocess", + "data_conditional_mean", + ) + # Read non uniform field + data_dict = readOFScal(filename=os.path.join(caseFolder, "79", "CO2.gas")) + assert abs(data_dict["field"][0] - 0.616955) < 1e-6 + assert abs(data_dict["field"][-1] - 0.625389) < 1e-6 + assert abs(data_dict["n_cells"] - 137980) < 1e-6 + assert abs(data_dict["field"].shape[0] - 137980) < 1e-6 + assert data_dict["name"] == "CO2.gas" + # Read non uniform field with flexible interface + data_dict = readOF(filename=os.path.join(caseFolder, "79", "CO2.gas")) + assert abs(data_dict["field"][0] - 0.616955) < 1e-6 + assert abs(data_dict["field"][-1] - 0.625389) < 1e-6 + assert abs(data_dict["n_cells"] - 137980) < 1e-6 + assert abs(data_dict["field"].shape[0] - 137980) < 1e-6 + assert data_dict["name"] == "CO2.gas" + + +def test_read_unif_scal(): + caseFolder = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "postprocess", + "data_conditional_mean", + ) + # Read non uniform field + data_dict = readOFScal(filename=os.path.join(caseFolder, "79", "f.gas")) + assert abs(data_dict["field"] - 1) < 1e-6 + assert data_dict["n_cells"] is None + # Read non uniform field with flexible interface + data_dict = readOF(filename=os.path.join(caseFolder, "79", "f.gas")) + assert abs(data_dict["field"] - 1) < 1e-6 + assert data_dict["n_cells"] is None + # Read non uniform field with prespecified cell number + data_dict = readOFScal( + filename=os.path.join(caseFolder, "79", "f.gas"), n_cells=100 + ) + assert np.shape(data_dict["field"]) == (100,) + assert np.linalg.norm(data_dict["field"] - 1) < 1e-6 + assert data_dict["n_cells"] == 100 + assert data_dict["name"] == "f.gas" + + +def test_read_nonunif_vec(): + caseFolder = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "postprocess", + "data_conditional_mean", + ) + # Read non uniform field + data_dict = readOFVec(filename=os.path.join(caseFolder, "79", "U.gas")) + assert ( + np.linalg.norm( + data_dict["field"][0, :] - [0.140018, 1.20333, 0.127566] + ) + < 1e-6 + ) + assert ( + np.linalg.norm( + data_dict["field"][-1, :] - [0.0409271, 0.176052, 0.0302899] + ) + < 1e-6 + ) + assert abs(data_dict["n_cells"] - 137980) < 1e-6 + assert abs(data_dict["field"].shape[0] - 137980) < 1e-6 + assert data_dict["name"] == "U.gas" + # Read non uniform field with flexible interface + data_dict = readOF(filename=os.path.join(caseFolder, "79", "U.gas")) + assert ( + np.linalg.norm( + data_dict["field"][0, :] - [0.140018, 1.20333, 0.127566] + ) + < 1e-6 + ) + assert ( + np.linalg.norm( + data_dict["field"][-1, :] - [0.0409271, 0.176052, 0.0302899] + ) + < 1e-6 + ) + assert abs(data_dict["n_cells"] - 137980) < 1e-6 + assert abs(data_dict["field"].shape[0] - 137980) < 1e-6 + assert data_dict["name"] == "U.gas" + + +if __name__ == "__main__": + test_read_nonunif_scal() + test_read_unif_scal() + test_read_nonunif_vec() diff --git a/tests/postprocess/test_cond_mean.py 
b/tests/postprocess/test_cond_mean.py index c453b99a..d1c3e983 100644 --- a/tests/postprocess/test_cond_mean.py +++ b/tests/postprocess/test_cond_mean.py @@ -1,4 +1,5 @@ import os +from pathlib import Path from prettyPlot.plotting import plt, pretty_labels @@ -10,7 +11,14 @@ def test_compute_cond(): - caseFolder = os.path.join("bird", "postprocess", "data_conditional_mean") + caseFolder = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "postprocess", + "data_conditional_mean", + ) fields_list = [ "CO.gas", "CO.liquid", diff --git a/tests/postprocess/test_early_pred.py b/tests/postprocess/test_early_pred.py index f7899163..7339b92a 100644 --- a/tests/postprocess/test_early_pred.py +++ b/tests/postprocess/test_early_pred.py @@ -1,4 +1,6 @@ -from bird import BIRD_EARLY_PRED_DATA_DIR +import os +from pathlib import Path + from bird.postprocess.early_pred import ( bayes_fit, fit_and_ext, @@ -9,12 +11,18 @@ def test_fit(): + BIRD_EARLY_PRED_DATA_DIR = os.path.join( + Path(__file__).parent, "..", "..", "bird", "postprocess", "data_early" + ) data_dict, color_files = multi_data_load(BIRD_EARLY_PRED_DATA_DIR) data_dict = fit_and_ext(data_dict) plotAllEarly(data_dict, color_files=color_files, chop=True, extrap=True) def test_bayes_fit(): + BIRD_EARLY_PRED_DATA_DIR = os.path.join( + Path(__file__).parent, "..", "..", "bird", "postprocess", "data_early" + ) data_dict, color_files = multi_data_load(BIRD_EARLY_PRED_DATA_DIR) bayes_fit(data_dict) plotAllEarly_uq(data_dict, color_files=color_files) diff --git a/tests/postprocess/test_kla.py b/tests/postprocess/test_kla.py index c28f3fba..66ee5e80 100644 --- a/tests/postprocess/test_kla.py +++ b/tests/postprocess/test_kla.py @@ -1,8 +1,8 @@ import os +from pathlib import Path import pytest -from bird import BIRD_KLA_DATA_DIR from bird.postprocess.kla_utils import compute_kla, print_res_dict @@ -14,6 +14,9 @@ ], ) def test_kla(bootstrap, max_chop): + BIRD_KLA_DATA_DIR = os.path.join( + Path(__file__).parent, "..", "..", "bird", "postprocess", "data_kla" + ) res_dict = compute_kla( os.path.join(BIRD_KLA_DATA_DIR, "volume_avg.dat"), time_ind=0, diff --git a/tutorial_cases/bubble_column_20L/writeGlobalVars.py b/tutorial_cases/bubble_column_20L/writeGlobalVars.py index 0594eccc..3445bdb8 100644 --- a/tutorial_cases/bubble_column_20L/writeGlobalVars.py +++ b/tutorial_cases/bubble_column_20L/writeGlobalVars.py @@ -34,10 +34,12 @@ def readInletArea(): def getLiqVol(): cellCentres = readMesh(os.path.join(".", f"meshCellCentres_0.obj")) - volume_field = readOFScal(os.path.join("0", "V"), len(cellCentres)) + volume_field = readOFScal(os.path.join("0", "V"), len(cellCentres))[ + "field" + ] alpha_field = readOFScal( os.path.join("0", "alpha.liquid"), len(cellCentres) - ) + )["field"] return np.sum(volume_field * alpha_field) diff --git a/tutorial_cases/loop_reactor_mixing/read_history.py b/tutorial_cases/loop_reactor_mixing/read_history.py index 264711f8..a2a77ad7 100644 --- a/tutorial_cases/loop_reactor_mixing/read_history.py +++ b/tutorial_cases/loop_reactor_mixing/read_history.py @@ -11,12 +11,12 @@ def compute_gas_holdup(caseFolder, timeFolder, nCells, field_dict={}): if not ("alpha_liq" in field_dict) or field_dict["alpha_liq"] is None: alpha_liq_file = os.path.join(caseFolder, timeFolder, "alpha.liquid") - alpha_liq = readOFScal(alpha_liq_file, nCells) + alpha_liq = readOFScal(alpha_liq_file, nCells)["field"] # print("reading alpha_liq") field_dict["alpha_liq"] = alpha_liq if not ("volume" in field_dict) or field_dict["volume"] is None: 
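# Note on the readOF* return convention: readOF, readOFScal and readOFVec return a
# dict rather than a bare array, which is why these call sites index into ["field"].
# A minimal sketch of that convention (keys as exercised in tests/io/test_readOF.py;
# a uniform field gives a scalar "field" and an "n_cells" of None):
#
#   data = readOFScal(os.path.join(caseFolder, timeFolder, "alpha.liquid"), nCells)
#   alpha = data["field"]   # cell values as a numpy array, or a single value if uniform
#   n = data["n_cells"]     # number of cells, or None for a uniform field
#   name = data["name"]     # field name, e.g. "alpha.liquid"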
volume_file = os.path.join(caseFolder, "0", "V") - volume = readOFScal(volume_file, nCells) + volume = readOFScal(volume_file, nCells)["field"] # print("reading Volume") field_dict["volume"] = volume alpha_liq = field_dict["alpha_liq"] @@ -28,17 +28,17 @@ def compute_gas_holdup(caseFolder, timeFolder, nCells, field_dict={}): def co2liq(caseFolder, timeFolder, nCells, field_dict={}): if not ("alpha_liq" in field_dict) or field_dict["alpha_liq"] is None: alpha_liq_file = os.path.join(caseFolder, timeFolder, "alpha.liquid") - alpha_liq = readOFScal(alpha_liq_file, nCells) + alpha_liq = readOFScal(alpha_liq_file, nCells)["field"] # print("reading alpha_liq") field_dict["alpha_liq"] = alpha_liq if not ("co2_liq" in field_dict) or field_dict["co2_liq"] is None: co2_liq_file = os.path.join(caseFolder, timeFolder, "CO2.liquid") - co2_liq = readOFScal(co2_liq_file, nCells) + co2_liq = readOFScal(co2_liq_file, nCells)["field"] # print("computing co2 liq") field_dict["co2_liq"] = co2_liq if not ("volume" in field_dict) or field_dict["volume"] is None: volume_file = os.path.join(caseFolder, "0", "V") - volume = readOFScal(volume_file, nCells) + volume = readOFScal(volume_file, nCells)["field"] # print("reading Volume") field_dict["volume"] = volume if not ("indliq" in field_dict) or field_dict["indliq"] is None: @@ -59,26 +59,26 @@ def co2liq(caseFolder, timeFolder, nCells, field_dict={}): def cliq(caseFolder, timeFolder, nCells, field_dict={}): if not ("alpha_liq" in field_dict) or field_dict["alpha_liq"] is None: alpha_liq_file = os.path.join(caseFolder, timeFolder, "alpha.liquid") - alpha_liq = readOFScal(alpha_liq_file, nCells) + alpha_liq = readOFScal(alpha_liq_file, nCells)["field"] # print("reading alpha_liq") field_dict["alpha_liq"] = alpha_liq if not ("rho_liq" in field_dict) or field_dict["rho_liq"] is None: rho_liq_file = os.path.join(caseFolder, timeFolder, "rhom") - rho_liq = readOFScal(rho_liq_file, nCells) + rho_liq = readOFScal(rho_liq_file, nCells)["field"] field_dict["rho_liq"] = rho_liq if not ("co2_liq" in field_dict) or field_dict["co2_liq"] is None: co2_liq_file = os.path.join(caseFolder, timeFolder, "CO2.liquid") - co2_liq = readOFScal(co2_liq_file, nCells) + co2_liq = readOFScal(co2_liq_file, nCells)["field"] # print("computing co2 liq") field_dict["co2_liq"] = co2_liq if not ("h2_liq" in field_dict) or field_dict["h2_liq"] is None: h2_liq_file = os.path.join(caseFolder, timeFolder, "H2.liquid") - h2_liq = readOFScal(h2_liq_file, nCells) + h2_liq = readOFScal(h2_liq_file, nCells)["field"] # print("computing h2 liq") field_dict["h2_liq"] = h2_liq if not ("volume" in field_dict) or field_dict["volume"] is None: volume_file = os.path.join(caseFolder, "0", "V") - volume = readOFScal(volume_file, nCells) + volume = readOFScal(volume_file, nCells)["field"] # print("reading Volume") field_dict["volume"] = volume if not ("indliq" in field_dict) or field_dict["indliq"] is None: @@ -109,17 +109,17 @@ def cliq(caseFolder, timeFolder, nCells, field_dict={}): def h2liq(caseFolder, timeFolder, nCells, field_dict={}): if not ("alpha_liq" in field_dict) or field_dict["alpha_liq"] is None: alpha_liq_file = os.path.join(caseFolder, timeFolder, "alpha.liquid") - alpha_liq = readOFScal(alpha_liq_file, nCells) + alpha_liq = readOFScal(alpha_liq_file, nCells)["field"] # print("reading alpha_liq") field_dict["alpha_liq"] = alpha_liq if not ("h2_liq" in field_dict) or field_dict["h2_liq"] is None: h2_liq_file = os.path.join(caseFolder, timeFolder, "H2.liquid") - h2_liq = readOFScal(h2_liq_file, 
nCells) + h2_liq = readOFScal(h2_liq_file, nCells)["field"] # print("computing h2 liq") field_dict["h2_liq"] = h2_liq if not ("volume" in field_dict) or field_dict["volume"] is None: volume_file = os.path.join(caseFolder, "0", "V") - volume = readOFScal(volume_file, nCells) + volume = readOFScal(volume_file, nCells)["field"] # print("reading Volume") field_dict["volume"] = volume if not ("indliq" in field_dict) or field_dict["indliq"] is None: @@ -140,12 +140,12 @@ def h2liq(caseFolder, timeFolder, nCells, field_dict={}): def vol_liq(caseFolder, timeFolder, nCells, field_dict={}): if not ("alpha_liq" in field_dict) or field_dict["alpha_liq"] is None: alpha_liq_file = os.path.join(caseFolder, timeFolder, "alpha.liquid") - alpha_liq = readOFScal(alpha_liq_file, nCells) + alpha_liq = readOFScal(alpha_liq_file, nCells)["field"] # print("reading alpha_liq") field_dict["alpha_liq"] = alpha_liq if not ("volume" in field_dict) or field_dict["volume"] is None: volume_file = os.path.join(caseFolder, "0", "V") - volume = readOFScal(volume_file, nCells) + volume = readOFScal(volume_file, nCells)["field"] # print("reading Volume") field_dict["volume"] = volume volume = field_dict["volume"] diff --git a/tutorial_cases/loop_reactor_mixing/writeGlobalVars.py b/tutorial_cases/loop_reactor_mixing/writeGlobalVars.py index 0594eccc..3445bdb8 100644 --- a/tutorial_cases/loop_reactor_mixing/writeGlobalVars.py +++ b/tutorial_cases/loop_reactor_mixing/writeGlobalVars.py @@ -34,10 +34,12 @@ def readInletArea(): def getLiqVol(): cellCentres = readMesh(os.path.join(".", f"meshCellCentres_0.obj")) - volume_field = readOFScal(os.path.join("0", "V"), len(cellCentres)) + volume_field = readOFScal(os.path.join("0", "V"), len(cellCentres))[ + "field" + ] alpha_field = readOFScal( os.path.join("0", "alpha.liquid"), len(cellCentres) - ) + )["field"] return np.sum(volume_field * alpha_field) diff --git a/tutorial_cases/loop_reactor_reacting/writeGlobalVars.py b/tutorial_cases/loop_reactor_reacting/writeGlobalVars.py index 0594eccc..3445bdb8 100644 --- a/tutorial_cases/loop_reactor_reacting/writeGlobalVars.py +++ b/tutorial_cases/loop_reactor_reacting/writeGlobalVars.py @@ -34,10 +34,12 @@ def readInletArea(): def getLiqVol(): cellCentres = readMesh(os.path.join(".", f"meshCellCentres_0.obj")) - volume_field = readOFScal(os.path.join("0", "V"), len(cellCentres)) + volume_field = readOFScal(os.path.join("0", "V"), len(cellCentres))[ + "field" + ] alpha_field = readOFScal( os.path.join("0", "alpha.liquid"), len(cellCentres) - ) + )["field"] return np.sum(volume_field * alpha_field) From c5b79b283b6047521e0ef8d384323cddf78e8ac8 Mon Sep 17 00:00:00 2001 From: Malik Date: Mon, 7 Jul 2025 19:07:23 -0600 Subject: [PATCH 19/38] remove data from package --- setup.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/setup.py b/setup.py index 49263f60..57b409d7 100644 --- a/setup.py +++ b/setup.py @@ -37,10 +37,7 @@ "*requirements.txt", "*.json", "*.yaml", - "*.csv", "*.dat", - "data_conditional_mean", - "data_kla", ] }, extras_require={ From 26a51f563fba1e41493e82571ed06d4854e8a467 Mon Sep 17 00:00:00 2001 From: Malik Date: Tue, 8 Jul 2025 15:16:06 -0600 Subject: [PATCH 20/38] add tests for dictionary reading --- bird/utilities/ofio.py | 216 +++++++++++++++++- tests/io/test_read_foam_dict.py | 83 +++++++ ...est_readOF.py => test_read_foam_fields.py} | 0 3 files changed, 298 insertions(+), 1 deletion(-) create mode 100644 tests/io/test_read_foam_dict.py rename tests/io/{test_readOF.py => test_read_foam_fields.py} (100%) diff --git 
a/bird/utilities/ofio.py b/bird/utilities/ofio.py index 43367689..f7f13902 100644 --- a/bird/utilities/ofio.py +++ b/bird/utilities/ofio.py @@ -1,4 +1,5 @@ import os +import re import sys import numpy as np @@ -19,6 +20,8 @@ def readMesh(filename: str) -> np.ndarray: Array (N,3) representing the cell centers (N is number of cells) """ + assert filename.startswith("meshCellCentres") + assert filename.endswith(".obj") cell_centers = np.loadtxt(filename, usecols=(1, 2, 3)) return cell_centers @@ -502,7 +505,7 @@ def getMeshTime(casePath: str) -> str: casePath: str Path to case folder - returns + Returns ---------- time_mesh: str The name of the time at which "meshFaceCentresXXX" was created @@ -513,3 +516,214 @@ def getMeshTime(casePath: str) -> str: if entry.startswith("meshFaceCentres"): time_mesh = entry[16:-4] return time_mesh + + +def remove_comments(text: str) -> str: + """ + Remove C++-style comments (// and /*) from the input and markers like #{ #} + + Parameters + ---------- + text: str + Raw input text containing comments + + Returns + ---------- + text: str + Text with all comments removed + """ + + # text = re.sub( + # r"/\*.*?\*/", "", text, flags=re.DOTALL + # ) # Remove /* */ comments + # text_unc = re.sub(r"//.*", "", text) # Remove // comments + + text = re.sub(r"/\*.*?\*/", "", text, flags=re.DOTALL) + text = re.sub(r"//.*", "", text) + text = re.sub(r"#\{", "{", text) + text = re.sub(r"#\};", "}", text) + text = re.sub(r"#codeStream", "", text) + return text + + +def tokenize(text: str) -> list[str]: + """ + Add spaces around special characters (brace and semicolons) to make them separate tokens + + Parameters + ---------- + text: str + The cleaned (comment-free) OpenFOAM-style text. + + Returns: + ---------- + token_list: list[str] + List of tokens. + """ + text = re.sub(r"([{}();])", r" \1 ", text) + token_list = text.split() + return token_list + + +def parse_tokens(tokens: list[str]) -> dict: + """ + Parse OpenFOAM tokens into a nested Python dictionary. + Special handling for `code { ... }` blocks to be stored as raw strings. + + Parameters + ---------- + tokens: list[str] + A list of tokens produced by `tokenize`. + + Returns + ---------- + parsed: dict + A nested dictionary that represents the OpenFOAM dictionary. 
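Example
----------
A minimal illustrative call, based on the parsing rules above (values are kept as
raw strings, lists become Python lists, nested blocks become nested dicts):

>>> parse_tokens(tokenize("phases (gas liquid); d 3e-3;"))
{'phases': ['gas', 'liquid'], 'd': '3e-3'}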
+ """ + + def parse_block(index: int) -> tuple: + result = {} + while index < len(tokens): + token = tokens[index] + if token == "}": + return result, index + 1 + elif token == "{": + raise SyntaxError("Unexpected '{'") + + key = token + index += 1 + + # key followed by dictionary + if index < len(tokens) and tokens[index] == "{": + index += 1 + if key == "code": + code_lines = [] + while tokens[index] != "}": + code_lines.append(tokens[index]) + index += 1 + index += 1 + if index < len(tokens) and tokens[index] == ";": + index += 1 + result[key] = " ".join(code_lines).strip() + else: + subdict, index = parse_block(index) + result[key] = subdict + + # key followed by list + elif index < len(tokens) and tokens[index] == "(": + index += 1 + + # Peek to check if it's a dict-list (starts with '(' then '{') + if tokens[index] == "(": + dictlist = {} + while tokens[index] != ")": + if tokens[index] != "(": + raise SyntaxError( + f"Expected '(' for label in dict-list, got {tokens[index]}" + ) + # Read full label (e.g., "(gas and liquid)") + label_tokens = [] + while tokens[index] != ")": + label_tokens.append(tokens[index]) + index += 1 + label_tokens.append(tokens[index]) # include ')' + index += 1 + label = " ".join(label_tokens) + + if tokens[index] != "{": + raise SyntaxError( + f"Expected '{{' after label {label}" + ) + index += 1 + subdict, index = parse_block(index) + dictlist[label] = subdict + index += 1 # skip final ')' + if index < len(tokens) and tokens[index] == ";": + index += 1 + result[key] = dictlist + else: + # Standard list + lst = [] + while tokens[index] != ")": + lst.append(tokens[index]) + index += 1 + index += 1 + if index < len(tokens) and tokens[index] == ";": + index += 1 + result[key] = lst + + # key followed by scalar + elif index < len(tokens): + value = tokens[index] + index += 1 + if index < len(tokens) and tokens[index] == ";": + index += 1 + result[key] = value + + return result, index + + parsed, _ = parse_block(0) + return parsed + + +def parse_openfoam_dict(filename: str) -> dict: + """ + Parse OpenFOAM dictionary into a python dictionary + + Parameters + ---------- + filename: str + OpenFOAM dictionary filename + + Returns + ---------- + dict_of: dict + A Python dictionary representing the structure of the OpenFOAM dictionary. + """ + with open(filename, "r+") as f: + text = f.read() + text = remove_comments(text) + tokens = tokenize(text) + foam_dict = parse_tokens(tokens) + return foam_dict + + +def write_openfoam_dict(d: dict, filename: str, indent: int = 0) -> None: + """ + Save a Python dictionary back to an OpenFOAM-style file. 
+
+    Parameters
+    ----------
+    d: dict
+        Python dictionary to save
+    filename: str
+        The file that will contain the saved dictionary
+    indent: int
+        Number of indentation spaces
+    """
+
+    # Recursively convert the nested dictionary into indented OpenFOAM-style lines
+    def to_lines(sub_dict: dict, indent: int) -> list:
+        lines = []
+        indent_str = " " * indent
+        for key, value in sub_dict.items():
+            if isinstance(value, dict):
+                lines.append(f"{indent_str}{key}")
+                lines.append(f"{indent_str}{{")
+                lines.extend(to_lines(value, indent + 4))
+                lines.append(f"{indent_str}}}")
+            elif isinstance(value, list):
+                lines.append(f"{indent_str}{key}")
+                lines.append(f"{indent_str}(")
+                for item in value:
+                    lines.append(f"{indent_str} {item}")
+                lines.append(f"{indent_str});")
+            else:
+                lines.append(f"{indent_str}{key} {value};")
+        return lines
+
+    with open(filename, "w") as f:
+        f.write("\n".join(to_lines(d, indent)))
+        f.write(
+            "\n\n// ************************************************************************* //\n"
+        )
diff --git a/tests/io/test_read_foam_dict.py b/tests/io/test_read_foam_dict.py
new file mode 100644
index 00000000..3f9fa47d
--- /dev/null
+++ b/tests/io/test_read_foam_dict.py
@@ -0,0 +1,83 @@
+import os
+from pathlib import Path
+
+import numpy as np
+
+from bird.utilities.ofio import parse_openfoam_dict
+
+
+def test_read_phaseProperties():
+    const_folder = os.path.join(
+        Path(__file__).parent,
+        "..",
+        "..",
+        "tutorial_cases",
+        "loop_reactor_mixing",
+        "constant",
+    )
+    # Read non uniform field
+    foam_dict = parse_openfoam_dict(
+        filename=os.path.join(const_folder, "phaseProperties")
+    )
+
+    assert foam_dict["phases"] == ["gas", "liquid"]
+    assert foam_dict["gas"]["constantCoeffs"]["d"] == "3e-3"
+    assert (
+        foam_dict["liquid"]["Sc"]["code"]
+        == "os << ( $LeLiqMix * $CpMixLiq * $muMixLiq / $kThermLiq ) ;"
+    )
+    assert (
+        foam_dict["diffusiveMassTransfer.liquid"]["( gas in liquid )"]["type"]
+        == "Higbie"
+    )
+    assert (
+        foam_dict["lift"]["( gas in liquid )"]["lift"]["swarmCorrection"][
+            "type"
+        ]
+        == "none"
+    )
+
+
+def test_read_thermophysicalProperties():
+    const_folder = os.path.join(
+        Path(__file__).parent,
+        "..",
+        "..",
+        "tutorial_cases",
+        "loop_reactor_mixing",
+        "constant",
+    )
+    # Read non uniform field
+    foam_dict = parse_openfoam_dict(
+        filename=os.path.join(const_folder, "thermophysicalProperties.gas")
+    )
+
+    assert foam_dict["species"] == ["H2", "CO2", "N2"]
+    assert (
+        foam_dict["CO2"]["thermodynamics"]["highCpCoeffs"][0] == "3.85746029"
+    )
+    assert len(foam_dict["CO2"]["thermodynamics"]["highCpCoeffs"]) == 7
+
+
+def test_read_momentumTransport():
+    const_folder = os.path.join(
+        Path(__file__).parent,
+        "..",
+        "..",
+        "tutorial_cases",
+        "loop_reactor_mixing",
+        "constant",
+    )
+    # Read non uniform field
+    foam_dict = parse_openfoam_dict(
+        filename=os.path.join(const_folder, "momentumTransport.gas")
+    )
+
+    assert foam_dict["simulationType"] == "RAS"
+    assert foam_dict["RAS"]["turbulence"] == "on"
+
+
+if __name__ == "__main__":
+    test_read_phaseProperties()
+    test_read_thermophysicalProperties()
+    test_read_momentumTransport()
diff --git a/tests/io/test_readOF.py b/tests/io/test_read_foam_fields.py
similarity index 100%
rename from tests/io/test_readOF.py
rename to tests/io/test_read_foam_fields.py

From ac71303670c2cd8867a692aec39a97c48db6fc4b Mon Sep 17 00:00:00 2001
From: Malik
Date: Tue, 8 Jul 2025 15:23:43 -0600
Subject: [PATCH 21/38] show how to read a control dict

---
 tests/io/test_read_foam_dict.py | 17 +++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/tests/io/test_read_foam_dict.py b/tests/io/test_read_foam_dict.py
index
3f9fa47d..1be1d279 100644 --- a/tests/io/test_read_foam_dict.py +++ b/tests/io/test_read_foam_dict.py @@ -76,8 +76,25 @@ def test_read_momentumTransport(): assert foam_dict["simulationType"] == "RAS" assert foam_dict["RAS"]["turbulence"] == "on" +def test_read_controlDict(): + syst_folder = os.path.join( + Path(__file__).parent, + "..", + "..", + "tutorial_cases", + "loop_reactor_mixing", + "system", + ) + # Read non uniform field + foam_dict = parse_openfoam_dict( + filename=os.path.join(syst_folder, "controlDict") + ) + + assert foam_dict["writeControl"] == "adjustableRunTime" + assert foam_dict["maxCo"] == "0.5" if __name__ == "__main__": test_read_phaseProperties() test_read_thermophysicalProperties() test_read_momentumTransport() + test_read_controlDict() From 4134b94a19d205aa9393064175995ba50c4e80d9 Mon Sep 17 00:00:00 2001 From: Malik Date: Tue, 8 Jul 2025 15:28:28 -0600 Subject: [PATCH 22/38] remove too strict assertion --- bird/utilities/ofio.py | 6 +++--- tests/postprocess/test_cond_mean.py | 4 ++++ 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/bird/utilities/ofio.py b/bird/utilities/ofio.py index f7f13902..59ee0d46 100644 --- a/bird/utilities/ofio.py +++ b/bird/utilities/ofio.py @@ -20,8 +20,8 @@ def readMesh(filename: str) -> np.ndarray: Array (N,3) representing the cell centers (N is number of cells) """ - assert filename.startswith("meshCellCentres") - assert filename.endswith(".obj") + assert "meshCellCentres" in filename + assert ".obj" in filename cell_centers = np.loadtxt(filename, usecols=(1, 2, 3)) return cell_centers @@ -513,7 +513,7 @@ def getMeshTime(casePath: str) -> str: files_tmp = os.listdir(casePath) for entry in files_tmp: - if entry.startswith("meshFaceCentres"): + if "meshFaceCentres" in entry: time_mesh = entry[16:-4] return time_mesh diff --git a/tests/postprocess/test_cond_mean.py b/tests/postprocess/test_cond_mean.py index d1c3e983..fafca77f 100644 --- a/tests/postprocess/test_cond_mean.py +++ b/tests/postprocess/test_cond_mean.py @@ -39,3 +39,7 @@ def test_compute_cond(): plot_name = sequencePlot(cond, [caseFolder], field_name) pretty_labels(plot_name, "y [m]", 14) plt.close() + + +if __name__ == "__main__": + test_compute_cond() From 73e1cb9886f2bcdde5e7028b81e1fe8c488d463e Mon Sep 17 00:00:00 2001 From: Malik Date: Tue, 8 Jul 2025 15:29:15 -0600 Subject: [PATCH 23/38] format --- tests/io/test_read_foam_dict.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/io/test_read_foam_dict.py b/tests/io/test_read_foam_dict.py index 1be1d279..adf15227 100644 --- a/tests/io/test_read_foam_dict.py +++ b/tests/io/test_read_foam_dict.py @@ -76,6 +76,7 @@ def test_read_momentumTransport(): assert foam_dict["simulationType"] == "RAS" assert foam_dict["RAS"]["turbulence"] == "on" + def test_read_controlDict(): syst_folder = os.path.join( Path(__file__).parent, @@ -93,6 +94,7 @@ def test_read_controlDict(): assert foam_dict["writeControl"] == "adjustableRunTime" assert foam_dict["maxCo"] == "0.5" + if __name__ == "__main__": test_read_phaseProperties() test_read_thermophysicalProperties() From 587ec49d842f3ee6800272ccd028ded131f13e2e Mon Sep 17 00:00:00 2001 From: Malik Date: Tue, 8 Jul 2025 16:47:40 -0600 Subject: [PATCH 24/38] update version --- bird/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bird/version.py b/bird/version.py index 75d8e719..9cdbdc6f 100644 --- a/bird/version.py +++ b/bird/version.py @@ -1,3 +1,3 @@ """Bio reactor design version""" -__version__ = "0.0.19" +__version__ = "0.0.21" From 
fd69f6b1d2a21dfb0297774618fff78c7b534dd9 Mon Sep 17 00:00:00 2001 From: Malik Date: Tue, 8 Jul 2025 16:59:38 -0600 Subject: [PATCH 25/38] remove config files --- bird/version.py | 2 +- setup.py | 3 --- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/bird/version.py b/bird/version.py index 9cdbdc6f..a4ff1ec3 100644 --- a/bird/version.py +++ b/bird/version.py @@ -1,3 +1,3 @@ """Bio reactor design version""" -__version__ = "0.0.21" +__version__ = "0.0.22" diff --git a/setup.py b/setup.py index 57b409d7..3d32deab 100644 --- a/setup.py +++ b/setup.py @@ -35,9 +35,6 @@ package_data={ "": [ "*requirements.txt", - "*.json", - "*.yaml", - "*.dat", ] }, extras_require={ From bac6e157240a3a080a39ec867d0d836b59c8e089 Mon Sep 17 00:00:00 2001 From: Malik Date: Wed, 9 Jul 2025 08:02:47 -0600 Subject: [PATCH 26/38] remove unused imports and format --- bird/utilities/stl_plotting.py | 1 - tests/io/test_case.py | 1 - tests/io/test_read_foam_fields.py | 1 - tests/meshing/test_block_cyl_mesh.py | 62 +++++++++++++++++++++++++- tests/meshing/test_block_rect_mesh.py | 22 +++++++-- tests/meshing/test_stirred_tank.py | 10 ++++- tests/preprocess/test_dynamic_mixer.py | 20 ++++++++- tests/preprocess/test_stl_patch.py | 36 ++++++++++++++- 8 files changed, 142 insertions(+), 11 deletions(-) diff --git a/bird/utilities/stl_plotting.py b/bird/utilities/stl_plotting.py index 6f120894..19fdaabb 100644 --- a/bird/utilities/stl_plotting.py +++ b/bird/utilities/stl_plotting.py @@ -3,7 +3,6 @@ def plotSTL(stl_file): - import matplotlib.pyplot as plt from mpl_toolkits import mplot3d from stl import mesh diff --git a/tests/io/test_case.py b/tests/io/test_case.py index 95c23416..96ebb30e 100644 --- a/tests/io/test_case.py +++ b/tests/io/test_case.py @@ -2,7 +2,6 @@ from pathlib import Path import numpy as np -from prettyPlot.plotting import plt, pretty_labels from bird.utilities.ofio import getCaseTimes diff --git a/tests/io/test_read_foam_fields.py b/tests/io/test_read_foam_fields.py index efa398ab..46e49525 100644 --- a/tests/io/test_read_foam_fields.py +++ b/tests/io/test_read_foam_fields.py @@ -2,7 +2,6 @@ from pathlib import Path import numpy as np -from prettyPlot.plotting import plt, pretty_labels from bird.utilities.ofio import readOF, readOFScal, readOFVec diff --git a/tests/meshing/test_block_cyl_mesh.py b/tests/meshing/test_block_cyl_mesh.py index 1847dbc8..394da6ce 100644 --- a/tests/meshing/test_block_cyl_mesh.py +++ b/tests/meshing/test_block_cyl_mesh.py @@ -1,9 +1,9 @@ import os import sys +from pathlib import Path import numpy as np -from bird import BIRD_BLOCK_CYL_MESH_TEMP_DIR from bird.meshing.block_cyl_mesh import ( assemble_geom, assemble_mesh, @@ -19,6 +19,14 @@ def base_mesh(input_file, topo_file, output_folder): def test_side_sparger(): + BIRD_BLOCK_CYL_MESH_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "meshing", + "block_cyl_mesh_templates", + ) input_file = os.path.join( BIRD_BLOCK_CYL_MESH_TEMP_DIR, "sideSparger/input.json" ) @@ -30,6 +38,14 @@ def test_side_sparger(): def test_flat_donut(): + BIRD_BLOCK_CYL_MESH_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "meshing", + "block_cyl_mesh_templates", + ) input_file = os.path.join( BIRD_BLOCK_CYL_MESH_TEMP_DIR, "flatDonut/input.json" ) @@ -41,6 +57,14 @@ def test_flat_donut(): def test_base_column(): + BIRD_BLOCK_CYL_MESH_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "meshing", + "block_cyl_mesh_templates", + ) input_file = os.path.join( 
BIRD_BLOCK_CYL_MESH_TEMP_DIR, "baseColumn/input.json" ) @@ -52,6 +76,14 @@ def test_base_column(): def test_base_column_refine(): + BIRD_BLOCK_CYL_MESH_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "meshing", + "block_cyl_mesh_templates", + ) input_file = os.path.join( BIRD_BLOCK_CYL_MESH_TEMP_DIR, "baseColumn_refineSparg/input.json" ) @@ -63,6 +95,14 @@ def test_base_column_refine(): def test_base_column_projected(): + BIRD_BLOCK_CYL_MESH_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "meshing", + "block_cyl_mesh_templates", + ) input_file = os.path.join( BIRD_BLOCK_CYL_MESH_TEMP_DIR, "baseColumn_projected/input.json" ) @@ -74,6 +114,14 @@ def test_base_column_projected(): def test_multiring(): + BIRD_BLOCK_CYL_MESH_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "meshing", + "block_cyl_mesh_templates", + ) input_file = os.path.join( BIRD_BLOCK_CYL_MESH_TEMP_DIR, "multiRing_simple/input.json" ) @@ -85,6 +133,14 @@ def test_multiring(): def test_multiring_coarse(): + BIRD_BLOCK_CYL_MESH_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "meshing", + "block_cyl_mesh_templates", + ) input_file = os.path.join( BIRD_BLOCK_CYL_MESH_TEMP_DIR, "multiRing_coarse/input.json" ) @@ -93,3 +149,7 @@ def test_multiring_coarse(): ) output_folder = "system_tmp" base_mesh(input_file, topo_file, output_folder) + + +if __name__ == "__main__": + test_multiring_coarse() diff --git a/tests/meshing/test_block_rect_mesh.py b/tests/meshing/test_block_rect_mesh.py index 9cc0f00b..e3deb9b5 100644 --- a/tests/meshing/test_block_rect_mesh.py +++ b/tests/meshing/test_block_rect_mesh.py @@ -1,9 +1,9 @@ import os import sys +from pathlib import Path import numpy as np -from bird import BIRD_BLOCK_RECT_MESH_TEMP_DIR from bird.meshing.block_rect_mesh import ( assemble_geom, assemble_mesh, @@ -19,16 +19,32 @@ def base_mesh(input_file, output_folder): def test_loop_reactor(): + BIRD_BLOCK_RECT_MESH_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "meshing", + "block_rect_mesh_templates", + ) input_file = os.path.join( - BIRD_BLOCK_RECT_MESH_TEMP_DIR, "loopReactor/input.json" + BIRD_BLOCK_RECT_MESH_TEMP_DIR, "loopReactor", "input.json" ) output_folder = "system_tmp" base_mesh(input_file, output_folder) def test_subblock_reactor(): + BIRD_BLOCK_RECT_MESH_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "meshing", + "block_rect_mesh_templates", + ) input_file = os.path.join( - BIRD_BLOCK_RECT_MESH_TEMP_DIR, "sub_blocks/input.json" + BIRD_BLOCK_RECT_MESH_TEMP_DIR, "sub_blocks", "input.json" ) output_folder = "system_tmp" base_mesh(input_file, output_folder) diff --git a/tests/meshing/test_stirred_tank.py b/tests/meshing/test_stirred_tank.py index 73d23a46..2dacfc36 100644 --- a/tests/meshing/test_stirred_tank.py +++ b/tests/meshing/test_stirred_tank.py @@ -1,10 +1,10 @@ import argparse import os import sys +from pathlib import Path import numpy as np -from bird import BIRD_STIRRED_TANK_MESH_TEMP_DIR from bird.meshing.stirred_tank_mesh import ( get_reactor_geom, write_blocks, @@ -16,6 +16,14 @@ def test_stirred_tank(): + BIRD_STIRRED_TANK_MESH_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "meshing", + "stirred_tank_mesh_templates", + ) inp = os.path.join( BIRD_STIRRED_TANK_MESH_TEMP_DIR, "base_tank", "tank_par.yaml" ) diff --git a/tests/preprocess/test_dynamic_mixer.py b/tests/preprocess/test_dynamic_mixer.py index 
47668528..b2722c2d 100644 --- a/tests/preprocess/test_dynamic_mixer.py +++ b/tests/preprocess/test_dynamic_mixer.py @@ -1,13 +1,22 @@ import os +from pathlib import Path import numpy as np -from bird import BIRD_PRE_DYNMIX_TEMP_DIR from bird.meshing._mesh_tools import parseJsonFile from bird.preprocess.dynamic_mixer.mixing_fvModels import * def test_expl_list(): + BIRD_PRE_DYNMIX_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "preprocess", + "dynamic_mixer", + "mixing_template", + ) input_dict = parseJsonFile( os.path.join(BIRD_PRE_DYNMIX_TEMP_DIR, "expl_list", "mixers.json") ) @@ -15,6 +24,15 @@ def test_expl_list(): def test_loop_list(): + BIRD_PRE_DYNMIX_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "preprocess", + "dynamic_mixer", + "mixing_template", + ) input_dict = parseJsonFile( os.path.join( BIRD_PRE_DYNMIX_TEMP_DIR, "loop_reactor_list", "mixers.json" diff --git a/tests/preprocess/test_stl_patch.py b/tests/preprocess/test_stl_patch.py index 6ed831cf..ff6f326e 100644 --- a/tests/preprocess/test_stl_patch.py +++ b/tests/preprocess/test_stl_patch.py @@ -1,14 +1,25 @@ import os +from pathlib import Path import numpy as np +from prettyPlot.plotting import pretty_labels -from bird import BIRD_PRE_PATCH_TEMP_DIR from bird.meshing._mesh_tools import parseJsonFile from bird.preprocess.stl_patch.stl_bc import write_boundaries -from bird.utilities.stl_plotting import plotSTL, pretty_labels +from bird.utilities.stl_plotting import plotSTL def test_spider_sparger(): + BIRD_PRE_PATCH_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "preprocess", + "stl_patch", + "bc_patch_mesh_template", + ) + input_dict = parseJsonFile( os.path.join(BIRD_PRE_PATCH_TEMP_DIR, "spider_spg/inlets_outlets.json") ) @@ -19,6 +30,15 @@ def test_spider_sparger(): def test_loop_reactor(): + BIRD_PRE_PATCH_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "preprocess", + "stl_patch", + "bc_patch_mesh_template", + ) input_dict = parseJsonFile( os.path.join( BIRD_PRE_PATCH_TEMP_DIR, "loop_reactor_expl/inlets_outlets.json" @@ -31,6 +51,15 @@ def test_loop_reactor(): def test_loop_reactor_branch(): + BIRD_PRE_PATCH_TEMP_DIR = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "preprocess", + "stl_patch", + "bc_patch_mesh_template", + ) input_dict = parseJsonFile( os.path.join( BIRD_PRE_PATCH_TEMP_DIR, "loop_reactor_branch/inlets_outlets.json" @@ -43,4 +72,7 @@ def test_loop_reactor_branch(): if __name__ == "__main__": + from prettyPlot.plotting import plt + test_spider_sparger() + plt.show() From e5f7f1053bff4e78aa5b98d008f1853e7068ee94 Mon Sep 17 00:00:00 2001 From: Malik Date: Wed, 9 Jul 2025 08:28:38 -0600 Subject: [PATCH 27/38] docstring for tests --- tests/io/test_case.py | 3 +++ tests/io/test_read_foam_dict.py | 12 ++++++++++++ tests/io/test_read_foam_fields.py | 9 +++++++++ 3 files changed, 24 insertions(+) diff --git a/tests/io/test_case.py b/tests/io/test_case.py index 96ebb30e..cbb55537 100644 --- a/tests/io/test_case.py +++ b/tests/io/test_case.py @@ -7,6 +7,9 @@ def test_case_time(): + """ + Test for listing all time folders in a case + """ caseFolder = os.path.join( Path(__file__).parent, "..", diff --git a/tests/io/test_read_foam_dict.py b/tests/io/test_read_foam_dict.py index adf15227..6b539d92 100644 --- a/tests/io/test_read_foam_dict.py +++ b/tests/io/test_read_foam_dict.py @@ -7,6 +7,9 @@ def test_read_phaseProperties(): + """ + Test for reading content of 
`constant/phaseProperties` + """ const_folder = os.path.join( Path(__file__).parent, "..", @@ -39,6 +42,9 @@ def test_read_phaseProperties(): def test_read_thermophysicalProperties(): + """ + Test for reading content of `constant/thermophysicalProperties` + """ const_folder = os.path.join( Path(__file__).parent, "..", @@ -60,6 +66,9 @@ def test_read_thermophysicalProperties(): def test_read_momentumTransport(): + """ + Test for reading content of `constant/momentumTransport` + """ const_folder = os.path.join( Path(__file__).parent, "..", @@ -78,6 +87,9 @@ def test_read_momentumTransport(): def test_read_controlDict(): + """ + Test for reading content of `system/controlDict` + """ syst_folder = os.path.join( Path(__file__).parent, "..", diff --git a/tests/io/test_read_foam_fields.py b/tests/io/test_read_foam_fields.py index 46e49525..b293fc66 100644 --- a/tests/io/test_read_foam_fields.py +++ b/tests/io/test_read_foam_fields.py @@ -7,6 +7,9 @@ def test_read_nonunif_scal(): + """ + Test for reading non uniform scalarField + """ caseFolder = os.path.join( Path(__file__).parent, "..", @@ -32,6 +35,9 @@ def test_read_nonunif_scal(): def test_read_unif_scal(): + """ + Test for reading uniform scalarField + """ caseFolder = os.path.join( Path(__file__).parent, "..", @@ -59,6 +65,9 @@ def test_read_unif_scal(): def test_read_nonunif_vec(): + """ + Test for reading non uniform vectorField + """ caseFolder = os.path.join( Path(__file__).parent, "..", From 2511102ffaeadf9d98d98c84d01cdd97f777ed7a Mon Sep 17 00:00:00 2001 From: Malik Date: Wed, 9 Jul 2025 14:24:52 -0600 Subject: [PATCH 28/38] centralize postprocess --- bird/postprocess/computeQoI/compute_QOI.py | 137 +++--- bird/postprocess/conditional_mean.py | 49 +-- bird/postprocess/post_quantities.py | 480 +++++++++++++++++++++ bird/utilities/bubble_col_util.py | 244 ----------- tests/postprocess/test_post_quantities.py | 104 +++++ 5 files changed, 649 insertions(+), 365 deletions(-) create mode 100644 bird/postprocess/post_quantities.py delete mode 100644 bird/utilities/bubble_col_util.py create mode 100644 tests/postprocess/test_post_quantities.py diff --git a/bird/postprocess/computeQoI/compute_QOI.py b/bird/postprocess/computeQoI/compute_QOI.py index e7074d12..257774d8 100644 --- a/bird/postprocess/computeQoI/compute_QOI.py +++ b/bird/postprocess/computeQoI/compute_QOI.py @@ -1,15 +1,12 @@ import argparse -import sys - -import numpy as np - -sys.path.append("../utilities") import os import pickle +import sys -from ofio import * +import numpy as np -from bird.utilities.bubble_col_util import * +from bird.postprocess.post_quantities import * +from bird.utilities.ofio import * parser = argparse.ArgumentParser( description="Compute means QoI of OpenFOAM fields" @@ -29,15 +26,14 @@ nargs="+", help="List of variables to compute", default=[ - "GH", - "GH_height", + "gh", "d", "CO2_liq", "CO_liq", "H2_liq", - "kla_CO2", - "kla_CO", - "kla_H2", + "c_CO2_liq", + "c_CO_liq", + "c_H2_liq", ], required=False, ) @@ -109,87 +105,80 @@ def get_var( localFolder = os.path.join(case_path, time_folder) localFolder_vol = os.path.join(case_path, mesh_time_str) if name.lower() == "gh": - var, val_dict = computeGH( - localFolder, localFolder_vol, nCells, cellCentres, val_dict - ) - elif name.lower() == "gh_height": - var, val_dict = computeGH_height( - localFolder, + var, val_dict = compute_gas_holdup( + case_path, + time_folder, nCells, - cellCentres, - height_liq_base=7.0, - val_dict=val_dict, + volume_time="0", + field_dict=val_dict, ) elif name.lower() == "d": - 
var, val_dict = computeDiam(localFolder, nCells, cellCentres, val_dict) + var, val_dict = compute_ave_bubble_diam( + case_path, + time_folder, + nCells, + volume_time="0", + field_dict=val_dict, + ) elif name.lower() == "co2_liq": - var, val_dict = computeSpec_liq( - localFolder, + var, val_dict = compute_ave_y_liq( + case_path, + time_folder, nCells, - field_name="CO2.liquid", - key="co2_liq", - cellCentres=cellCentres, - val_dict=val_dict, + volume_time="0", + spec_name="CO2", + field_dict=val_dict, ) elif name.lower() == "co_liq": - var, val_dict = computeSpec_liq( - localFolder, + var, val_dict = compute_ave_y_liq( + case_path, + time_folder, nCells, - field_name="CO.liquid", - key="co_liq", - cellCentres=cellCentres, - val_dict=val_dict, + volume_time="0", + spec_name="CO", + field_dict=val_dict, ) elif name.lower() == "h2_liq": - var, val_dict = computeSpec_liq( - localFolder, + var, val_dict = compute_ave_y_liq( + case_path, + time_folder, nCells, - field_name="H2.liquid", - key="h2_liq", - cellCentres=cellCentres, - val_dict=val_dict, + volume_time="0", + spec_name="H2", + field_dict=val_dict, ) - elif name.lower() == "kla_co": - if "D_CO" in diff_name_list: - diff = diff_val_list[diff_name_list.index("D_CO")] - else: - diff = None - var, val_dict = computeSpec_kla( - localFolder, + elif name.lower() == "c_co2_liq": + var, val_dict = compute_ave_conc_liq( + case_path, + time_folder, nCells, - key_suffix="co", - cellCentres=cellCentres, - val_dict=val_dict, - diff=diff, + volume_time="0", + spec_name="CO2", + mol_weight=44.00995 * 1e-3, + field_dict=val_dict, ) - elif name.lower() == "kla_co2": - if "D_CO2" in diff_name_list: - diff = diff_val_list[diff_name_list.index("D_CO2")] - else: - diff = None - var, val_dict = computeSpec_kla( - localFolder, + elif name.lower() == "c_co_liq": + var, val_dict = compute_ave_conc_liq( + case_path, + time_folder, nCells, - key_suffix="co2", - cellCentres=cellCentres, - val_dict=val_dict, - diff=diff, + volume_time="0", + spec_name="CO", + mol_weight=28.01055 * 1e-3, + field_dict=val_dict, ) - elif name.lower() == "kla_h2": - if "D_H2" in diff_name_list: - diff = diff_val_list[diff_name_list.index("D_H2")] - else: - diff = None - var, val_dict = computeSpec_kla( - localFolder, + elif name.lower() == "c_h2_liq": + var, val_dict = compute_ave_conc_liq( + case_path, + time_folder, nCells, - key_suffix="h2", - cellCentres=cellCentres, - val_dict=val_dict, - diff=diff, + volume_time="0", + spec_name="H2", + mol_weight=2.01594 * 1e-3, + field_dict=val_dict, ) else: - sys.exit(f"ERROR: unknown variable {name}") + raise NotImplementedError(f"Unknown variable {name}") return var, val_dict diff --git a/bird/postprocess/conditional_mean.py b/bird/postprocess/conditional_mean.py index eb268146..5f246b66 100644 --- a/bird/postprocess/conditional_mean.py +++ b/bird/postprocess/conditional_mean.py @@ -3,7 +3,7 @@ from prettyPlot.plotting import plt -from bird.utilities.bubble_col_util import * +from bird.postprocess.post_quantities import * from bird.utilities.mathtools import * from bird.utilities.ofio import * @@ -41,54 +41,9 @@ def compute_cond_mean( for field_name in field_name_list: field_file.append(os.path.join(case_path, time_folder, field_name)) - # if os.path.isfile(d_gas_file): - # has_d = True - # else: - # has_d = False - for filename, name in zip(field_file, field_name_list): val_dict = {} - if name.lower() == "kla_co": - if "D_CO" in diff_name_list: - diff = diff_val_list[diff_name_list.index("D_CO")] - else: - diff = None - field_tmp, val_dict = 
computeSpec_kla_field( - os.path.join(case_path, time_folder), - nCells, - key_suffix="co", - cellCentres=cellCentres, - val_dict=val_dict, - diff=diff, - ) - elif name.lower() == "kla_co2": - if "D_CO2" in diff_name_list: - diff = diff_val_list[diff_name_list.index("D_CO2")] - else: - diff = None - field_tmp, val_dict = computeSpec_kla_field( - os.path.join(case_path, time_folder), - nCells, - key_suffix="co2", - cellCentres=cellCentres, - val_dict=val_dict, - diff=diff, - ) - elif name.lower() == "kla_h2": - if "D_H2" in diff_name_list: - diff = diff_val_list[diff_name_list.index("D_H2")] - else: - diff = None - field_tmp, val_dict = computeSpec_kla_field( - os.path.join(case_path, time_folder), - nCells, - key_suffix="h2", - cellCentres=cellCentres, - val_dict=val_dict, - diff=diff, - ) - else: - field_tmp = readOFScal(filename, nCells)["field"] + field_tmp = readOFScal(filename, nCells)["field"] vert_axis, field_cond_tmp = conditionalAverage( cellCentres[:, vert_ind], field_tmp, nbin=n_bins ) diff --git a/bird/postprocess/post_quantities.py b/bird/postprocess/post_quantities.py new file mode 100644 index 00000000..3b8ae5a2 --- /dev/null +++ b/bird/postprocess/post_quantities.py @@ -0,0 +1,480 @@ +import numpy as np + +from bird.utilities.ofio import * + + +def read_field( + case_folder: str, + time_folder: str, + field_name: str, + n_cells: int | None = None, + field_dict: dict = {}, +) -> dict: + """ + Read field at a given time and store it in dictionary for later reuse + + Parameters + ---------- + case_folder: str + Path to case folder + time_folder: str + Name of time folder to analyze + field_name: str + Name of the field file to read + n_cells : int | None + Number of cells in the domain + field_dict : dict + Dictionary of fields used to avoid rereading the same fields to calculate different quantities + + Returns + ---------- + field : np.ndarray | float + Field read + field_dict : dict + Dictionary of fields read + """ + + if not (field_name in field_dict) or field_dict[field_name] is None: + # Read field if it had not been read before + field_file = os.path.join(case_folder, time_folder, field_name) + field = readOF(field_file, n_cells=n_cells)["field"] + field_dict[field_name] = field + else: + # Get field from dict if it has been read before + field = field_dict[field_name] + + return field, field_dict + + +def get_ind_liq( + case_folder: str | None = None, + time_folder: str | None = None, + n_cells: int | None = None, + field_dict: dict = {}, +) -> tuple: + """ + Get indices of pure liquid cells (where alpha.liquid > 0.5) + + Parameters + ---------- + case_folder: str + Path to case folder + time_folder: str + Name of time folder to analyze + n_cells : int | None + Number of cells in the domain + field_name: str + Name of the field file to read + field_dict : dict + Dictionary of fields used to avoid rereading the same fields to calculate different quantities + + Returns + ---------- + ind_liq : np.ndarray | float + indices of pure liquid cells + field_dict : dict + Dictionary of fields read + """ + + kwargs = { + "case_folder": case_folder, + "time_folder": time_folder, + "n_cells": n_cells, + } + + # Compute indices of pure liquid + if not ("ind_liq" in field_dict) or field_dict["ind_liq"] is None: + alpha_liq, field_dict = read_field( + case_folder, + time_folder, + field_name="alpha.liquid", + n_cells=n_cells, + field_dict=field_dict, + ) + ind_liq = np.argwhere(alpha_liq > 0.5) + field_dict["ind_liq"] = ind_liq + else: + ind_liq = field_dict["ind_liq"] + + return 
ind_liq, field_dict
+
+
+def get_ind_gas(
+    case_folder: str | None = None,
+    time_folder: str | None = None,
+    n_cells: int | None = None,
+    field_dict: dict = {},
+) -> tuple:
+    """
+    Get indices of pure gas cells (where alpha.liquid <= 0.5)
+
+    Parameters
+    ----------
+    case_folder: str
+        Path to case folder
+    time_folder: str
+        Name of time folder to analyze
+    n_cells : int | None
+        Number of cells in the domain
+    field_name: str
+        Name of the field file to read
+    field_dict : dict
+        Dictionary of fields used to avoid rereading the same fields to calculate different quantities
+
+    Returns
+    ----------
+    ind_gas : np.ndarray | float
+        indices of pure gas cells
+    field_dict : dict
+        Dictionary of fields read
+    """
+
+    kwargs = {
+        "case_folder": case_folder,
+        "time_folder": time_folder,
+        "n_cells": n_cells,
+    }
+
+    # Compute indices of pure gas
+    if not ("ind_gas" in field_dict) or field_dict["ind_gas"] is None:
+        alpha_liq, field_dict = read_field(
+            case_folder,
+            time_folder,
+            field_name="alpha.liquid",
+            n_cells=n_cells,
+            field_dict=field_dict,
+        )
+        ind_gas = np.argwhere(alpha_liq <= 0.5)
+        field_dict["ind_gas"] = ind_gas
+    else:
+        ind_gas = field_dict["ind_gas"]
+
+    return ind_gas, field_dict
+
+
+def compute_gas_holdup(
+    case_folder: str,
+    time_folder: str,
+    n_cells: int | None = None,
+    volume_time: str = "0",
+    field_dict: dict = {},
+) -> tuple:
+    """
+    Calculate volume averaged gas hold up at a given time
+    $\frac{1}{V_{\rm tot}} \int_{V} (1-\alpha_{\rm liq}) dV$
+
+    Parameters
+    ----------
+    case_folder: str
+        Path to case folder
+    time_folder: str
+        Name of time folder to analyze
+    n_cells : int | None
+        Number of cells in the domain
+    volume_time : str
+        Time folder to read to get the cell volumes
+    field_dict : dict
+        Dictionary of fields used to avoid rereading the same fields to calculate different quantities
+
+    Returns
+    ----------
+    gas_holdup: float
+        Volume averaged gas holdup
+    field_dict : dict
+        Dictionary of fields read
+    """
+
+    # Read relevant fields
+    kwargs = {
+        "case_folder": case_folder,
+        "time_folder": time_folder,
+        "n_cells": n_cells,
+    }
+    kwargs_vol = {
+        "case_folder": case_folder,
+        "time_folder": volume_time,
+        "n_cells": n_cells,
+    }
+    alpha_liq, field_dict = read_field(
+        field_name="alpha.liquid", field_dict=field_dict, **kwargs
+    )
+    try:
+        cell_volume, field_dict = read_field(
+            field_name="V", field_dict=field_dict, **kwargs_vol
+        )
+    except FileNotFoundError:
+        message = f"ERROR: could not find {os.path.join(case_folder,volume_time,'V')}\n"
+        message += "You can generate V with\n\t"
+        message += f"`postProcess -func writeCellVolumes -time {volume_time} -case {case_folder}`"
+        sys.exit(message)
+
+    # Calculate
+    gas_holdup = np.sum((1 - alpha_liq) * cell_volume) / np.sum(cell_volume)
+
+    return gas_holdup, field_dict
+
+
+def compute_ave_y_liq(
+    case_folder: str,
+    time_folder: str,
+    n_cells: int | None = None,
+    volume_time: str = "0",
+    spec_name: str = "CO2",
+    field_dict={},
+) -> tuple:
+    """
+    Calculate liquid volume averaged mass fraction of a species at a given time
+
+    $\frac{1}{V_{\rm liq, tot}} \int_{V_{\rm liq}} Y dV_{\rm liq}$
+
+    Parameters
+    ----------
+    case_folder: str
+        Path to case folder
+    time_folder: str
+        Name of time folder to analyze
+    n_cells : int | None
+        Number of cells in the domain
+    volume_time : str
+        Time folder to read to get the cell volumes
+    spec_name : str
+        Name of the species
+    field_dict : dict
+        Dictionary of fields used to avoid rereading the same fields to calculate different quantities
+
+
Returns + ---------- + liq_ave_y: float + Liquid volume averaged mass fraction + field_dict : dict + Dictionary of fields read + """ + + # Read relevant fields + kwargs = { + "case_folder": case_folder, + "time_folder": time_folder, + "n_cells": n_cells, + } + kwargs_vol = { + "case_folder": case_folder, + "time_folder": volume_time, + "n_cells": n_cells, + } + alpha_liq, field_dict = read_field( + field_name="alpha.liquid", field_dict=field_dict, **kwargs + ) + y_liq, field_dict = read_field( + field_name=f"{spec_name}.liquid", field_dict=field_dict, **kwargs + ) + ind_liq, field_dict = get_ind_liq(field_dict=field_dict, **kwargs) + try: + cell_volume, field_dict = read_field( + field_name="V", field_dict=field_dict, **kwargs_vol + ) + except FileNotFoundError: + message = f"ERROR: could not find {os.path.join(case_folder,volume_time,'V')}\n" + message += "You can generate V with\n\t" + message += f"`postProcess -func writeCellVolumes -time {volume_time} -case {case_folder}`" + sys.exit(message) + + # Only compute over the liquid + if isinstance(alpha_liq, np.ndarray): + alpha_liq = alpha_liq[ind_liq] + if isinstance(cell_volume, np.ndarray): + cell_volume = cell_volume[ind_liq] + if isinstance(y_liq, np.ndarray): + y_liq = y_liq[ind_liq] + + # Calculate + liq_ave_y = np.sum(alpha_liq * y_liq * cell_volume) / np.sum( + alpha_liq * cell_volume + ) + + return liq_ave_y, field_dict + + +def compute_ave_conc_liq( + case_folder: str, + time_folder: str, + n_cells: int | None = None, + volume_time: str = "0", + spec_name: str = "CO2", + mol_weight: float = 0.04401, + rho_val: float | None = 1000, + field_dict={}, + verbose: bool = True, +) -> tuple: + """ + Calculate liquid volume averaged concentration of a species at a given time + + $\frac{1}{V_{\rm liq, tot}} \int_{V_{\rm liq}} \rho_{\rm liq} Y / W dV_{\rm liq}$ + + Parameters + ---------- + case_folder: str + Path to case folder + time_folder: str + Name of time folder to analyze + n_cells : int | None + Number of cells in the domain + volume_time : str + Time folder to read to get the cell volumes + spec_name : str + Name of the species + mol_weight : float + Molecular weight of species (kg/mol) + rho_val : float | None + Constant density not available from time folder (kg/m3) + field_dict : dict + Dictionary of fields used to avoid rereading the same fields to calculate different quantities + verbose : bool + If true, output mol weight, species name and density + + Returns + ---------- + conc_ave: float + Liquid volume averaged species concentration + field_dict : dict + Dictionary of fields read + """ + if verbose: + print( + f"INFO: Computing concentration for {spec_name} with molecular weight {mol_weight:.4g} kg/mol" + ) + if rho_val is not None: + print(f"INFO: Assuming liquid density {rho_val} kg/m3") + + # Read relevant fields + kwargs = { + "case_folder": case_folder, + "time_folder": time_folder, + "n_cells": n_cells, + } + kwargs_vol = { + "case_folder": case_folder, + "time_folder": volume_time, + "n_cells": n_cells, + } + alpha_liq, field_dict = read_field( + field_name="alpha.liquid", field_dict=field_dict, **kwargs + ) + y_liq, field_dict = read_field( + field_name=f"{spec_name}.liquid", field_dict=field_dict, **kwargs + ) + ind_liq, field_dict = get_ind_liq(field_dict=field_dict, **kwargs) + try: + cell_volume, field_dict = read_field( + field_name="V", field_dict=field_dict, **kwargs_vol + ) + except FileNotFoundError: + message = f"ERROR: could not find {os.path.join(case_folder,volume_time,'V')}\n" + message += "You can 
generate V with\n\t" + message += f"`postProcess -func writeCellVolumes -time {volume_time} -case {case_folder}`" + sys.exit(message) + + # Density of liquid is not always printed (special case) + if not ("rho_liq" in field_dict) or field_dict["rho_liq"] is None: + if rho_val is not None: + rho_liq = rho_val + field_dict["rho_liq"] = rho_val + else: + rho_liq_file = os.path.join(case_folder, time_folder, "rhom") + rho_liq = readOFScal(rho_liq_file, n_cells)["field"] + field_dict["rho_liq"] = rho_liq + else: + rho_liq = field_dict["rho_liq"] + + # Only compute over the liquid + if isinstance(y_liq, np.ndarray): + y_liq = y_liq[ind_liq] + if isinstance(alpha_liq, np.ndarray): + alpha_liq = alpha_liq[ind_liq] + if isinstance(alpha_liq, np.ndarray): + cell_volume = cell_volume[ind_liq] + if isinstance(rho_liq, np.ndarray): + rho_liq = rho_liq[ind_liq] + + conc_loc = rho_liq * y_liq / mol_weight + + conc_ave = np.sum(conc_loc * alpha_liq * cell_volume) / np.sum( + alpha_liq * cell_volume + ) + + return conc_ave, field_dict + + +def compute_ave_bubble_diam( + case_folder: str, + time_folder: str, + n_cells: int | None = None, + volume_time: str = "0", + field_dict={}, +) -> tuple: + """ + Calculate averaged bubble diameter over the liquid volume + $\frac{1}{V_{\rm liq, tot}} \int_{V_{\rm liq}} D dV$ + + Parameters + ---------- + case_folder: str + Path to case folder + time_folder: str + Name of time folder to analyze + n_cells : int | None + Number of cells in the domain + volume_time : str + Time folder to read to get the cell volumes + field_dict : dict + Dictionary of fields used to avoid rereading the same fields to calculate different quantities + + Returns + ---------- + diam: float + Volume averaged gas holdup + field_dict : dict + Dictionary of fields read + """ + + # Read relevant fields + kwargs = { + "case_folder": case_folder, + "time_folder": time_folder, + "n_cells": n_cells, + } + kwargs_vol = { + "case_folder": case_folder, + "time_folder": volume_time, + "n_cells": n_cells, + } + alpha_liq, field_dict = read_field( + field_name="alpha.liquid", field_dict=field_dict, **kwargs + ) + d_gas, field_dict = read_field( + field_name="d.gas", field_dict=field_dict, **kwargs + ) + ind_liq, field_dict = get_ind_liq(field_dict=field_dict, **kwargs) + try: + cell_volume, field_dict = read_field( + field_name="V", field_dict=field_dict, **kwargs_vol + ) + except FileNotFoundError: + message = f"ERROR: could not find {os.path.join(case_folder,volume_time,'V')}\n" + message += "You can generate V with\n\t" + message += f"`postProcess -func writeCellVolumes -time {volume_time} -case {case_folder}`" + sys.exit(message) + + # Only compute over the liquid + if isinstance(d_gas, np.ndarray): + d_gas = d_gas[ind_liq] + if isinstance(alpha_liq, np.ndarray): + alpha_liq = alpha_liq[ind_liq] + if isinstance(alpha_liq, np.ndarray): + cell_volume = cell_volume[ind_liq] + + # Calculate + diam = np.sum(d_gas * alpha_liq * cell_volume) / np.sum( + alpha_liq * cell_volume + ) + + return diam, val_dict diff --git a/bird/utilities/bubble_col_util.py b/bird/utilities/bubble_col_util.py deleted file mode 100644 index 068a42af..00000000 --- a/bird/utilities/bubble_col_util.py +++ /dev/null @@ -1,244 +0,0 @@ -import numpy as np - -from bird.utilities.ofio import * - - -def readFromDict(val_dict, key, read_func=None, path=None, nCells=None): - if key not in val_dict: - field = read_func(path, nCells)["field"] - val_dict[key] = field - else: - field = val_dict[key] - return field, val_dict - - -def 
check_indLiq(ind_liq, cellCentres): - height_liq = cellCentres[ind_liq, 1] - ind_to_remove = np.argwhere(height_liq > 9.5) - if len(ind_to_remove) > 0: - ind_liq_copy = ind_liq.copy() - n_remove = len(ind_to_remove) - print(f"ind liq found to be at high heights {n_remove} times") - ind_to_remove = list(ind_liq[ind_to_remove[:, 0]][:, 0]) - ind_liq_copy = list(set(list(ind_liq[:, 0])) - set(ind_to_remove)) - assert len(ind_liq_copy) == len(ind_liq) - n_remove - ind_liq = np.reshape(np.array(ind_liq_copy), (-1, 1)) - return ind_liq - - -def check_indHeight(ind_height, cellCentres): - height_liq = cellCentres[ind_height, 1] - ind_to_remove = np.argwhere(height_liq < 6) - if len(ind_to_remove) > 0: - ind_height_copy = ind_height.copy() - n_remove = len(ind_to_remove) - print(f"ind height found to be at low heights {n_remove} times") - ind_to_remove = list(ind_height[ind_to_remove[:, 0]][:, 0]) - ind_height_copy = list( - set(list(ind_height_copy[:, 0])) - set(ind_to_remove) - ) - assert len(ind_height_copy) == len(ind_height) - n_remove - ind_height = np.reshape(np.array(ind_height_copy), (-1, 1)) - return ind_height - - -def indLiqFromDict(val_dict, localFolder, nCells, cellCentres): - if "ind_liq" not in val_dict: - alpha_gas, val_dict = readFromDict( - val_dict=val_dict, - key="alpha_gas", - read_func=readOFScal, - path=os.path.join(localFolder, "alpha.gas"), - nCells=nCells, - ) - ind_liq = np.argwhere(alpha_gas < 0.8)[:, 0] - ind_liq = check_indLiq(ind_liq, cellCentres) - val_dict["ind_liq"] = ind_liq - else: - ind_liq = val_dict["ind_liq"] - - return ind_liq, val_dict - - -def computeGH(localFolder, localFolder_vol, nCells, cellCentres, val_dict={}): - alpha_gas, val_dict = readFromDict( - val_dict=val_dict, - key="alpha_gas", - read_func=readOFScal, - path=os.path.join(localFolder, "alpha.gas"), - nCells=nCells, - ) - volume, val_dict = readFromDict( - val_dict=val_dict, - key="volume", - read_func=readOFScal, - path=os.path.join(localFolder_vol, "V"), - nCells=nCells, - ) - ind_liq, val_dict = indLiqFromDict( - val_dict, localFolder, nCells, cellCentres - ) - - holdup = np.sum(alpha_gas[ind_liq] * volume[ind_liq]) / np.sum( - volume[ind_liq] - ) - return holdup, val_dict - - -def computeGH_height( - localFolder, nCells, cellCentres, height_liq_base, val_dict={} -): - alpha_gas, val_dict = readFromDict( - val_dict=val_dict, - key="alpha_gas", - read_func=readOFScal, - path=os.path.join(localFolder, "alpha.gas"), - nCells=nCells, - ) - - tol = 1e-3 - tol_max = 0.1 - nFound = 0 - iteration = 0 - while nFound <= 10 and tol < tol_max: - ind_height = np.argwhere(abs(alpha_gas - 0.8) < tol) - ind_height = check_indHeight(ind_height, cellCentres) - nFound = len(ind_height) - tol *= 1.1 - tol = np.clip(tol, a_min=None, a_max=0.2) - iteration += 1 - - if iteration > 1: - print(f"\tChanged GH tol to {tol:.2g}") - - height_liq = np.mean(cellCentres[ind_height, 1]) - holdup = height_liq / height_liq_base - 1 - - return holdup, val_dict - - -def computeDiam(localFolder, nCells, cellCentres, val_dict={}): - d_gas, val_dict = readFromDict( - val_dict=val_dict, - key="d_gas", - read_func=readOFScal, - path=os.path.join(localFolder, "d.gas"), - nCells=nCells, - ) - ind_liq, val_dict = indLiqFromDict( - val_dict, localFolder, nCells, cellCentres - ) - - diam = np.mean(d_gas[ind_liq]) - - return diam, val_dict - - -def computeSpec_liq( - localFolder, nCells, field_name, key, cellCentres, val_dict={} -): - species, val_dict = readFromDict( - val_dict=val_dict, - key=key, - read_func=readOFScal, - 
path=os.path.join(localFolder, field_name), - nCells=nCells, - ) - ind_liq, val_dict = indLiqFromDict( - val_dict, localFolder, nCells, cellCentres - ) - - species = np.mean(species[ind_liq]) - - return species, val_dict - - -def computeSpec_kla_field( - localFolder, nCells, key_suffix, cellCentres, val_dict={}, diff=None -): - if "slip_vel" not in val_dict: - u_gas, val_dict = readFromDict( - val_dict=val_dict, - key="u_gas", - read_func=readOFVec, - path=os.path.join(localFolder, "U.gas"), - nCells=nCells, - ) - u_liq, val_dict = readFromDict( - val_dict=val_dict, - key="u_liq", - read_func=readOFVec, - path=os.path.join(localFolder, "U.liquid"), - nCells=nCells, - ) - slipvel = np.linalg.norm(u_liq - u_gas, axis=1) - val_dict["slipvel"] = slipvel - - rho_gas, val_dict = readFromDict( - val_dict=val_dict, - key="rho_gas", - read_func=readOFScal, - path=os.path.join(localFolder, "thermo:rho.gas"), - nCells=nCells, - ) - if "D_" + key_suffix not in val_dict: - if diff is None: - T_gas, val_dict = readFromDict( - val_dict=val_dict, - key="T_gas", - read_func=readOFScal, - path=os.path.join(localFolder, "T.gas"), - nCells=nCells, - ) - mu = 1.67212e-06 * np.sqrt(T_gas) / (1 + 170.672 / T_gas) - D = mu / rho_gas / 0.7 - else: - D = np.ones(rho_gas.shape) * diff - val_dict["D_" + key_suffix] = D - else: - D = val_dict["D_" + key_suffix] - - d_gas, val_dict = readFromDict( - val_dict=val_dict, - key="d_gas", - read_func=readOFScal, - path=os.path.join(localFolder, "d.gas"), - nCells=nCells, - ) - if "Sh_" + key_suffix not in val_dict: - # Sh = 1.12*np.sqrt(rho_gas*slipvel*d_gas/(D*0.7*rho_gas))*np.sqrt(0.7) - Sh = ( - 2.0 - + 0.552 - * np.sqrt(rho_gas * slipvel * d_gas / (D * 0.7 * rho_gas)) - * 0.8889 - ) - val_dict["Sh_" + key_suffix] = Sh - else: - Sh = val_dict["Sh_" + key_suffix] - - alpha_gas, val_dict = readFromDict( - val_dict=val_dict, - key="alpha_gas", - read_func=readOFScal, - path=os.path.join(localFolder, "alpha.gas"), - nCells=nCells, - ) - - kla = Sh * 6 * alpha_gas / d_gas / d_gas * D - - return kla, val_dict - - -def computeSpec_kla( - localFolder, nCells, key_suffix, cellCentres, val_dict={}, diff=None -): - kla, val_dict = computeSpec_kla_field( - localFolder, nCells, key_suffix, cellCentres, val_dict, diff - ) - - ind_liq, val_dict = indLiqFromDict( - val_dict, localFolder, nCells, cellCentres - ) - - return np.mean(kla[ind_liq]), val_dict diff --git a/tests/postprocess/test_post_quantities.py b/tests/postprocess/test_post_quantities.py new file mode 100644 index 00000000..9fb0bdbf --- /dev/null +++ b/tests/postprocess/test_post_quantities.py @@ -0,0 +1,104 @@ +import os +from pathlib import Path + +from prettyPlot.plotting import plt, pretty_labels + +from bird.postprocess.post_quantities import * + + +def test_compute_gh(): + """ + Test for gas holdup calculation + """ + case_folder = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "postprocess", + "data_conditional_mean", + ) + kwargs = {"case_folder": case_folder, "n_cells": None, "volume_time": "1"} + field_dict = {} + gh, field_dict = compute_gas_holdup( + time_folder="1", field_dict=field_dict, **kwargs + ) + field_dict = {} + gh, field_dict = compute_gas_holdup( + time_folder="79", field_dict=field_dict, **kwargs + ) + + +def test_ave_y_liq(): + """ + Test for liquid volume averaged species mass fraction + """ + case_folder = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "postprocess", + "data_conditional_mean", + ) + kwargs = { + "time_folder": "79", + "case_folder": 
case_folder, + "n_cells": None, + "volume_time": "1", + } + field_dict = {} + ave_y_co2, field_dict = compute_ave_y_liq( + spec_name="CO2", field_dict=field_dict, **kwargs + ) + ave_y_co, field_dict = compute_ave_y_liq( + spec_name="CO", field_dict=field_dict, **kwargs + ) + ave_y_h2, field_dict = compute_ave_y_liq( + spec_name="H2", field_dict=field_dict, **kwargs + ) + + +def test_ave_conc_liq(): + """ + Test for liquid volume averaged species concentration + """ + case_folder = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "postprocess", + "data_conditional_mean", + ) + kwargs = { + "time_folder": "79", + "case_folder": case_folder, + "n_cells": None, + "volume_time": "1", + } + field_dict = {} + ave_conc_co2, field_dict = compute_ave_conc_liq( + spec_name="CO2", + mol_weight=44.00995 * 1e-3, + field_dict=field_dict, + **kwargs, + ) + ave_conc_co, field_dict = compute_ave_conc_liq( + spec_name="CO", + mol_weight=28.01055 * 1e-3, + field_dict=field_dict, + **kwargs, + ) + ave_conc_h2, field_dict = compute_ave_conc_liq( + spec_name="H2", + mol_weight=2.01594 * 1e-3, + field_dict=field_dict, + **kwargs, + ) + + +if __name__ == "__main__": + test_compute_gh() + test_ave_y_liq() + test_ave_conc_liq() From d0d77b92555358a7453db33483ea67d7153965d2 Mon Sep 17 00:00:00 2001 From: Malik Date: Wed, 9 Jul 2025 14:30:13 -0600 Subject: [PATCH 29/38] test bubble diameter calculation --- tests/postprocess/test_post_quantities.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/tests/postprocess/test_post_quantities.py b/tests/postprocess/test_post_quantities.py index 9fb0bdbf..5f0460d8 100644 --- a/tests/postprocess/test_post_quantities.py +++ b/tests/postprocess/test_post_quantities.py @@ -29,6 +29,29 @@ def test_compute_gh(): ) +def test_compute_diam(): + """ + Test for bubble diameter calculation + """ + case_folder = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "postprocess", + "data_conditional_mean", + ) + kwargs = {"case_folder": case_folder, "n_cells": None, "volume_time": "1"} + field_dict = {} + diam, field_dict = compute_ave_bubble_diam( + time_folder="1", field_dict=field_dict, **kwargs + ) + field_dict = {} + diam, field_dict = compute_ave_bubble_diam( + time_folder="79", field_dict=field_dict, **kwargs + ) + + def test_ave_y_liq(): """ Test for liquid volume averaged species mass fraction From 1789d0c937631a7eca635a8ddb15eb831fe9b6d3 Mon Sep 17 00:00:00 2001 From: Malik Date: Wed, 9 Jul 2025 14:34:50 -0600 Subject: [PATCH 30/38] remove rogue val dict --- bird/postprocess/post_quantities.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bird/postprocess/post_quantities.py b/bird/postprocess/post_quantities.py index 3b8ae5a2..a5f7d8f6 100644 --- a/bird/postprocess/post_quantities.py +++ b/bird/postprocess/post_quantities.py @@ -477,4 +477,4 @@ def compute_ave_bubble_diam( alpha_liq * cell_volume ) - return diam, val_dict + return diam, field_dict From 1f7c87c672ba6a804b9c44055b393e9e886c5cdf Mon Sep 17 00:00:00 2001 From: Malik Date: Wed, 9 Jul 2025 14:37:03 -0600 Subject: [PATCH 31/38] push to pypi to pass test --- bird/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bird/version.py b/bird/version.py index a4ff1ec3..bb5a3100 100644 --- a/bird/version.py +++ b/bird/version.py @@ -1,3 +1,3 @@ """Bio reactor design version""" -__version__ = "0.0.22" +__version__ = "0.0.23" From 7069e23d4dfba08c87c55ae6a98a15b6fcda641a Mon Sep 17 00:00:00 2001 From: Malik Date: Wed, 9 Jul 2025 
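For reference, the quantities added and tested so far can be combined on the example dataset shipped with the repository. The sketch below is illustrative rather than part of the patch series; it mirrors the tests above and assumes the same bird/postprocess/data_conditional_mean case and time folders. Passing the same field_dict to successive calls is what avoids rereading alpha.liquid, the cell volumes, and the liquid-cell indices from disk.

from bird.postprocess.post_quantities import (
    compute_ave_bubble_diam,
    compute_ave_conc_liq,
    compute_gas_holdup,
)

case_folder = "bird/postprocess/data_conditional_mean"  # example case used by the tests
kwargs = {"case_folder": case_folder, "n_cells": None, "volume_time": "1"}

field_dict = {}  # fields read by one call are reused by the next ones
gh, field_dict = compute_gas_holdup(time_folder="79", field_dict=field_dict, **kwargs)
diam, field_dict = compute_ave_bubble_diam(time_folder="79", field_dict=field_dict, **kwargs)
conc_co2, field_dict = compute_ave_conc_liq(
    spec_name="CO2",
    mol_weight=44.00995e-3,  # kg/mol, same value as in the tests
    time_folder="79",
    field_dict=field_dict,
    **kwargs,
)
print(f"holdup={gh:.3g}, bubble diameter={diam:.3g} m, [CO2]={conc_co2:.3g} mol/m3")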
15:48:03 -0600 Subject: [PATCH 32/38] superficial velocity calculation --- bird/postprocess/post_quantities.py | 238 ++++++++++++++++++---- tests/postprocess/test_post_quantities.py | 26 +++ 2 files changed, 227 insertions(+), 37 deletions(-) diff --git a/bird/postprocess/post_quantities.py b/bird/postprocess/post_quantities.py index a5f7d8f6..16fcf7a5 100644 --- a/bird/postprocess/post_quantities.py +++ b/bird/postprocess/post_quantities.py @@ -9,7 +9,7 @@ def read_field( field_name: str, n_cells: int | None = None, field_dict: dict = {}, -) -> dict: +) -> tuple: """ Read field at a given time and store it in dictionary for later reuse @@ -46,6 +46,102 @@ def read_field( return field, field_dict +def read_cell_volume( + case_folder: str, + time_folder: str, + n_cells: int | None = None, + field_dict: dict = {}, +) -> tuple: + """ + Read volume at a given time and store it in dictionary for later reuse + + Parameters + ---------- + case_folder: str + Path to case folder + time_folder: str + Name of time folder to analyze + n_cells : int | None + Number of cells in the domain + field_dict : dict + Dictionary of fields used to avoid rereading the same fields to calculate different quantities + + Returns + ---------- + cell_volume : np.ndarray | float + cell volume read from file + field_dict : dict + Dictionary of fields read + """ + kwargs_vol = { + "case_folder": case_folder, + "time_folder": time_folder, + "n_cells": n_cells, + } + try: + cell_volume, field_dict = read_field( + field_name="V", field_dict=field_dict, **kwargs_vol + ) + except FileNotFoundError: + message = f"ERROR: could not find {os.path.join(case_folder,volume_time,'V')}\n" + message += "You can generate V with\n\t" + message += f"`postProcess -func writeCellVolumes -time {time_folder} -case {case_folder}`" + sys.exit(message) + + return cell_volume, field_dict + + +def read_cell_centers( + case_folder: str, + cell_centers_file: str, + field_dict: dict = {}, +) -> tuple: + """ + Read volume at a given time and store it in dictionary for later reuse + + Parameters + ---------- + case_folder: str + Path to case folder + cell_centers_file : str + Filename of cell center data + field_dict : dict + Dictionary of fields used to avoid rereading the same fields to calculate different quantities + + Returns + ---------- + cell_centers : np.ndarray + cell centers read from file + field_dict : dict + Dictionary of fields read + """ + + if ( + not ("cell_centers" in field_dict) + or field_dict["cell_centers"] is None + ): + try: + cell_centers = readMesh( + os.path.join(case_folder, cell_centers_file) + ) + field_dict["cell_centers"] = cell_centers + except FileNotFoundError: + message = f"ERROR: could not find {cell_centers_file}\n" + message += "You can generate it with\n\t" + message += f"`writeMeshObj -case {case_folder}`\n" + time_float, time_str = getCaseTimes(case_folder) + correct_path = f"meshCellCentres_{time_str[0]}.obj" + if not correct_path == cell_centers_file: + message += ( + f"And adjust the cell center file path to {correct_path}" + ) + sys.exit(message) + else: + cell_centers = field_dict["cell_centers"] + + return cell_centers, field_dict + + def get_ind_liq( case_folder: str | None = None, time_folder: str | None = None, @@ -198,15 +294,9 @@ def compute_gas_holdup( alpha_liq, field_dict = read_field( field_name="alpha.liquid", field_dict=field_dict, **kwargs ) - try: - cell_volume, field_dict = read_field( - field_name="V", field_dict=field_dict, **kwargs_vol - ) - except FileNotFoundError: - message = f"ERROR: 
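# Illustrative sketch (not part of the patch): the helpers introduced above,
# read_cell_volume and read_cell_centers, assume the corresponding OpenFOAM output
# already exists, e.g.
#   postProcess -func writeCellVolumes -time 1 -case <case_folder>
#   writeMeshObj -case <case_folder>
# Both cache what they read in field_dict so later quantities can reuse it.
from bird.postprocess.post_quantities import read_cell_centers, read_cell_volume

case_folder = "bird/postprocess/data_conditional_mean"  # example case used by the tests
field_dict = {}
cell_volume, field_dict = read_cell_volume(
    case_folder=case_folder, time_folder="1", n_cells=None, field_dict=field_dict
)
cell_centers, field_dict = read_cell_centers(
    case_folder=case_folder,
    cell_centers_file="meshCellCentres_1.obj",  # filename convention written by writeMeshObj
    field_dict=field_dict,
)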
could not find {os.path.join(case_folder,volume_time,'V')}\n" - message += "You can generate V with\n\t" - message += f"`postProcess -func writeCellVolumes -time {volume_time} -case {case_folder}`" - sys.exit(message) + cell_volume, field_dict = read_cell_volume( + field_dict=field_dict, **kwargs_vol + ) # Calculate gas_holdup = np.sum((1 - alpha_liq) * cell_volume) / np.sum(cell_volume) @@ -214,6 +304,95 @@ def compute_gas_holdup( return gas_holdup, field_dict +def compute_superficial_velocity( + case_folder: str, + time_folder: str, + n_cells: int | None = None, + volume_time: str = "0", + direction: int = 1, + cell_centers_file: str = "meshCellCentres_0.obj", + field_dict: dict = {}, +) -> tuple: + """ + Calculate superficial velocity in a given direction at a given time + + Parameters + ---------- + case_folder: str + Path to case folder + time_folder: str + Name of time folder to analyze + n_cells : int | None + Number of cells in the domain + volume_time : str + Time folder to read to get the cell volumes + direction : int + Direction along which to calculate the superficial velocity + cell_centers_file : str + Filename of cell center data + field_dict : dict + Dictionary of fields used to avoid rereading the same fields to calculate different quantities + + Returns + ---------- + sup_vel: float + Superficial velocity + field_dict : dict + Dictionary of fields read + """ + + # Read relevant fields + kwargs = { + "case_folder": case_folder, + "time_folder": time_folder, + "n_cells": n_cells, + } + kwargs_vol = { + "case_folder": case_folder, + "time_folder": volume_time, + "n_cells": n_cells, + } + alpha_gas, field_dict = read_field( + field_name="alpha.gas", field_dict=field_dict, **kwargs + ) + U_gas, field_dict = read_field( + field_name="U.gas", field_dict=field_dict, **kwargs + ) + U_gas = U_gas[:, direction] + + cell_volume, field_dict = read_cell_volume( + field_dict=field_dict, **kwargs_vol + ) + cell_centers, field_dict = read_cell_centers( + case_folder=case_folder, + cell_centers_file=cell_centers_file, + field_dict=field_dict, + ) + + # Find all cells in the middle of the domain + max_dir = np.amax(cell_centers[:, direction]) + min_dir = np.amin(cell_centers[:, direction]) + middle_dir = (max_dir + min_dir) / 2 + tol = (max_dir - min_dir) * 0.05 + ind_middle = np.argwhere( + (cell_centers[:, direction] >= middle_dir - tol) + & (cell_centers[:, direction] <= middle_dir + tol) + ) + + # Only compute in the middle + if isinstance(alpha_gas, np.ndarray): + alpha_gas = alpha_gas[ind_middle] + if isinstance(cell_volume, np.ndarray): + cell_volume = cell_volume[ind_middle] + if isinstance(U_gas, np.ndarray): + U_gas = U_gas[ind_middle] + + # Compute + sup_vel = np.sum(U_gas * alpha_gas * cell_volume) / np.sum(cell_volume) + + return sup_vel, field_dict + + def compute_ave_y_liq( case_folder: str, time_folder: str, @@ -268,15 +447,10 @@ def compute_ave_y_liq( field_name=f"{spec_name}.liquid", field_dict=field_dict, **kwargs ) ind_liq, field_dict = get_ind_liq(field_dict=field_dict, **kwargs) - try: - cell_volume, field_dict = read_field( - field_name="V", field_dict=field_dict, **kwargs_vol - ) - except FileNotFoundError: - message = f"ERROR: could not find {os.path.join(case_folder,volume_time,'V')}\n" - message += "You can generate V with\n\t" - message += f"`postProcess -func writeCellVolumes -time {volume_time} -case {case_folder}`" - sys.exit(message) + + cell_volume, field_dict = read_cell_volume( + field_dict=field_dict, **kwargs_vol + ) # Only compute over the liquid if 
isinstance(alpha_liq, np.ndarray): @@ -363,15 +537,10 @@ def compute_ave_conc_liq( field_name=f"{spec_name}.liquid", field_dict=field_dict, **kwargs ) ind_liq, field_dict = get_ind_liq(field_dict=field_dict, **kwargs) - try: - cell_volume, field_dict = read_field( - field_name="V", field_dict=field_dict, **kwargs_vol - ) - except FileNotFoundError: - message = f"ERROR: could not find {os.path.join(case_folder,volume_time,'V')}\n" - message += "You can generate V with\n\t" - message += f"`postProcess -func writeCellVolumes -time {volume_time} -case {case_folder}`" - sys.exit(message) + + cell_volume, field_dict = read_cell_volume( + field_dict=field_dict, **kwargs_vol + ) # Density of liquid is not always printed (special case) if not ("rho_liq" in field_dict) or field_dict["rho_liq"] is None: @@ -454,15 +623,10 @@ def compute_ave_bubble_diam( field_name="d.gas", field_dict=field_dict, **kwargs ) ind_liq, field_dict = get_ind_liq(field_dict=field_dict, **kwargs) - try: - cell_volume, field_dict = read_field( - field_name="V", field_dict=field_dict, **kwargs_vol - ) - except FileNotFoundError: - message = f"ERROR: could not find {os.path.join(case_folder,volume_time,'V')}\n" - message += "You can generate V with\n\t" - message += f"`postProcess -func writeCellVolumes -time {volume_time} -case {case_folder}`" - sys.exit(message) + + cell_volume, field_dict = read_cell_volume( + field_dict=field_dict, **kwargs_vol + ) # Only compute over the liquid if isinstance(d_gas, np.ndarray): diff --git a/tests/postprocess/test_post_quantities.py b/tests/postprocess/test_post_quantities.py index 5f0460d8..56b36625 100644 --- a/tests/postprocess/test_post_quantities.py +++ b/tests/postprocess/test_post_quantities.py @@ -52,6 +52,31 @@ def test_compute_diam(): ) +def test_compute_superficial_velocity(): + """ + Test for superficial velocity calculation + """ + case_folder = os.path.join( + Path(__file__).parent, + "..", + "..", + "bird", + "postprocess", + "data_conditional_mean", + ) + kwargs = { + "case_folder": case_folder, + "n_cells": None, + "volume_time": "1", + "direction": 1, + "cell_centers_file": "meshCellCentres_1.obj", + } + field_dict = {} + sup_vel, field_dict = compute_superficial_velocity( + time_folder="79", field_dict=field_dict, **kwargs + ) + + def test_ave_y_liq(): """ Test for liquid volume averaged species mass fraction @@ -122,6 +147,7 @@ def test_ave_conc_liq(): if __name__ == "__main__": + test_compute_superficial_velocity() test_compute_gh() test_ave_y_liq() test_ave_conc_liq() From a139d17b5056538a8e0f67e3996a634194d90372 Mon Sep 17 00:00:00 2001 From: Malik Date: Wed, 9 Jul 2025 15:49:40 -0600 Subject: [PATCH 33/38] update bird version --- bird/version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bird/version.py b/bird/version.py index bb5a3100..07d5c654 100644 --- a/bird/version.py +++ b/bird/version.py @@ -1,3 +1,3 @@ """Bio reactor design version""" -__version__ = "0.0.23" +__version__ = "0.0.24" From 393e33875f20300502414adc32360c5cadb4693a Mon Sep 17 00:00:00 2001 From: Malik Date: Wed, 9 Jul 2025 16:16:36 -0600 Subject: [PATCH 34/38] add flat panel reactor tutorial and add it to the ci --- .github/workflows/ci.yml | 5 + .../FlatPanel_250L_ASU/0.orig/T.gas | 44 +++++ .../FlatPanel_250L_ASU/0.orig/T.liquid | 44 +++++ .../FlatPanel_250L_ASU/0.orig/U.gas | 40 +++++ .../FlatPanel_250L_ASU/0.orig/U.liquid | 40 +++++ .../FlatPanel_250L_ASU/0.orig/alpha.gas.orig | 41 +++++ .../0.orig/alpha.liquid.orig | 40 +++++ .../FlatPanel_250L_ASU/0.orig/alphat.gas | 
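As a usage note on the function added and tested in the previous patch, the sketch below shows a direct call to compute_superficial_velocity. It is illustrative only and assumes the same example dataset as the tests, with cell volumes and cell centres already written as described in the error messages above; direction=1 (the y axis) is the vertical direction in that case.

from bird.postprocess.post_quantities import compute_superficial_velocity

case_folder = "bird/postprocess/data_conditional_mean"  # same example case as the tests
sup_vel, field_dict = compute_superficial_velocity(
    case_folder=case_folder,
    time_folder="79",
    n_cells=None,
    volume_time="1",                            # time folder that holds the V field
    direction=1,                                # vertical (y) direction, as in the test
    cell_centers_file="meshCellCentres_1.obj",  # written by `writeMeshObj -case <case_folder>`
    field_dict={},
)
print(f"superficial gas velocity: {sup_vel:.3g} m/s")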
47 +++++ .../FlatPanel_250L_ASU/0.orig/alphat.liquid | 47 +++++ .../FlatPanel_250L_ASU/0.orig/epsilon.gas | 48 +++++ .../FlatPanel_250L_ASU/0.orig/epsilon.liquid | 48 +++++ .../FlatPanel_250L_ASU/0.orig/epsilonm | 48 +++++ .../FlatPanel_250L_ASU/0.orig/k.gas | 48 +++++ .../FlatPanel_250L_ASU/0.orig/k.liquid | 48 +++++ tutorial_cases/FlatPanel_250L_ASU/0.orig/km | 48 +++++ .../FlatPanel_250L_ASU/0.orig/nut.gas | 46 +++++ .../FlatPanel_250L_ASU/0.orig/nut.liquid | 46 +++++ tutorial_cases/FlatPanel_250L_ASU/0.orig/p | 39 ++++ .../FlatPanel_250L_ASU/0.orig/p_rgh | 40 +++++ tutorial_cases/FlatPanel_250L_ASU/AllmassT | 18 ++ tutorial_cases/FlatPanel_250L_ASU/README.md | 5 + tutorial_cases/FlatPanel_250L_ASU/constant/g | 21 +++ .../constant/momentumTransport.gas | 27 +++ .../constant/momentumTransport.liquid | 27 +++ .../constant/phaseProperties | 170 ++++++++++++++++++ .../constant/thermophysicalProperties.gas | 47 +++++ .../constant/thermophysicalProperties.liquid | 51 ++++++ tutorial_cases/FlatPanel_250L_ASU/presteps.sh | 4 + tutorial_cases/FlatPanel_250L_ASU/run.sh | 2 + .../FlatPanel_250L_ASU/system/blockMeshDict | 110 ++++++++++++ .../FlatPanel_250L_ASU/system/controlDict | 59 ++++++ .../FlatPanel_250L_ASU/system/controlDict~ | 59 ++++++ .../system/decomposeParDict | 45 +++++ .../FlatPanel_250L_ASU/system/fvConstraints | 23 +++ .../FlatPanel_250L_ASU/system/fvSchemes | 65 +++++++ .../FlatPanel_250L_ASU/system/fvSolution | 84 +++++++++ .../FlatPanel_250L_ASU/system/panel.m4 | 110 ++++++++++++ .../FlatPanel_250L_ASU/system/setFieldsDict | 37 ++++ 38 files changed, 1771 insertions(+) create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/T.gas create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/T.liquid create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/U.gas create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/U.liquid create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/alpha.gas.orig create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/alpha.liquid.orig create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/alphat.gas create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/alphat.liquid create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/epsilon.gas create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/epsilon.liquid create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/epsilonm create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/k.gas create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/k.liquid create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/km create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/nut.gas create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/nut.liquid create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/p create mode 100644 tutorial_cases/FlatPanel_250L_ASU/0.orig/p_rgh create mode 100644 tutorial_cases/FlatPanel_250L_ASU/AllmassT create mode 100644 tutorial_cases/FlatPanel_250L_ASU/README.md create mode 100644 tutorial_cases/FlatPanel_250L_ASU/constant/g create mode 100644 tutorial_cases/FlatPanel_250L_ASU/constant/momentumTransport.gas create mode 100644 tutorial_cases/FlatPanel_250L_ASU/constant/momentumTransport.liquid create mode 100644 tutorial_cases/FlatPanel_250L_ASU/constant/phaseProperties create mode 100644 tutorial_cases/FlatPanel_250L_ASU/constant/thermophysicalProperties.gas create mode 100644 tutorial_cases/FlatPanel_250L_ASU/constant/thermophysicalProperties.liquid create mode 100644 tutorial_cases/FlatPanel_250L_ASU/presteps.sh create 
mode 100644 tutorial_cases/FlatPanel_250L_ASU/run.sh create mode 100644 tutorial_cases/FlatPanel_250L_ASU/system/blockMeshDict create mode 100644 tutorial_cases/FlatPanel_250L_ASU/system/controlDict create mode 100644 tutorial_cases/FlatPanel_250L_ASU/system/controlDict~ create mode 100644 tutorial_cases/FlatPanel_250L_ASU/system/decomposeParDict create mode 100644 tutorial_cases/FlatPanel_250L_ASU/system/fvConstraints create mode 100644 tutorial_cases/FlatPanel_250L_ASU/system/fvSchemes create mode 100644 tutorial_cases/FlatPanel_250L_ASU/system/fvSolution create mode 100644 tutorial_cases/FlatPanel_250L_ASU/system/panel.m4 create mode 100644 tutorial_cases/FlatPanel_250L_ASU/system/setFieldsDict diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 55a639ba..05effe9c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -176,4 +176,9 @@ jobs: cd tutorial_cases/airlift_40m bash run.sh cd ../../ + - name: Run flat panel reactor tutorial + run: | + cd tutorial_cases/FlatPanel_250L_ASU + bash run.sh + cd ../../ diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/T.gas b/tutorial_cases/FlatPanel_250L_ASU/0.orig/T.gas new file mode 100644 index 00000000..1d515f11 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/T.gas @@ -0,0 +1,44 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object T.gas; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 0 0 1 0 0 0]; + +internalField uniform 308; + +boundaryField +{ + "wall_.*" + { + type zeroGradient; + } + outlet + { + type inletOutlet; + phi phi.gas; + inletValue $internalField; + value $internalField; + } + inlet + { + type fixedValue; + value $internalField; + } + frontAndBackPlanes + { + type empty; + } +} + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/T.liquid b/tutorial_cases/FlatPanel_250L_ASU/0.orig/T.liquid new file mode 100644 index 00000000..2d4e3a4d --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/T.liquid @@ -0,0 +1,44 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object T.liquid; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 0 0 1 0 0 0]; + +internalField uniform 308; + +boundaryField +{ + "wall_.*" + { + type zeroGradient; + } + outlet + { + type inletOutlet; + phi phi.liquid; + inletValue $internalField; + value $internalField; + } + inlet + { + type fixedValue; + value $internalField; + } + frontAndBackPlanes + { + type empty; + } +} + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/U.gas b/tutorial_cases/FlatPanel_250L_ASU/0.orig/U.gas new file mode 100644 index 00000000..808205dc --- /dev/null +++ 
b/tutorial_cases/FlatPanel_250L_ASU/0.orig/U.gas @@ -0,0 +1,40 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format binary; + class volVectorField; + object U.gas; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 1 -1 0 0 0 0]; + +internalField uniform (0 0.1 0); + +boundaryField +{ + inlet + { + type fixedValue; + value $internalField; + } + outlet + { + type pressureInletOutletVelocity; + phi phi.gas; + value $internalField; + } + "wall_.*" + { + type fixedValue; + value uniform (0 0 0); + } +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/U.liquid b/tutorial_cases/FlatPanel_250L_ASU/0.orig/U.liquid new file mode 100644 index 00000000..84b3317a --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/U.liquid @@ -0,0 +1,40 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format binary; + class volVectorField; + object U.liquid; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 1 -1 0 0 0 0]; + +internalField uniform (0 0 0); + +boundaryField +{ + inlet + { + type fixedValue; + value $internalField; + } + outlet + { + type pressureInletOutletVelocity; + phi phi.liquid; + value $internalField; + } + "wall_.*" + { + type fixedValue; + value uniform (0 0 0); + } +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/alpha.gas.orig b/tutorial_cases/FlatPanel_250L_ASU/0.orig/alpha.gas.orig new file mode 100644 index 00000000..e827a9ed --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/alpha.gas.orig @@ -0,0 +1,41 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + location "0"; + object alpha.gas; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 0 0 0 0 0 0]; + +internalField uniform 0; + +boundaryField +{ + inlet + { + type fixedValue; + value uniform 0.5; + } + outlet + { + type inletOutlet; + phi phi.gas; + inletValue uniform 1; + value uniform 1; + } + "wall_.*" + { + type zeroGradient; + } +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/alpha.liquid.orig b/tutorial_cases/FlatPanel_250L_ASU/0.orig/alpha.liquid.orig new file mode 100644 index 00000000..c2531311 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/alpha.liquid.orig @@ -0,0 +1,40 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + 
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object alpha.liquid; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 0 0 0 0 0 0]; + +internalField uniform 1; + +boundaryField +{ + inlet + { + type fixedValue; + value uniform 0.5; + } + outlet + { + type inletOutlet; + phi phi.liquid; + inletValue uniform 0; + value uniform 0; + } + "wall_.*" + { + type zeroGradient; + } +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/alphat.gas b/tutorial_cases/FlatPanel_250L_ASU/0.orig/alphat.gas new file mode 100644 index 00000000..8d53c794 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/alphat.gas @@ -0,0 +1,47 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object alphat.gas; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [1 -1 -1 0 0 0 0]; + +internalField uniform 0; + +boundaryField +{ + inlet + { + type calculated; + value $internalField; + } + + outlet + { + type calculated; + value $internalField; + } + + "wall_.*" + { + type compressible::alphatWallFunction; + Prt 0.85; + value $internalField; + } + + defaultFaces + { + type empty; + } +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/alphat.liquid b/tutorial_cases/FlatPanel_250L_ASU/0.orig/alphat.liquid new file mode 100644 index 00000000..4e503479 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/alphat.liquid @@ -0,0 +1,47 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object alphat.liquid; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [1 -1 -1 0 0 0 0]; + +internalField uniform 0; + +boundaryField +{ + inlet + { + type calculated; + value $internalField; + } + + outlet + { + type calculated; + value $internalField; + } + + "wall_.*" + { + type compressible::alphatWallFunction; + Prt 0.85; + value $internalField; + } + + defaultFaces + { + type empty; + } +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/epsilon.gas b/tutorial_cases/FlatPanel_250L_ASU/0.orig/epsilon.gas new file mode 100644 index 00000000..24d4f17e --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/epsilon.gas @@ -0,0 +1,48 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / 
A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object epsilon.gas; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 2 -3 0 0 0 0]; + +internalField uniform 1.5e-4; + +boundaryField +{ + inlet + { + type fixedValue; + value $internalField; + } + + outlet + { + type inletOutlet; + phi phi.gas; + inletValue $internalField; + value $internalField; + } + + "wall_.*" + { + type epsilonWallFunction; + value $internalField; + } + + defaultFaces + { + type empty; + } +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/epsilon.liquid b/tutorial_cases/FlatPanel_250L_ASU/0.orig/epsilon.liquid new file mode 100644 index 00000000..c307061f --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/epsilon.liquid @@ -0,0 +1,48 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object epsilon.liquid; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 2 -3 0 0 0 0]; + +internalField uniform 1.5e-4; + +boundaryField +{ + inlet + { + type fixedValue; + value $internalField; + } + + outlet + { + type inletOutlet; + phi phi.liquid; + inletValue $internalField; + value $internalField; + } + + "wall_.*" + { + type epsilonWallFunction; + value $internalField; + } + + defaultFaces + { + type empty; + } +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/epsilonm b/tutorial_cases/FlatPanel_250L_ASU/0.orig/epsilonm new file mode 100644 index 00000000..cc6f87e6 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/epsilonm @@ -0,0 +1,48 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object epsilonm; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 2 -3 0 0 0 0]; + +internalField uniform 1.5e-4; + +boundaryField +{ + inlet + { + type fixedValue; + value $internalField; + } + + outlet + { + type inletOutlet; + phi phim; + inletValue $internalField; + value $internalField; + } + + "wall_.*" + { + type zeroGradient; + value $internalField; + } + + defaultFaces + { + type empty; + } +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/k.gas b/tutorial_cases/FlatPanel_250L_ASU/0.orig/k.gas new file mode 100644 index 00000000..0a844f7a --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/k.gas @@ -0,0 +1,48 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / 
A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object k.gas; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 2 -2 0 0 0 0]; + +internalField uniform 3.75e-5; + +boundaryField +{ + inlet + { + type fixedValue; + value $internalField; + } + + outlet + { + type inletOutlet; + phi phi.gas; + inletValue $internalField; + value $internalField; + } + + "wall_.*" + { + type kqRWallFunction; + value $internalField; + } + + defaultFaces + { + type empty; + } +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/k.liquid b/tutorial_cases/FlatPanel_250L_ASU/0.orig/k.liquid new file mode 100644 index 00000000..86ac25af --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/k.liquid @@ -0,0 +1,48 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object k.liquid; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 2 -2 0 0 0 0]; + +internalField uniform 3.75e-5; + +boundaryField +{ + inlet + { + type fixedValue; + value $internalField; + } + + outlet + { + type inletOutlet; + phi phi.liquid; + inletValue $internalField; + value $internalField; + } + + "wall_.*" + { + type kqRWallFunction; + value $internalField; + } + + defaultFaces + { + type empty; + } +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/km b/tutorial_cases/FlatPanel_250L_ASU/0.orig/km new file mode 100644 index 00000000..6d8fb00f --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/km @@ -0,0 +1,48 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object km; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 2 -2 0 0 0 0]; + +internalField uniform 3.75e-5; + +boundaryField +{ + inlet + { + type fixedValue; + value $internalField; + } + + outlet + { + type inletOutlet; + phi phim; + inletValue $internalField; + value $internalField; + } + + "wall_.*" + { + type zeroGradient; + value $internalField; + } + + defaultFaces + { + type empty; + } +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/nut.gas b/tutorial_cases/FlatPanel_250L_ASU/0.orig/nut.gas new file mode 100644 index 00000000..d2208625 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/nut.gas @@ -0,0 +1,46 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | 
+\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object nut.gas; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 2 -1 0 0 0 0]; + +internalField uniform 1e-8; + +boundaryField +{ + inlet + { + type calculated; + value $internalField; + } + + outlet + { + type calculated; + value $internalField; + } + + "wall_.*" + { + type nutkWallFunction; + value $internalField; + } + + defaultFaces + { + type empty; + } +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/nut.liquid b/tutorial_cases/FlatPanel_250L_ASU/0.orig/nut.liquid new file mode 100644 index 00000000..f08faba7 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/nut.liquid @@ -0,0 +1,46 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object nut.liquid; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 2 -1 0 0 0 0]; + +internalField uniform 1e-8; + +boundaryField +{ + inlet + { + type calculated; + value $internalField; + } + + outlet + { + type calculated; + value $internalField; + } + + "wall_.*" + { + type nutkWallFunction; + value $internalField; + } + + defaultFaces + { + type empty; + } +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/p b/tutorial_cases/FlatPanel_250L_ASU/0.orig/p new file mode 100644 index 00000000..46b3fa42 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/p @@ -0,0 +1,39 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object p; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [1 -1 -2 0 0 0 0]; + +internalField uniform 1e5; + +boundaryField +{ + inlet + { + type calculated; + value $internalField; + } + outlet + { + type calculated; + value $internalField; + } + "wall_.*" + { + type calculated; + value $internalField; + } +} + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // diff --git a/tutorial_cases/FlatPanel_250L_ASU/0.orig/p_rgh b/tutorial_cases/FlatPanel_250L_ASU/0.orig/p_rgh new file mode 100644 index 00000000..c66435ba --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/0.orig/p_rgh @@ -0,0 +1,40 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class volScalarField; + object p_rgh; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + 
+dimensions [1 -1 -2 0 0 0 0]; + +internalField uniform 1e5; + +boundaryField +{ + inlet + { + type fixedFluxPressure; + value $internalField; + } + outlet + { + type prghPressure; + p $internalField; + value $internalField; + } + "wall_.*" + { + type fixedFluxPressure; + value $internalField; + } +} + +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // diff --git a/tutorial_cases/FlatPanel_250L_ASU/AllmassT b/tutorial_cases/FlatPanel_250L_ASU/AllmassT new file mode 100644 index 00000000..84a47c33 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/AllmassT @@ -0,0 +1,18 @@ +#!/bin/sh +cd ${0%/*} || exit 1 # Run from this directory + +# Source tutorial run functions +. $WM_PROJECT_DIR/bin/tools/RunFunctions + +mv constant/NofvModels constant/fvModels + +echo -e " - Manipulate default specie" +sed 's|^defaultSpecie.*|defaultSpecie air;|' -i constant/thermophysicalProperties.gas + +echo -e " - Manipulate controlDict" +sed 's|^endTime.*|endTime 160;|' -i system/controlDict +sed 's|^deltaT.*|deltaT 0.0001;|' -i system/controlDict +sed 's|^adjustTimeStep.*|adjustTimeStep no;//yes;//|' -i system/controlDict + +## runApplication $(getApplication) +#------------------------------------------------------------------------------ diff --git a/tutorial_cases/FlatPanel_250L_ASU/README.md b/tutorial_cases/FlatPanel_250L_ASU/README.md new file mode 100644 index 00000000..7ab83e55 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/README.md @@ -0,0 +1,5 @@ +### Flat Panel reactor + +Single core exec + +1. `bash run.sh` diff --git a/tutorial_cases/FlatPanel_250L_ASU/constant/g b/tutorial_cases/FlatPanel_250L_ASU/constant/g new file mode 100644 index 00000000..2c53688b --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/constant/g @@ -0,0 +1,21 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class uniformDimensionedVectorField; + location "constant"; + object g; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +dimensions [0 1 -2 0 0 0 0]; +value (0 -9.81 0); + + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/constant/momentumTransport.gas b/tutorial_cases/FlatPanel_250L_ASU/constant/momentumTransport.gas new file mode 100644 index 00000000..e73147a8 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/constant/momentumTransport.gas @@ -0,0 +1,27 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class dictionary; + location "constant"; + object momentumTransport.gas; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +simulationType RAS; + +RAS +{ + model mixtureKEpsilon; // continuousGasKEpsilon; + + turbulence on; + printCoeffs on; +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/constant/momentumTransport.liquid 
b/tutorial_cases/FlatPanel_250L_ASU/constant/momentumTransport.liquid new file mode 100644 index 00000000..f3205aa6 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/constant/momentumTransport.liquid @@ -0,0 +1,27 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class dictionary; + location "constant"; + object momentumTransport.liquid; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +simulationType RAS; + +RAS +{ + model mixtureKEpsilon; // LaheyKEpsilon; + + turbulence on; + printCoeffs on; +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/constant/phaseProperties b/tutorial_cases/FlatPanel_250L_ASU/constant/phaseProperties new file mode 100644 index 00000000..3d165947 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/constant/phaseProperties @@ -0,0 +1,170 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class dictionary; + location "constant"; + object phaseProperties; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +type basicMultiphaseSystem; + +phases (gas liquid); + +gas +{ + type purePhaseModel; + diameterModel isothermal; + isothermalCoeffs + { + d0 3e-3; + p0 1e5; + } + + residualAlpha 1e-6; +} + +liquid +{ + type purePhaseModel; + diameterModel constant; + constantCoeffs + { + d 1e-4; + } + + residualAlpha 1e-6; +} + +blending +{ + default + { + type linear; + minFullyContinuousAlpha.gas 0.7; + minPartlyContinuousAlpha.gas 0.3; + minFullyContinuousAlpha.liquid 0.7; + minPartlyContinuousAlpha.liquid 0.3; + } + + drag + { + type linear; + minFullyContinuousAlpha.gas 0.7; + minPartlyContinuousAlpha.gas 0.5; + minFullyContinuousAlpha.liquid 0.7; + minPartlyContinuousAlpha.liquid 0.5; + } +} + +surfaceTension +( + (gas and liquid) + { + type constant; + sigma 0.07; + } +); + +aspectRatio +( + (gas in liquid) + { + type constant; + E0 1.0; + } + + (liquid in gas) + { + type constant; + E0 1.0; + } +); + +drag +( + (gas in liquid) + { + type SchillerNaumann; + residualRe 1e-3; + swarmCorrection + { + type none; + } + } + + (liquid in gas) + { + type SchillerNaumann; + residualRe 1e-3; + swarmCorrection + { + type none; + } + } + + (gas and liquid) + { + type segregated; + m 0.5; + n 8; + swarmCorrection + { + type none; + } + } +); + +virtualMass +( + (gas in liquid) + { + type constantCoefficient; + Cvm 0.5; + } + + (liquid in gas) + { + type constantCoefficient; + Cvm 0.5; + } +); + +heatTransfer +( + (gas in liquid) + { + type RanzMarshall; + residualAlpha 1e-4; + } + + (liquid in gas) + { + type RanzMarshall; + residualAlpha 1e-4; + } +); + +phaseTransfer +(); + +lift +(); + +wallLubrication +(); + +turbulentDispersion +(); + +interfaceCompression +(); + +// ************************************************************************* // diff --git 
a/tutorial_cases/FlatPanel_250L_ASU/constant/thermophysicalProperties.gas b/tutorial_cases/FlatPanel_250L_ASU/constant/thermophysicalProperties.gas new file mode 100644 index 00000000..28d92618 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/constant/thermophysicalProperties.gas @@ -0,0 +1,47 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class dictionary; + location "constant"; + object thermophysicalProperties.gas; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +thermoType +{ + type heRhoThermo; + mixture pureMixture; + transport const; + thermo hConst; + equationOfState perfectGas; + specie specie; + energy sensibleInternalEnergy; +} + +mixture +{ + specie + { + molWeight 28.9; + } + thermodynamics + { + Cp 1007; + Hf 0; + } + transport + { + mu 1.84e-05; + Pr 0.7; + } +} + + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/constant/thermophysicalProperties.liquid b/tutorial_cases/FlatPanel_250L_ASU/constant/thermophysicalProperties.liquid new file mode 100644 index 00000000..afb88b7e --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/constant/thermophysicalProperties.liquid @@ -0,0 +1,51 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class dictionary; + location "constant"; + object thermophysicalProperties.liquid; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +thermoType +{ + type heRhoThermo; + mixture pureMixture; + transport const; + thermo eConst; + equationOfState rPolynomial; + specie specie; + energy sensibleInternalEnergy; +} + +mixture +{ + specie + { + molWeight 18; + } + equationOfState + { + C (0.001278 -2.1055e-06 3.9689e-09 4.3772e-13 -2.0225e-16); + } + thermodynamics + { + Cv 4195; + Hf 0; + } + transport + { + mu 3.645e-4; + Pr 2.289; + } +} + + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/presteps.sh b/tutorial_cases/FlatPanel_250L_ASU/presteps.sh new file mode 100644 index 00000000..238ba587 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/presteps.sh @@ -0,0 +1,4 @@ +cp -r 0.orig 0 +m4 ./system/panel.m4 > ./system/blockMeshDict +blockMesh +setFields diff --git a/tutorial_cases/FlatPanel_250L_ASU/run.sh b/tutorial_cases/FlatPanel_250L_ASU/run.sh new file mode 100644 index 00000000..bef3c727 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/run.sh @@ -0,0 +1,2 @@ +bash presteps.sh +birdmultiphaseEulerFoam diff --git a/tutorial_cases/FlatPanel_250L_ASU/system/blockMeshDict b/tutorial_cases/FlatPanel_250L_ASU/system/blockMeshDict new file mode 100644 index 00000000..d2203a4e --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/system/blockMeshDict @@ -0,0 +1,110 @@ +//--------------------------------*- C++ -*---------------------------------- +// * * * * * * * * * * * * * * * * * * * * * * * * * * 
* * * * * * * * * * * // +FoamFile +{ + version 2.0; + format ascii; + class dictionary; + object blockMeshDict; +} +// ************************************ + + + + + + +convertToMeters 1; + + //wall-sparge distance + // sparger diameter + // total lenght + // total depth +//define(H, 0.9144) //total height + //total height + + + + + + +vertices +( + ( 0 0 0 ) // Vertex block0_0 = 0 + ( 1.1176 0 0 ) // Vertex block0_1 = 1 + ( 1.1176 1.1 0 ) // Vertex block0_2 = 2 + ( 0 1.1 0 ) // Vertex block0_3 = 3 + ( 0.02925 0 0.02925 ) // Vertex block0_4 = 4 + ( 1.08835 0 0.02925 ) // Vertex block0_5 = 5 + ( 1.08835 1.1 0.02925 ) // Vertex block0_6 = 6 + ( 0.02925 1.1 0.02925 ) // Vertex block0_7 = 7 + ( 0.02925 0 0.03425) // Vertex block0_8 = 8 + ( 1.08835 0 0.03425) // Vertex block0_9 = 9 + ( 1.08835 1.1 0.03425) // Vertex block0_10 = 10 + ( 0.02925 1.1 0.03425) // Vertex block0_11 = 11 + ( 0 0 0.0635 ) // Vertex block0_12 = 12 + ( 1.1176 0 0.0635 ) // Vertex block0_13 = 13 + ( 1.1176 1.1 0.0635 ) // Vertex block0_14 = 14 + ( 0 1.1 0.0635 ) // Vertex block0_15 = 15 + +); + +blocks +( + //block 0 + hex ( 0 1 2 3 4 5 6 7 ) (100 100 10) simpleGrading (1 1 1) + //block 1 + hex ( 5 1 2 6 9 13 14 10) ( 10 100 10 ) simpleGrading (1 1 1) + //block 2 + hex ( 8 9 10 11 12 13 14 15) ( 100 100 10) simpleGrading (1 1 1) + //block 3 + hex ( 0 4 7 3 12 8 11 15 ) ( 10 100 10 ) simpleGrading (1 1 1) + //block 4 + hex (4 5 6 7 8 9 10 11) ( 100 100 10 ) simpleGrading (1 1 1) +); + +edges +( +); + +patches +( + patch inlet + ( + ( 4 5 9 8 ) + ) + + patch outlet + ( + ( 3 2 6 7 ) + ( 11 10 14 15 ) + ( 3 7 11 15 ) + ( 2 14 10 6 ) + ( 7 6 10 11) + ) + + wall wall_sides + ( + ( 0 3 15 12 ) + ( 1 13 14 2 ) + ) + + wall wall_frontback + ( + ( 12 15 14 13 ) + ( 0 1 2 3 ) + ) + + wall wall_botttom + ( + ( 0 1 5 4 ) + ( 8 9 13 12 ) + ( 0 4 8 12 ) + ( 1 13 9 5) + ) +); + +mergePatchPairs +( +); + diff --git a/tutorial_cases/FlatPanel_250L_ASU/system/controlDict b/tutorial_cases/FlatPanel_250L_ASU/system/controlDict new file mode 100644 index 00000000..1ae159fc --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/system/controlDict @@ -0,0 +1,59 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class dictionary; + location "system"; + object controlDict; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +application multiphaseEulerFoam; + +startFrom startTime; + +startTime 0; + +stopAt endTime; + +endTime 100; + +deltaT 0.0005; + +writeControl adjustableRunTime; + +writeInterval 0.5; + +purgeWrite 0; + +writeFormat ascii; + +writePrecision 6; + +writeCompression off; + +timeFormat general; + +timePrecision 6; + +runTimeModifiable yes; + +adjustTimeStep yes;//no; + +maxCo 0.3; + +maxDeltaT 1; + +functions +{ + #includeFunc fieldAverage(U.gas, U.liquid, alpha.gas, p) +} + + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/system/controlDict~ b/tutorial_cases/FlatPanel_250L_ASU/system/controlDict~ new file mode 100644 index 00000000..9e045ee7 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/system/controlDict~ @@ -0,0 +1,59 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + 
========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class dictionary; + location "system"; + object controlDict; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +application multiphaseEulerFoam; + +startFrom startTime; + +startTime 0; + +stopAt endTime; + +endTime 100; + +deltaT 0.0005; + +writeControl runTime; + +writeInterval 1; + +purgeWrite 0; + +writeFormat ascii; + +writePrecision 6; + +writeCompression off; + +timeFormat general; + +timePrecision 6; + +runTimeModifiable yes; + +adjustTimeStep no; + +maxCo 0.5; + +maxDeltaT 1; + +functions +{ + #includeFunc fieldAverage(U.gas, U.liquid, alpha.gas, p) +} + + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/system/decomposeParDict b/tutorial_cases/FlatPanel_250L_ASU/system/decomposeParDict new file mode 100644 index 00000000..a4095548 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/system/decomposeParDict @@ -0,0 +1,45 @@ +/*--------------------------------*- C++ -*----------------------------------*\ +| ========= | | +| \\ / F ield | OpenFOAM: The Open Source CFD Toolbox | +| \\ / O peration | Version: 2.4.0 | +| \\ / A nd | Web: www.OpenFOAM.org | +| \\/ M anipulation | | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + version 2.0; + format ascii; + class dictionary; + location "system"; + object decomposeParDict; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +numberOfSubdomains 32; // running the case + +method hierarchical; + +simpleCoeffs +{ + n ( 2 16 1 ); + delta 0.001; +} + +hierarchicalCoeffs +{ + n ( 2 16 1 ); + delta 0.001; + order xyz; +} + +manualCoeffs +{ + dataFile ""; +} + +distributed no; + +roots ( ); + + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/system/fvConstraints b/tutorial_cases/FlatPanel_250L_ASU/system/fvConstraints new file mode 100644 index 00000000..dbf1d468 --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/system/fvConstraints @@ -0,0 +1,23 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class dictionary; + object fvConstraints; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +limitp +{ + type limitPressure; + + min 1e4; +} + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/system/fvSchemes b/tutorial_cases/FlatPanel_250L_ASU/system/fvSchemes new file mode 100644 index 00000000..21904d1e --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/system/fvSchemes @@ -0,0 +1,65 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | 
+\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class dictionary; + location "system"; + object fvSchemes; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +ddtSchemes +{ + default Euler; +} + +gradSchemes +{ + default Gauss linear; +} + +divSchemes +{ + default none; + + div(phi,alpha.gas) Gauss vanLeer; + div(phi,alpha.liquid) Gauss vanLeer; + div(phir,alpha.liquid,alpha.gas) Gauss vanLeer; + div(phir,alpha.gas,alpha.liquid) Gauss vanLeer; + + "div\(alphaRhoPhi.*,U.*\)" Gauss limitedLinearV 1; + "div\(phi.*,U.*\)" Gauss limitedLinearV 1; + + "div\(alphaRhoPhi.*,(h|e).*\)" Gauss limitedLinear 1; + "div\(alphaRhoPhi.*,K.*\)" Gauss limitedLinear 1; + "div\(alphaRhoPhi.*,\(p\|thermo:rho.*\)\)" Gauss limitedLinear 1; + + "div\(alphaRhoPhi.*,(k|epsilon).*\)" Gauss limitedLinear 1; + "div\(phim,(k|epsilon)m\)" Gauss limitedLinear 1; + + "div\(\(\(\(alpha.*\*thermo:rho.*\)*nuEff.*\)\*dev2\(T\(grad\(U.*\)\)\)\)\)" Gauss linear; +} + +laplacianSchemes +{ + default Gauss linear uncorrected; +} + +interpolationSchemes +{ + default linear; +} + +snGradSchemes +{ + default uncorrected; +} + + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/system/fvSolution b/tutorial_cases/FlatPanel_250L_ASU/system/fvSolution new file mode 100644 index 00000000..f080b7ff --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/system/fvSolution @@ -0,0 +1,84 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class dictionary; + location "system"; + object fvSolution; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +solvers +{ + "alpha.*" + { + nAlphaCorr 1; + nAlphaSubCycles 2; + } + + p_rgh + { + solver GAMG; + smoother DIC; + tolerance 1e-8; + relTol 0; + } + + p_rghFinal + { + $p_rgh; + relTol 0; + } + + "U.*" + { + solver smoothSolver; + smoother symGaussSeidel; + tolerance 1e-7; + relTol 0; + minIter 1; + } + + "e.*" + { + solver smoothSolver; + smoother symGaussSeidel; + tolerance 1e-7; + relTol 0; + minIter 1; + } + + "(k|epsilon).*" + { + solver smoothSolver; + smoother symGaussSeidel; + tolerance 1e-7; + relTol 0; + minIter 1; + } +} + +PIMPLE +{ + nOuterCorrectors 3; + nCorrectors 1; + nNonOrthogonalCorrectors 0; + +} + +relaxationFactors +{ + equations + { + ".*" 1; + } +} + + +// ************************************************************************* // diff --git a/tutorial_cases/FlatPanel_250L_ASU/system/panel.m4 b/tutorial_cases/FlatPanel_250L_ASU/system/panel.m4 new file mode 100644 index 00000000..e3e9ccdc --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/system/panel.m4 @@ -0,0 +1,110 @@ +//--------------------------------*- C++ -*---------------------------------- +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // +FoamFile +{ + version 2.0; + format ascii; + class dictionary; + object blockMeshDict; +} +// ************************************ +changecom(//)changequote([,]) +define(calc, [esyscmd(perl -e 'printf ($1)')]) +define(calcint, [esyscmd(perl -e 'printf int($1)')]) +define(VCOUNT, 0) +define(vlabel, [[// ]Vertex $1 = VCOUNT define($1, 
VCOUNT)define([VCOUNT], incr(VCOUNT))]) + +convertToMeters 1; + +define(h, 0.02925) //wall-sparge distance +define(b, 0.005) // sparger diameter +define(L, 1.1176) // total lenght +define(D, 0.0635) // total depth +//define(H, 0.9144) //total height +define(H, 1.1) //total height +define(Lmh, calc(L-h)) +define(hpb, calc(h+b)) +define(NPX, 100) +define(NPZ, 10) +define(NPY, 100) + +vertices +( + ( 0 0 0 ) vlabel(block0_0) + ( L 0 0 ) vlabel(block0_1) + ( L H 0 ) vlabel(block0_2) + ( 0 H 0 ) vlabel(block0_3) + ( h 0 h ) vlabel(block0_4) + ( Lmh 0 h ) vlabel(block0_5) + ( Lmh H h ) vlabel(block0_6) + ( h H h ) vlabel(block0_7) + ( h 0 hpb) vlabel(block0_8) + ( Lmh 0 hpb) vlabel(block0_9) + ( Lmh H hpb) vlabel(block0_10) + ( h H hpb) vlabel(block0_11) + ( 0 0 D ) vlabel(block0_12) + ( L 0 D ) vlabel(block0_13) + ( L H D ) vlabel(block0_14) + ( 0 H D ) vlabel(block0_15) + +); + +blocks +( + //block 0 + hex ( block0_0 block0_1 block0_2 block0_3 block0_4 block0_5 block0_6 block0_7 ) (NPX NPY NPZ) simpleGrading (1 1 1) + //block 1 + hex ( block0_5 block0_1 block0_2 block0_6 block0_9 block0_13 block0_14 block0_10) ( NPZ NPY NPZ ) simpleGrading (1 1 1) + //block 2 + hex ( block0_8 block0_9 block0_10 block0_11 block0_12 block0_13 block0_14 block0_15) ( NPX NPY NPZ) simpleGrading (1 1 1) + //block 3 + hex ( block0_0 block0_4 block0_7 block0_3 block0_12 block0_8 block0_11 block0_15 ) ( NPZ NPY NPZ ) simpleGrading (1 1 1) + //block 4 + hex (block0_4 block0_5 block0_6 block0_7 block0_8 block0_9 block0_10 block0_11) ( NPX NPY NPZ ) simpleGrading (1 1 1) +); + +edges +( +); + +patches +( + patch inlet + ( + ( block0_4 block0_5 block0_9 block0_8 ) + ) + + patch outlet + ( + ( block0_3 block0_2 block0_6 block0_7 ) + ( block0_11 block0_10 block0_14 block0_15 ) + ( block0_3 block0_7 block0_11 block0_15 ) + ( block0_2 block0_14 block0_10 block0_6 ) + ( block0_7 block0_6 block0_10 block0_11) + ) + + wall wall_sides + ( + ( block0_0 block0_3 block0_15 block0_12 ) + ( block0_1 block0_13 block0_14 block0_2 ) + ) + + wall wall_frontback + ( + ( block0_12 block0_15 block0_14 block0_13 ) + ( block0_0 block0_1 block0_2 block0_3 ) + ) + + wall wall_botttom + ( + ( block0_0 block0_1 block0_5 block0_4 ) + ( block0_8 block0_9 block0_13 block0_12 ) + ( block0_0 block0_4 block0_8 block0_12 ) + ( block0_1 block0_13 block0_9 block0_5) + ) +); + +mergePatchPairs +( +); + diff --git a/tutorial_cases/FlatPanel_250L_ASU/system/setFieldsDict b/tutorial_cases/FlatPanel_250L_ASU/system/setFieldsDict new file mode 100644 index 00000000..ce17d73c --- /dev/null +++ b/tutorial_cases/FlatPanel_250L_ASU/system/setFieldsDict @@ -0,0 +1,37 @@ +/*--------------------------------*- C++ -*----------------------------------*\ + ========= | + \\ / F ield | OpenFOAM: The Open Source CFD Toolbox + \\ / O peration | Website: https://openfoam.org + \\ / A nd | Version: 9 + \\/ M anipulation | +\*---------------------------------------------------------------------------*/ +FoamFile +{ + format ascii; + class dictionary; + location "system"; + object setFieldsDict; +} +// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * // + +defaultFieldValues +( + volScalarFieldValue alpha.gas 1 + volScalarFieldValue alpha.liquid 0 +); + +regions +( + boxToCell + { + box (0 0 0) (1.2 0.914 0.1); + fieldValues + ( + volScalarFieldValue alpha.gas 0 + volScalarFieldValue alpha.liquid 1 + ); + } +); + + +// ************************************************************************* // From 4706d99bcb9ef668a6ce74d1e018e2a289d24ab6 Mon Sep 17 
00:00:00 2001 From: Malik Date: Wed, 9 Jul 2025 16:33:30 -0600 Subject: [PATCH 35/38] adding ref and only run 1 timestep --- tutorial_cases/FlatPanel_250L_ASU/README.md | 10 ++++++++++ tutorial_cases/FlatPanel_250L_ASU/system/controlDict | 2 +- tutorial_cases/FlatPanel_250L_ASU/system/panel.m4 | 6 +++--- 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/tutorial_cases/FlatPanel_250L_ASU/README.md b/tutorial_cases/FlatPanel_250L_ASU/README.md index 7ab83e55..c3c0d170 100644 --- a/tutorial_cases/FlatPanel_250L_ASU/README.md +++ b/tutorial_cases/FlatPanel_250L_ASU/README.md @@ -1,5 +1,15 @@ ### Flat Panel reactor +``` +@inproceedings{parra2022fluid, + title={Fluid-Dynamic Simulation of a Flat-Panel Bioreactor with Emphasis on Mixing, Mass Transfer and Inorganic CO 2 Chemistry}, + author={Parra-Alvarez, John and Lua, Mauro and Laurens, Lieve and Sitaraman, Hari and Stickel, Jonathan and Mark, Heinnickel and Troy, Paddock}, + booktitle={2022 AIChE Annual Meeting}, + year={2022}, + organization={AIChE} +} +``` + Single core exec 1. `bash run.sh` diff --git a/tutorial_cases/FlatPanel_250L_ASU/system/controlDict b/tutorial_cases/FlatPanel_250L_ASU/system/controlDict index 1ae159fc..baa693c3 100644 --- a/tutorial_cases/FlatPanel_250L_ASU/system/controlDict +++ b/tutorial_cases/FlatPanel_250L_ASU/system/controlDict @@ -20,7 +20,7 @@ startFrom startTime; startTime 0; -stopAt endTime; +stopAt writeNow;//endTime; endTime 100; diff --git a/tutorial_cases/FlatPanel_250L_ASU/system/panel.m4 b/tutorial_cases/FlatPanel_250L_ASU/system/panel.m4 index e3e9ccdc..ba92dc02 100644 --- a/tutorial_cases/FlatPanel_250L_ASU/system/panel.m4 +++ b/tutorial_cases/FlatPanel_250L_ASU/system/panel.m4 @@ -24,9 +24,9 @@ define(D, 0.0635) // total depth define(H, 1.1) //total height define(Lmh, calc(L-h)) define(hpb, calc(h+b)) -define(NPX, 100) -define(NPZ, 10) -define(NPY, 100) +define(NPX, 50) +define(NPZ, 5) +define(NPY, 50) vertices ( From bdda423816416dac801576cf163cf4b6c45cbc07 Mon Sep 17 00:00:00 2001 From: Malik Date: Thu, 10 Jul 2025 11:56:08 -0600 Subject: [PATCH 36/38] add a better contributing guideline --- docs/source/contribute.rst | 31 +++++++++++++++++++--- tutorial_cases/runall.sh | 53 ++++++++++++++++++++++++++++++++++++++ 2 files changed, 81 insertions(+), 3 deletions(-) create mode 100644 tutorial_cases/runall.sh diff --git a/docs/source/contribute.rst b/docs/source/contribute.rst index 54c1fd74..147e399c 100644 --- a/docs/source/contribute.rst +++ b/docs/source/contribute.rst @@ -1,7 +1,7 @@ Contributing ===== -We welcome pull requests from anybody! +We welcome pull requests from anyone! Formatting ------------ @@ -16,8 +16,33 @@ You can automatically enforce the formatting guidelines with bash fixFormat.sh -Test +Tests ------------ -Please ensure your contribution passes the tests in ``tests`` +Please ensure your contribution passes the tests in the CI (``.github/worklows/ci.yml``) +To run the unit tests +.. code-block:: console + + conda activate bird + pip install pytest + BIRD_HOME=`python -c "import bird; print(bird.BIRD_DIR)"` + cd ${BIRD_HOME}/../ + pytest . + +To run the regression tests + +.. code-block:: console + + source /etc/rc + conda activate bird + pip install pytest + BIRD_HOME=`python -c "import bird; print(bird.BIRD_DIR)"` + cd ${BIRD_HOME}/../tutorial_cases + bash run_all.sh + +Demonstrating and documenting your contribution +------------ +We prefer the use of docstrings and type hinting. A good example to follow are functions in ``bird/utilities/ofio.py``. 
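+
+As a minimal, hypothetical sketch of that style (assuming NumPy-style docstrings; the function below is for illustration only and is not part of ``bird``):
+
+.. code-block:: python
+
+   import numpy as np
+
+   def reactor_average(field: np.ndarray, cell_volumes: np.ndarray) -> float:
+       """Compute the volume-weighted average of a cell field.
+
+       Parameters
+       ----------
+       field : np.ndarray
+           Cell values of the field, shape (N,).
+       cell_volumes : np.ndarray
+           Cell volumes, shape (N,).
+
+       Returns
+       -------
+       float
+           Volume-weighted average of ``field``.
+       """
+       return float(np.sum(field * cell_volumes) / np.sum(cell_volumes))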
+ +If you add a new capability, please make sure to add relevant unit tests in the ``tests/`` folder. A good example to follow are tests ``tests/io`` diff --git a/tutorial_cases/runall.sh b/tutorial_cases/runall.sh new file mode 100644 index 00000000..c58bc65f --- /dev/null +++ b/tutorial_cases/runall.sh @@ -0,0 +1,53 @@ +#Compile solver + +conda activate bird +BIRD_HOME=`python -c "import bird; print(bird.BIRD_DIR)"` +cd ${BIRD_HOME}/../OFsolvers/birdmultiphaseEulerFoam +export WM_COMPILE_OPTION=Debug +./Allwmake +cd ../../ + +# Run all tests + +## Run deckwer17 PBE +cd experimental_cases/deckwer17 +bash run.sh +cd ../../ +## Run deckwer17 constantD +cd experimental_cases/deckwer17 +cp constant/phaseProperties_constantd constant/phaseProperties +bash run.sh +cd ../../ +## Run deckwer19 PBE +cd experimental_cases/deckwer19 +bash run.sh +cd ../../ +## Run side sparger tutorial +cd tutorial_cases/side_sparger +bash run.sh +cd ../../ +## Run bubble column tutorial +cd tutorial_cases/bubble_column_20L +bash run.sh +cd ../../ +## Run stirred-tank tutorial +cd tutorial_cases/stirred_tank +bash run.sh +cd ../../ +## Run reactive loop reactor tutorial +cd tutorial_cases/loop_reactor_reacting +bash run.sh +cd ../../ +## Run mixing loop reactor tutorial +cd tutorial_cases/loop_reactor_mixing +bash run.sh +cd ../../ +## Run airlift reactor tutorial +cd tutorial_cases/airlift_40m +bash run.sh +cd ../../ +## Run flat panel reactor tutorial +cd tutorial_cases/FlatPanel_250L_ASU +bash run.sh +cd ../../ + From dc55a82466dde79b3172f7a4a515512ad792ba44 Mon Sep 17 00:00:00 2001 From: Malik Date: Thu, 10 Jul 2025 11:56:39 -0600 Subject: [PATCH 37/38] add description of python interface --- docs/source/index.rst | 1 + docs/source/python_interface.rst | 68 ++++++++++++++++++++++++++++++++ 2 files changed, 69 insertions(+) create mode 100644 docs/source/python_interface.rst diff --git a/docs/source/index.rst b/docs/source/index.rst index 7f315c91..ad8f278b 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -27,6 +27,7 @@ We provide a solver ``birdmultiphaseEulerFoam`` that contains custom models adde meshing preprocess postprocess + python_interface tutorials contribute references diff --git a/docs/source/python_interface.rst b/docs/source/python_interface.rst new file mode 100644 index 00000000..9f721c50 --- /dev/null +++ b/docs/source/python_interface.rst @@ -0,0 +1,68 @@ +Python interface to OpenFOAM +===== + +We provide a simple python interface for reading OpenFOAM case results and dictionaries. +A more comprehensive interface is available through ``pyFoam`` although we found it difficult to use and without recent support, which motivated the implementation of a new python interface. + + +Read fields +------------ + +Internal scalar and vector fields +~~~~~~~~~~~~~~~~~~~~ + +Currently internal scalar and vector fields can be read using the python interface. In particular, note that +#. 1. We do not support reading tensor fields for now. +#. 2. We do not support reading boundary fields for now. +#. 3. We only read reconstructed files + +We are open to implementing 1. and 2. if need be. Implementing 3. is possible but will be more involved. + +The main function interface to read openFOAM fields is ``readOF`` in ``bird.utilities.ofio`` + +The function only reads fields written in ASCII format and decided based on the header whether the field is a scalar or a vector. In the case of scalar, ``readOF`` returns a (N,) numpy array, where N is the number of computational cells. 
In the case of a vector, ``readOF`` returns a (N,3) numpy array.
+
+If a uniform field is read, the number of cells may not be available from the field file and the function returns a float (equal to the uniform internal field value). If a numpy array is needed, the user can specify the number of cells in the field as an input.
+
+Reuse instead of re-read
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+The ``read_field`` function uses ``readOF`` and takes a dictionary ``field_dict`` as input, which is used to avoid reading the same field multiple times. For example, if one wants to compute the reactor-averaged concentration of a species, and then the reactor-averaged mass fraction of that species, the same mass fraction field is used in both cases. As fields are read, ``field_dict`` stores the mass fraction field and recognizes when the same field is needed again.
+
+It is up to the user to reinitialize ``field_dict``. For example, if the reactor-averaged mass fraction needs to be computed at time ``1`` and then at time ``2``, the user needs to pass an empty dictionary (or nothing) to ``read_field`` before reading the fields at time ``2``. Otherwise, ``read_field`` will assume that the mass fraction field is the same at time ``1`` and time ``2``.
+
+
+Read mesh-related fields
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+We rely on OpenFOAM utilities to provide mesh-based fields. The results of the OpenFOAM utilities can still be processed using ``bird`` functions.
+
+
+Reading cell volumes
+^^^^^^^^^^^^^^^^^^^^
+
+A cell volume field can be generated with the OpenFOAM command ``postProcess -func writeCellVolumes -time {time_folder} -case {case_folder}``.
+It will generate a file ``{time_folder}/V`` which can be read with the ``readOF`` function of ``bird``.
+This workflow is used in ``bird.postprocess.post_quantities``, for example in the ``compute_gas_holdup`` function.
+
+
+Reading cell centers
+^^^^^^^^^^^^^^^^^^^^
+
+A mesh object file can be generated with the OpenFOAM command ``writeMeshObj -case {case_folder}``.
+The file can then be read with the function ``readMesh`` from ``bird.utilities.ofio``.
+Again, this is used in ``bird.postprocess.post_quantities``, in the ``compute_superficial_velocity`` function.
+
+
+Read dictionaries
+-----------------
+
+We provide a function ``parse_openfoam_dict`` in ``bird.utilities.ofio`` that can parse OpenFOAM dictionaries. The function requires a fair amount of special-character handling, but it works for the basic dictionaries needed to manage OpenFOAM cases (``controlDict``, ``setFieldsDict``, ``phaseProperties``, ``thermophysicalProperties``, ``momentumTransport``, ...).
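+
+As an illustration, the sketch below combines the utilities described above. The exact call signatures of ``readOF`` and ``parse_openfoam_dict`` (here assumed to take a file path), as well as the case and time folder names, are assumptions made for this example and should be checked against ``bird.utilities.ofio``.
+
+.. code-block:: python
+
+   import numpy as np
+
+   from bird.utilities.ofio import parse_openfoam_dict, readOF
+
+   # Assumed case and time folder names, for illustration only.
+   case_folder = "tutorial_cases/bubble_column_20L"
+   time_folder = "80"
+
+   # Read a scalar and a vector field (readOF is assumed to take the field file path).
+   alpha_gas = readOF(f"{case_folder}/{time_folder}/alpha.gas")  # (N,) array
+   u_liquid = readOF(f"{case_folder}/{time_folder}/U.liquid")  # (N,3) array
+
+   # Cell volumes written by `postProcess -func writeCellVolumes` can be read the same way.
+   cell_volumes = readOF(f"{case_folder}/{time_folder}/V")
+
+   # Volume-weighted gas holdup estimate from the fields read above.
+   holdup = np.sum(alpha_gas * cell_volumes) / np.sum(cell_volumes)
+
+   # Parse a case dictionary (assumed to take the dictionary file path).
+   control_dict = parse_openfoam_dict(f"{case_folder}/system/controlDict")
+   print(holdup, control_dict)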
+ + +Generate cases +------------ + +(to be added based on the reactor optimization work) From 483b731ce7f1aaca0ed52bdbdc59c8516f7213cc Mon Sep 17 00:00:00 2001 From: Malik Date: Thu, 10 Jul 2025 11:57:32 -0600 Subject: [PATCH 38/38] fix display of python versions --- bird/version.py | 2 +- setup.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/bird/version.py b/bird/version.py index 07d5c654..d01886b9 100644 --- a/bird/version.py +++ b/bird/version.py @@ -1,3 +1,3 @@ """Bio reactor design version""" -__version__ = "0.0.24" +__version__ = "0.0.25" diff --git a/setup.py b/setup.py index 3d32deab..d98a2918 100644 --- a/setup.py +++ b/setup.py @@ -29,8 +29,7 @@ "License :: OSI Approved :: BSD License", "Natural Language :: English", "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", ], package_data={ "": [