From 84ba161be07d57d916d24bf5b8ef832cedb8908d Mon Sep 17 00:00:00 2001
From: Craig Warren
Date: Thu, 21 Jan 2016 15:31:02 +0000
Subject: [PATCH] Restructured some of the Taguchi optimisation code, and
 added function to write OAs on demand.

---
 gprMax/gprMax.py                                |  22 ++--
 .../optimisation_taguchi.py                     | 111 ++++++------------
 ...ess.py => optimisation_taguchi_fitness.py}   |   0
 user_libs/optimisations/OA_18_7_3_2.npy         | Bin 206 -> 0 bytes
 user_libs/optimisations/OA_9_4_3_2.npy          | Bin 116 -> 0 bytes
 5 files changed, 49 insertions(+), 84 deletions(-)
 rename user_libs/optimisations/taguchi.py => gprMax/optimisation_taguchi.py (82%)
 rename user_libs/{optimisations/taguchi_fitness.py => optimisation_taguchi_fitness.py} (100%)
 delete mode 100644 user_libs/optimisations/OA_18_7_3_2.npy
 delete mode 100644 user_libs/optimisations/OA_9_4_3_2.npy

diff --git a/gprMax/gprMax.py b/gprMax/gprMax.py
index 3fcd88c8..e0d3493c 100644
--- a/gprMax/gprMax.py
+++ b/gprMax/gprMax.py
@@ -73,8 +73,8 @@ def main():
     # Process for Taguchi optimisation #
     ########################################
     if args.opt_taguchi:
-        from user_libs.optimisations.taguchi import taguchi_code_blocks, construct_OA, calculate_ranges_experiments, calculate_optimal_levels, plot_optimisation_history
-
+        from gprMax.optimisation_taguchi import taguchi_code_blocks, construct_OA, calculate_ranges_experiments, calculate_optimal_levels, plot_optimisation_history
+
         # Default maximum number of iterations of optimisation to perform (used if the stopping criterion is not achieved)
         maxiterations = 20
 
@@ -96,10 +96,11 @@ def main():
         optparamshist = OrderedDict((key, list()) for key in optparams)
 
         # Import specified fitness function
-        fitness_metric = getattr(importlib.import_module('user_libs.optimisations.taguchi_fitness'), fitness['name'])
+        fitness_metric = getattr(importlib.import_module('user_libs.optimisation_taguchi_fitness'), fitness['name'])
 
         # Select OA
-        OA, N, k, s = construct_OA(optparams)
+        OA, N, cols, k, s, t = construct_OA(optparams)
+        print('\n{}\n\nTaguchi optimisation: orthogonal array with {} experiments, {} parameters ({} used), {} levels, and strength {} will be used.'.format(68*'*', N, cols, k, s, t))
 
         # Initialise arrays and lists to store parameters required throughout optimisation
         # Lower, central, and upper values for each parameter
@@ -236,13 +237,16 @@ def main():
 
             # Stop optimisation if stopping criterion has been reached
            if fitnessvalueshist[i - 1] > fitness['stop']:
+                print('\nTaguchi optimisation stopped as fitness criterion reached')
                 break
 
-#            # Stop optimisation if successive fitness values are within 1%
-#            if i > 2:
-#                fitnessvaluesclose = (np.abs(fitnessvalueshist[i - 2] - fitnessvalueshist[i - 1]) / fitnessvalueshist[i - 1]) * 100
-#                if fitnessvaluesclose < 1:
-#                    break
+            # Stop optimisation if the percentage change between successive fitness values is below a threshold
+            if i > 2:
+                fitnessvaluesclose = (np.abs(fitnessvalueshist[i - 2] - fitnessvalueshist[i - 1]) / fitnessvalueshist[i - 1]) * 100
+                fitnessvaluesthres = 0.1
+                if fitnessvaluesclose < fitnessvaluesthres:
+                    print('\nTaguchi optimisation stopped as successive fitness values within {}%'.format(fitnessvaluesthres))
+                    break
 
         # Save optimisation parameters history and fitness values history to file
         opthistfile = inputfileparts[0] + '_hist'

diff --git a/user_libs/optimisations/taguchi.py b/gprMax/optimisation_taguchi.py
similarity index 82%
rename from user_libs/optimisations/taguchi.py
rename to gprMax/optimisation_taguchi.py
index ca527eb3..3f88848f 100644
--- a/user_libs/optimisations/taguchi.py
+++ b/gprMax/optimisation_taguchi.py
@@ -65,89 +65,50 @@ def construct_OA(optparams):
     Returns:
         OA (array): Orthogonal array
         N (int): Number of experiments in OA
-        k (int): Number of parameters to optimise in OA
+        cols (int): Number of columns in OA
+        k (int): Number of columns in OA cut down to number of parameters to optimise
         s (int): Number of levels in OA
         t (int): Strength of OA
     """
-# S=3; % 3 level OA
-#J=3;
-#M=S^J; % number of experiments
-#
-#for k=1:J % for basic columns
-#    j=(S^(k-1)-1)/(S-1)+1;
-#    for i=1:M
-#        A(i,j)=mod(floor((i-1)/(S^(J-k))),S);
-#    end
-#end
-#
-#for k=2:J % for non-basic columns
-#    j=(S^(k-1)-1)/(S-1)+1;
-#    for p=1:j-1
-#        for q=1:S-1
-#            A(:,(j+(p-1)*(S-1)+q))=mod((A(:,p)*q+A(:,j)),S);
-#        end
-#    end
-#end
-#
-#
-#[N,K]=size(A);
-#str1=num2str(N,'%0.1d');
-#str2=num2str(K,'%0.1d');
-#str3=num2str(S,'%0.1d');
-#TT=['OA(' str1 ',' str2 ',' str3 ',2).txt'];
-#fid2=fopen(TT,'wt');
-#
-#for j=1:N
-#    for k=1:K
-#        fprintf(fid2,'%0.1d ',A(j,k));
-#        if k==K
-#            fprintf(fid2,'\n');
-#        end
-#    end
-#end
-
-    s = 3 # Number of levels
-    t = 2 # Strength
-#    p = 2
-#    N = s**p # Number of experiments
-#    a = np.zeros((N, 4), dtype=np.int)
-#
-#    # Construct basic columns
-#    for ii in range(0, p):
-#        k = int((s**(ii - 1) - 1) / ((s - 1) + 1))
-#        for m in range(0, N):
-#            a[m, k] = np.mod(np.floor((m - 1) / (s**(p - ii))), s)
-#
-#    # Construct non-basic columns
-#    for ii in range(1, p):
-#        k = int((s**(ii - 1) - 1) / ((s - 1) + 1))
-#        for jj in range(0, k - 1):
-#            for kk in range(0, s - 1):
-#                a[:, k + ((jj - 1) * (s - 1) + kk)] = np.mod(a[:, jj] * kk + a[:, k], s)
-#
-#    print(a)
-
-    # Load the appropriate OA
-    if len(optparams) <= 4:
-        OA = np.load(os.path.join(moduledirectory, 'OA_9_4_3_2.npy'))
-    elif len(optparams) <= 7:
-        OA = np.load(os.path.join(moduledirectory, 'OA_18_7_3_2.npy'))
-    else:
-        raise CmdInputError('Too many parameters to optimise for the available orthogonal arrays (OA). Please find and load a bigger, suitable OA.')
-    print(OA)
-
-    # Cut down OA columns to number of parameters to optimise
-    OA = OA[:, 0:len(optparams)]
-
-    # Number of experiments
-    N = OA.shape[0]
-
+    # Properties of the orthogonal array (OA)
+    # Strength
+    t = 2
+
+    # Number of levels
+    s = 3
+
     # Number of parameters to optimise
-    k = OA.shape[1]
+    k = len(optparams)
+
+    p = int(np.ceil(np.log(k * (s - 1) + 1) / np.log(s)))
+
+    # Number of experiments
+    N = s**p
+
+    # Number of columns
+    cols = int((N - 1) / (s - 1))
+
+    # Algorithm to construct OA from: http://ieeexplore.ieee.org/xpl/articleDetails.jsp?reload=true&arnumber=6812898
+    OA = np.zeros((N + 1, cols + 1), dtype=np.int8)
+
+    # Construct basic columns
+    for ii in range(1, p + 1):
+        col = int((s**(ii - 1) - 1) / (s - 1) + 1)
+        for row in range(1, N + 1):
+            OA[row, col] = np.mod(np.floor((row - 1) / (s**(p - ii))), s)
+    # Construct non-basic columns
+    for ii in range(2, p + 1):
+        col = int((s**(ii - 1) - 1) / (s - 1) + 1)
+        for jj in range(1, col):
+            for kk in range(1, s):
+                OA[:, col + (jj - 1) * (s - 1) + kk] = np.mod(OA[:, jj] * kk + OA[:, col], s)
+    # The first row and first column are unnecessary (only there to match the algorithm); drop them and cut the columns down to the number of parameters to optimise
+    OA = OA[1:, 1:k + 1]
 
-    return OA, N, k, s
+    return OA, N, cols, k, s, t
 
 
 def calculate_ranges_experiments(optparams, optparamsinit, levels, levelsopt, levelsdiff, OA, N, k, s, i):

diff --git a/user_libs/optimisations/taguchi_fitness.py b/user_libs/optimisation_taguchi_fitness.py
similarity index 100%
rename from user_libs/optimisations/taguchi_fitness.py
rename to user_libs/optimisation_taguchi_fitness.py

diff --git a/user_libs/optimisations/OA_18_7_3_2.npy b/user_libs/optimisations/OA_18_7_3_2.npy
deleted file mode 100644
index 8c40ded14881d03d7e62baa96ce7153b94c06a5b..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 206
zcmbR27wQ`j$;jZwP_3SlTAW;@Zl$1JQ);NLqoAIaUsO_*m=~X4l#&V(4=E~51qv5u
Bo?Fsxf+HRItu2RItsN4aKOdD$b=OEg@Fl2*gZ`3;@BP6^Q@<
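
For reference, the construct_OA changes above replace the precomputed OA_9_4_3_2 / OA_18_7_3_2 arrays with an on-demand construction. Below is a minimal standalone sketch of that construction plus a strength-2 check, assuming only numpy; the names construct_oa_sketch and check_strength2 are illustrative and are not part of gprMax.

    import numpy as np

    def construct_oa_sketch(k, s=3):
        """Build an s-level, strength-2 orthogonal array and keep k columns."""
        # Number of basic columns needed so the array has at least k columns
        p = int(np.ceil(np.log(k * (s - 1) + 1) / np.log(s)))
        N = s**p                      # number of experiments (rows)
        cols = (N - 1) // (s - 1)     # total columns the construction yields

        # Work 1-indexed as in the patch; row 0 and column 0 are dummies
        OA = np.zeros((N + 1, cols + 1), dtype=np.int8)

        # Basic columns
        for ii in range(1, p + 1):
            col = (s**(ii - 1) - 1) // (s - 1) + 1
            for row in range(1, N + 1):
                OA[row, col] = ((row - 1) // s**(p - ii)) % s

        # Non-basic columns: linear combinations of the basic ones, modulo s
        for ii in range(2, p + 1):
            col = (s**(ii - 1) - 1) // (s - 1) + 1
            for jj in range(1, col):
                for kk in range(1, s):
                    OA[:, col + (jj - 1) * (s - 1) + kk] = (OA[:, jj] * kk + OA[:, col]) % s

        # Drop the dummy row/column and keep only the first k columns
        return OA[1:, 1:k + 1]

    def check_strength2(OA, s=3):
        """Every ordered pair of levels appears equally often in every pair of columns."""
        N, k = OA.shape
        for a in range(k):
            for b in range(a + 1, k):
                pairs = OA[:, a] * s + OA[:, b]
                counts = np.bincount(pairs, minlength=s * s)
                if not np.all(counts == N // (s * s)):
                    return False
        return True

    if __name__ == '__main__':
        oa = construct_oa_sketch(k=4)      # 4 parameters
        print(oa.shape)                    # (9, 4)
        print(check_strength2(oa))         # True

For k = 4 this produces a 9-experiment, 4-column, 3-level, strength-2 array, the same OA(9, 4, 3, 2) family as the deleted OA_9_4_3_2.npy; for k = 7 the construction gives a 27-experiment array rather than the previously shipped 18-experiment one, since the number of runs is always a power of s.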
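
The convergence test enabled in gprMax.py stops the optimisation once the percentage change between successive fitness values falls below fitnessvaluesthres. A short sketch with made-up history values shows when it triggers; the values in fitnessvalueshist below are hypothetical.

    import numpy as np

    fitnessvalueshist = [0.52, 0.81, 0.8105]   # hypothetical fitness history
    fitnessvaluesthres = 0.1                   # threshold in percent, as in the patch

    i = len(fitnessvalueshist)
    if i > 2:
        # Percentage change between the last two fitness values
        fitnessvaluesclose = (np.abs(fitnessvalueshist[i - 2] - fitnessvalueshist[i - 1])
                              / fitnessvalueshist[i - 1]) * 100
        if fitnessvaluesclose < fitnessvaluesthres:
            print('Stopped: successive fitness values within {}%'.format(fitnessvaluesthres))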