Commit 74c91a55 authored by Ferenc Leichsenring's avatar Ferenc Leichsenring

Initial commit

<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7" project-jdk-type="Python SDK" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Python 3.7" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="TestRunnerService">
<option name="PROJECT_TEST_RUNNER" value="Unittests" />
</component>
</module>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/mm_workshop.iml" filepath="$PROJECT_DIR$/.idea/mm_workshop.iml" />
</modules>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ChangeListManager">
<list default="true" id="2a594707-9030-4d4f-b51d-d22b8af892a8" name="Default Changelist" comment="" />
<option name="EXCLUDED_CONVERTED_TO_IGNORED" value="true" />
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
<option name="LAST_RESOLUTION" value="IGNORE" />
</component>
<component name="FileTemplateManagerImpl">
<option name="RECENT_TEMPLATES">
<list>
<option value="Python Script" />
</list>
</option>
</component>
<component name="ProjectId" id="1VsipkvQEjSZXvtWxjykGuGC79Q" />
<component name="PropertiesComponent">
<property name="last_opened_file_path" value="$PROJECT_DIR$" />
<property name="settings.editor.selected.configurable" value="com.jetbrains.python.configuration.PyActiveSdkModuleConfigurable" />
</component>
<component name="RunDashboard">
<option name="ruleStates">
<list>
<RuleState>
<option name="name" value="ConfigurationTypeDashboardGroupingRule" />
</RuleState>
<RuleState>
<option name="name" value="StatusDashboardGroupingRule" />
</RuleState>
</list>
</option>
</component>
<component name="RunManager">
<configuration name="annTest" type="PythonConfigurationType" factoryName="Python" temporary="true">
<module name="mm_workshop" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/src" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/src/annTest.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
<option name="REDIRECT_INPUT" value="false" />
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<recent_temporary>
<list>
<item itemvalue="Python.annTest" />
</list>
</recent_temporary>
</component>
<component name="SvnConfiguration">
<configuration />
</component>
<component name="TaskManager">
<task active="true" id="Default" summary="Default task">
<changelist id="2a594707-9030-4d4f-b51d-d22b8af892a8" name="Default Changelist" comment="" />
<created>1578048387111</created>
<option name="number" value="Default" />
<option name="presentableId" value="Default" />
<updated>1578048387111</updated>
</task>
<servers />
</component>
<component name="XDebuggerManager">
<breakpoint-manager>
<default-breakpoints>
<breakpoint type="python-exception">
<properties notifyOnTerminate="true" exception="BaseException">
<option name="notifyOnTerminate" value="true" />
</properties>
</breakpoint>
</default-breakpoints>
</breakpoint-manager>
</component>
</project>
\ No newline at end of file
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
import numpy as np
import matplotlib.pyplot as plt

from analyzer.generalAnalyzer import GeneralAnalyzer


class CompObj(GeneralAnalyzer):
    """Compares surrogate predictions against data in a predicted-vs-observed plot."""

    def __init__(self, problem, sampler, surrogate_list):
        GeneralAnalyzer.__init__(self, problem, sampler, surrogate_list)
        self.n_models = len(self.model_list)

    def plotDataVsPrediction(self, N, title=" ", labels=None, save_name=None):
        pltlabels = ['$z=z^*$']
        X, Y = self.generate_data_points(N)
        Y_mod = np.transpose(np.asarray(self.eval_models(X)))
        fig = plt.figure(figsize=(6, 6))
        ax = plt.gca()
        ax.set_xlabel('z data', fontsize=12)
        ax.set_ylabel('z model', fontsize=12)
        ax.axis([np.min(Y), np.max(Y), np.min(Y), np.max(Y)])
        # identity line: a perfect surrogate would put all points on it
        ax.plot([np.min(Y), np.max(Y)], [np.min(Y), np.max(Y)], 'r-')
        for yIdx in range(self.n_models):
            ax.plot(Y, Y_mod[:, yIdx], '.', markersize=2)
        if labels is not None:
            pltlabels.extend(labels)
        plt.legend(pltlabels, prop={'size': 12})
        plt.title(title)
        if save_name is not None:
            self.saveFig(ax, save_name)
        plt.show()
import numpy as np
from copy import deepcopy


class GeneralAnalyzer(object):
    """Base class holding a problem, a sampler and a list of surrogate models."""

    def __init__(self, problem, sampler, surrogate_list):
        self.sampler = sampler
        self.problem = problem
        self.model_list = surrogate_list

    def generate_data_points(self, N):
        return self.sampler(N)

    def eval_models(self, X):
        Y_mod = []
        for m in self.model_list:
            Y_mod.append(deepcopy(m(X)))
        return Y_mod

    def rel_err(self, x, x_approx):
        # element-wise relative error
        return np.abs((x - x_approx) / x)

    def gen_err(self, x, x_approx):
        # generalization error: mean squared error normalized by the variance of x
        return np.mean((x - x_approx) ** 2) / np.var(x)

    def saveFig(self, ax, saveName):
        ax.figure.savefig(saveName, bbox_inches='tight', dpi=300)
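For intuition, the gen_err metric above is the mean squared error normalized by the variance of the reference data, so a value of 1 corresponds to a surrogate that is no better than always predicting the mean. A minimal sanity check on toy arrays (not part of the workshop code):

import numpy as np

x = np.array([1.0, 2.0, 3.0, 4.0])
x_approx = np.array([1.1, 1.9, 3.2, 3.8])
mse = np.mean((x - x_approx) ** 2)   # 0.025
gen_err = mse / np.var(x)            # 0.025 / 1.25 = 0.02
print(gen_err)                       # -> 0.02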
import numpy as np
from copy import deepcopy
import matplotlib.pyplot as plt


class Histogram(object):
    """Plots histograms of the relative surrogate error on freshly sampled data."""

    def __init__(self, problem, sampler, surrogate_list):
        self.f = problem
        self.sampler = sampler
        self.model_list = surrogate_list
        self.n_models = len(self.model_list)

    def generate_data_points(self, N):
        return self.sampler(N)

    def eval_models(self, X):
        Y_mod = []
        for m in self.model_list:
            Y_mod.append(deepcopy(m(X)))
        return Y_mod

    def plot_rel_error_histogram(self, N, labels=None, save_name=None):
        X, Y = self.generate_data_points(N)
        Y_mod = np.transpose(np.asarray(self.eval_models(X)))
        # Y_mod = np.asarray(self.eval_models(X)).reshape(N, -1)
        err = [np.log10(self.rel_err(Y, Y_mod[:, j])) for j in range(self.n_models)]
        x_label = r"$\log_{10}( \epsilon_{\mathrm{rel}} )$"
        y_label = 'normalized frequency'
        self.plot_histograms(err, labels=labels, title="relative error",
                             x_label=x_label, y_label=y_label, save_name=save_name)

    def rel_err(self, x, x_approx):
        return np.abs((x - x_approx) / x)

    def gen_err(self, x, x_approx):
        return np.mean((x - x_approx) ** 2) / np.var(x)

    def generalization_error(self, N):
        X, Y = self.generate_data_points(N)
        Y_mod = np.asarray(self.eval_models(X)).reshape(N, -1)
        Y = np.asarray(Y)
        return [self.gen_err(Y, Y_mod[:, j]) for j in range(self.n_models)]

    def plot_histograms(self, arr, title=" ", labels=None, n_bins=50,
                        x_label=r"$x$", y_label=r"$y$", save_name=None):
        fig = plt.figure()
        ax = plt.gca()
        ax.set_xlabel(x_label, fontsize=16)
        ax.set_ylabel(y_label, fontsize=16)
        ax.grid(b=True, which='major', color='k', linestyle='--', alpha=0.4)
        ax.grid(b=True, which='minor', color='k', linestyle='--', alpha=0.1)
        ax.grid(False)  # grid currently disabled; remove this line to show the dashed grid
        ax.tick_params(axis='x', which='major', labelsize=14)
        ax.tick_params(axis='y', which='major', labelsize=14)
        for a in arr:
            plt.hist(a, n_bins, histtype='step', stacked=True, fill=False, density=True, lw=3)
        plt.title(title)
        if labels is not None:
            plt.legend(labels, prop={'size': 15})
        if save_name is not None:
            plt.savefig(save_name, bbox_inches='tight', dpi=300)
        plt.show()
\ No newline at end of file
import numpy as np
import matplotlib.pyplot as plt

from problems.ackley import Ackley
from sampler.lhc import LHC2
from surrogates.ann import KerasANN
from surrogates.gp import SKLearnGP
from analyzer.compObj import CompObj
from analyzer.histogram import Histogram

# Problem
x = np.linspace(0, 1, 10)
grid = np.array(np.meshgrid(x, x))
ack = Ackley(20, 0.2, np.pi * 2, 2)
# smoke test: the 1-D array x is interpreted as a single 10-dimensional point
print(ack(x))

# Sampler
lhc2 = LHC2(ack)
X, Y = lhc2(1000)

# Surrogates
ann = KerasANN(idim=2, odim=1, neuronsHLayer=[20, 20], loss='logcosh')
ann.trainNN(X, Y, epochs=20)
ySurr = ann(X)

gp = SKLearnGP()
gp.learn_from_data(X, Y)

# Analyzer (labels in the same order as the surrogate list)
hist = Histogram(ack, lhc2, [ann, gp])
hist.plot_rel_error_histogram(1000, labels=['ann', 'gp'])

# cpObj = CompObj(ack, lhc2, [gp, ann])
# cpObj.plotDataVsPrediction(1000, labels=['gp', 'ann'])
\ No newline at end of file
import numpy as np


class Ackley(object):
    """d-dimensional Ackley test function; inputs in [0, 1] are rescaled to [-30, 30]."""

    def __init__(self, a=20., b=0.2, c=2 * np.pi, dim=2):
        self.a = a
        self.b = b
        self.c = c
        self.dim = dim

    def __call__(self, xx):
        xx = xx.T
        d = xx.shape[0]
        # scaling from the unit interval to the actual domain
        xmin = -30
        xmax = 30
        sum1 = 0
        sum2 = 0
        for ii in range(d):
            xi = xmin + xx[ii] * (xmax - xmin)
            sum1 = sum1 + xi ** 2
            sum2 = sum2 + np.cos(self.c * xi)
        term1 = -self.a * np.exp(-self.b * np.sqrt(sum1 / d))
        term2 = -np.exp(sum2 / d)
        y = term1 + term2 + self.a + np.exp(1)
        return y


if __name__ == "__main__":
    ack = Ackley(20, 0.2, np.pi * 2)
    x = np.linspace(0, 1, 10)
    grid = np.array(np.meshgrid(x, x))
    # print(grid)
    print(ack(x))
    print(ack(grid).shape)
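For reference, the class above implements the standard Ackley function, applied after the code's rescaling of inputs from [0, 1] to [-30, 30]:

f(x) = -a \exp\left(-b \sqrt{\frac{1}{d}\sum_{i=1}^{d} x_i^2}\right) - \exp\left(\frac{1}{d}\sum_{i=1}^{d} \cos(c\, x_i)\right) + a + e

with its global minimum f(0) = 0 at x = 0 in the rescaled coordinates.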
import numpy as np

try:
    from smt.sampling_methods import LHS
except ImportError:
    print('smt package not found --> falling back to pyDOE (use the LHC2 class)')
    from pyDOE import lhs as LHS2


class LHC(object):
    """Latin hypercube sampler based on the smt package."""

    def __init__(self, problem):
        self.problem = problem

    def __call__(self, N):
        num_d = self.problem.dim
        dim = [num_d, 2]
        xlimits = np.array([0.0, 1.0] * num_d).reshape(dim)
        sampling = LHS(xlimits=xlimits)
        xx = sampling(N)
        y = np.asarray([self.problem(x) for x in xx])
        return xx, y


class LHC2(object):
    """Latin hypercube sampler based on pyDOE's lhs."""

    def __init__(self, problem, limits=[0, 1]):
        self.problem = problem
        self.limits = limits

    def __call__(self, N):
        num_d = self.problem.dim
        dim = [num_d, 2]
        xlimits = np.array(self.limits * num_d).reshape(dim)
        samples = LHS2(num_d, N)
        xx = np.zeros(shape=(N, num_d))
        # scale the unit-cube samples to the given limits
        for dIdx in range(num_d):
            xx[:, dIdx] = (xlimits[dIdx, 1] - xlimits[dIdx, 0]) * samples[:, dIdx] + xlimits[dIdx, 0]
        # evaluate the objective function at every sample point
        y = np.asarray([self.problem(x) for x in xx])
        return xx, y
\ No newline at end of file
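A quick shape check for the LHC2 sampler above, using the same Ackley setup as in the test script (a sketch, assuming pyDOE is installed):

import numpy as np
from problems.ackley import Ackley
from sampler.lhc import LHC2

ack = Ackley(dim=2)
sampler = LHC2(ack, limits=[0, 1])
X, Y = sampler(100)
print(X.shape, Y.shape)   # (100, 2), (100,)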
import numpy as np
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.layers import Dense
from keras.callbacks import EarlyStopping


class KerasANN(object):
    """Feed-forward Keras network with min-max scaling of the targets to [0, 1]."""

    def __init__(self, idim, odim, neuronsHLayer=[], hActivation='relu',
                 oActivation='sigmoid', **kwargs):
        self.npHlayer = neuronsHLayer
        self.hAFunc = hActivation
        self.oAFunc = oActivation
        self.idim = idim
        self.odim = odim
        self.kerasModel = None
        self._scale = np.zeros(shape=(self.odim))
        self._devi = np.zeros(shape=(self.odim))
        self._compileNN(**kwargs)

    def _compileNN(self, **kwargs):
        compileKwargs = {'optimizer': 'adam',
                         'loss': 'mse'}
        for key, value in kwargs.items():
            compileKwargs.update({key: value})
        self.kerasModel = Sequential()
        # special case: no hidden layers
        if len(self.npHlayer) == 0:
            self.kerasModel.add(Dense(self.odim, input_dim=self.idim, activation=self.oAFunc))
        else:
            # first hidden layer
            self.kerasModel.add(Dense(self.npHlayer[0], input_dim=self.idim, activation=self.hAFunc))
            # further hidden layers
            for nHlayer in self.npHlayer[1:]:
                self.kerasModel.add(Dense(nHlayer, activation=self.hAFunc))
            # output layer
            self.kerasModel.add(Dense(self.odim, activation=self.oAFunc))
        self.kerasModel.compile(**compileKwargs)

    def trainNN(self, X, Y, epochs=100, batch_size=32, showLossPlot=True, **kwargs):
        trainKwargs = {'epochs': epochs,
                       'batch_size': batch_size,
                       'validation_split': 0.2}
        for key, value in kwargs.items():
            trainKwargs.update({key: value})
        # scale Y to [0, 1] (currently only valid for odim == 1)
        for oIdx in range(self.odim):
            self._scale[oIdx] = np.max(Y) - np.min(Y)
            self._devi[oIdx] = -(np.min(Y) / self._scale[oIdx])
        yUni = (Y / self._scale[0]) + self._devi[0]
        # callbacks
        cbList = [EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=50)]
        trainHistory = self.kerasModel.fit(X, yUni, callbacks=cbList, **trainKwargs)
        self.kerasModel.evaluate(X, yUni)
        print('Training finished.')
        if showLossPlot:
            figVal, axVal = plt.subplots(1, 1, figsize=(9, 5))
            axVal.plot(trainHistory.history['loss'])
            axVal.plot(trainHistory.history['val_loss'])
            axVal.set_yscale('log')
            axVal.legend(['loss', 'val_loss'])
            axVal.set_xlabel('training progress', fontsize=12)
            axVal.set_ylabel('loss', fontsize=12)
            plt.show()

    def __call__(self, x):
        yPrUni = self.kerasModel.predict(x)
        # invert the min-max scaling of the targets
        yPred = (yPrUni - self._devi) * self._scale
        if self.odim == 1:
            return yPred.flatten()
        else:
            return yPred
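The target scaling in KerasANN above matters because the default output activation is a sigmoid, which can only produce values in (0, 1). A minimal sketch of the forward and inverse transform on toy targets (variable names are illustrative, not part of the class):

import numpy as np

Y = np.array([3.0, 7.0, 11.0])
scale = np.max(Y) - np.min(Y)     # 8.0
devi = -(np.min(Y) / scale)       # -0.375
yUni = Y / scale + devi           # [0.0, 0.5, 1.0] -- fits the sigmoid output range
Y_back = (yUni - devi) * scale    # recovers [3.0, 7.0, 11.0]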
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import (RBF, Matern, RationalQuadratic,
                                              ExpSineSquared, DotProduct,
                                              ConstantKernel)

# a few standard kernels to choose from
predefined_sk_learn_kernels = [1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-1, 10.0)),
                               1.0 * RationalQuadratic(length_scale=1.0, alpha=0.1),
                               1.0 * ExpSineSquared(length_scale=1.0, periodicity=3.0,
                                                    length_scale_bounds=(0.1, 10.0),
                                                    periodicity_bounds=(1.0, 10.0)),
                               ConstantKernel(0.1, (0.01, 10.0))
                               * (DotProduct(sigma_0=1.0, sigma_0_bounds=(0.1, 10.0)) ** 2),
                               1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-1, 10.0),
                                            nu=1.5)]


class SKLearnGP(object):
    """Thin wrapper around scikit-learn's GaussianProcessRegressor."""

    def __init__(self, kernel=predefined_sk_learn_kernels[0]):
        self.kernel = kernel
        self.gp = GaussianProcessRegressor(kernel=kernel)

    def learn_from_data(self, X, Y):
        self.gp.fit(X, Y)

    def learn_from_problem(self, problem):
        raise NotImplementedError

    def __call__(self, x, return_std=False):
        return self.gp.predict(x, return_std=return_std)
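Since __call__ forwards return_std to scikit-learn, the wrapper above can also report the predictive standard deviation, which the analyzers do not currently use. A minimal sketch with assumed toy data:

import numpy as np
from surrogates.gp import SKLearnGP

X = np.random.rand(50, 2)                 # toy inputs in the unit square
Y = np.sin(X[:, 0]) + np.cos(X[:, 1])     # toy targets
gp = SKLearnGP()
gp.learn_from_data(X, Y)
mean, std = gp(np.random.rand(5, 2), return_std=True)
print(mean.shape, std.shape)              # (5,), (5,)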