# -*- coding: utf-8 -*-
"""deep_option_pricing_to_implement.ipynb

# **Deep network to fit option prices**

We introduce a deep network to fit option prices generated under the Black-Scholes model.

## Generate the dataset

The dataset is a table (pandas DataFrame or NumPy array) with columns: stock price `S`, strike `K`, time to maturity `T`, dividend rate `q`, interest rate `r`, volatility `sigma`.

Take variables:
- stock price: uniform on [50, 150], or log-normal with mean 100 and enough volatility to keep most prices within +-20% of 100
- strike: the same
- time to maturity: uniform from 5/255 (one week) to 2 (in years)
- dividend rate: uniform from 0 to 10%
- interest rate: uniform from 0 to 10%
- volatility: uniform from 0.1% to 50%
"""

# generate dataset of input variables : replace None with your code
# (see the appendix below for one possible sketch)
import pandas as pd

df = pd.DataFrame(None)
df.head(50)

"""### Recall the Black-Scholes call option formula

Implement the formula and add a new column `C` to the dataset holding the call price. Back up the dataset with pickle.
"""

# compute the call price : replace None with your code
# (see the appendix below for one possible sketch)
df['C'] = None
df.head(50)

df.to_pickle("option_prices.pkl")
df_copy = df.copy()

"""### Normalize the data

It is customary to express data in moneyness, i.e. divide the stock price and the call price by the strike (this is equivalent to changing the numeraire).
"""

# Normalize the data, exploiting the fact that the Black-Scholes price is
# homogeneous of degree one in (S, K): C(a*S, a*K, ...) = a * C(S, K, ...)
df_copy["S"] = df_copy["S"] / df_copy["K"]
df_copy["C"] = df_copy["C"] / df_copy["K"]
df_copy.head()

# the strike is no longer needed after normalization
df_copy.drop(columns=['K'], inplace=True)
df_copy.head(50)

"""### Construct the dataset"""

X = df_copy[['S', 'T', 'q', 'r', 'sigma']]  # features: volatility must be included
X.info()

y = df_copy['C']  # target variable

# split the dataset into train and test sets
from sklearn.model_selection import train_test_split

X_train, X_test, y_train, y_test = train_test_split(X, y, train_size=0.8, random_state=1)
print(X_train.shape, X_test.shape, y_train.shape, y_test.shape)

"""## Start the deep learning part"""

import tensorflow as tf
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, LeakyReLU
from keras import backend

def custom_activation(x):
    # exponential activation: useful on the output layer to keep
    # predicted prices strictly positive
    return backend.exp(x)

"""## Define the network

Use fully connected layers with ReLU activation. For the loss function use 'mse'; use optimizer 'Adam' or 'RMSprop'. Also define a custom metric to output the relative error.
"""

# define the network : replace None with your code
# (see the appendix below for one possible sketch)
model = None

# Custom metric: symmetric relative error, bounded in [0, 1]
def relative_error(y_true, y_pred):
    return tf.reduce_mean(tf.abs((y_true - y_pred) / tf.maximum(tf.abs(y_true), tf.abs(y_pred))))

model.compile(loss='mse', optimizer='rmsprop', metrics=[relative_error])
model.summary()

"""## Train the model"""

history = model.fit(X_train, y_train, batch_size=64, epochs=20, validation_split=0.1, verbose=2)

"""## Plot the results"""

# Plot the training history including the custom metric (relative error)
import matplotlib.pyplot as plt

plt.plot(history.history['loss'], label='train_loss')
plt.plot(history.history['val_loss'], label='val_loss')
plt.plot(history.history['relative_error'], label='train_relative_error')
plt.plot(history.history['val_relative_error'], label='val_relative_error')
plt.xlabel('Epoch')
plt.ylabel('Loss / Relative Error')
plt.legend()
plt.show()
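"""## Appendix: reference sketches

The sketches below show one possible way to fill in the `None` placeholders above; they are illustrative, not the only valid implementation.

### Sampling the input variables

A minimal sketch using NumPy's `default_rng`. The sample size `n` and the random seed are arbitrary choices; the column names match those used in the normalization step.
"""

import numpy as np
import pandas as pd

rng = np.random.default_rng(seed=0)   # fixed seed for reproducibility
n = 100_000                           # arbitrary sample size

df = pd.DataFrame({
    "S":     rng.uniform(50.0, 150.0, n),   # stock price
    "K":     rng.uniform(50.0, 150.0, n),   # strike
    "T":     rng.uniform(5 / 255, 2.0, n),  # time to maturity (years)
    "q":     rng.uniform(0.0, 0.10, n),     # dividend rate
    "r":     rng.uniform(0.0, 0.10, n),     # interest rate
    "sigma": rng.uniform(0.001, 0.50, n),   # volatility
})
df.head()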
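"""### Black-Scholes call price

The European call price with continuous dividend yield $q$ is

$$C = S e^{-qT} N(d_1) - K e^{-rT} N(d_2), \qquad d_1 = \frac{\ln(S/K) + (r - q + \sigma^2/2)\,T}{\sigma\sqrt{T}}, \qquad d_2 = d_1 - \sigma\sqrt{T}.$$

A vectorized sketch, assuming `scipy` is available for the normal CDF:
"""

import numpy as np
from scipy.stats import norm

def bs_call(S, K, T, q, r, sigma):
    """Black-Scholes price of a European call with continuous dividend yield."""
    d1 = (np.log(S / K) + (r - q + 0.5 * sigma**2) * T) / (sigma * np.sqrt(T))
    d2 = d1 - sigma * np.sqrt(T)
    return S * np.exp(-q * T) * norm.cdf(d1) - K * np.exp(-r * T) * norm.cdf(d2)

df["C"] = bs_call(df["S"], df["K"], df["T"], df["q"], df["r"], df["sigma"])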
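"""### Network definition

One reasonable architecture among many: three hidden layers of 64 ReLU units each, with the exponential `custom_activation` on the output so predicted prices stay positive. The layer widths and depth are arbitrary choices; the input has 5 features after normalization (`S`, `T`, `q`, `r`, `sigma`).
"""

from keras import Input
from keras.models import Sequential
from keras.layers import Dense

model = Sequential([
    Input(shape=(5,)),
    Dense(64, activation="relu"),
    Dense(64, activation="relu"),
    Dense(64, activation="relu"),
    Dense(1, activation=custom_activation),  # exp output keeps predicted prices > 0
])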