Implementation: Time Series Models Case Studies

Mangesh kendre
6 min read · Jan 22, 2024


Case Study 1: Stock Price Prediction

Objective: Predict stock prices for informed investment decisions.

Dataset: Historical stock price data for a publicly traded company.

Code Example (using ARIMA):

import pandas as pd
from statsmodels.tsa.arima.model import ARIMA
import matplotlib.pyplot as plt
# Load and preprocess data
stock_data = pd.read_csv('stock_prices.csv')
stock_data['Date'] = pd.to_datetime(stock_data['Date'])
stock_data.set_index('Date', inplace=True)
# Fit ARIMA model
model_arima = ARIMA(stock_data['Close'], order=(5,1,2))
results_arima = model_arima.fit()
# Forecast future values (365 steps ahead)
forecast_steps = 365
forecast_arima = results_arima.get_forecast(steps=forecast_steps)
forecast_mean = forecast_arima.predicted_mean
# Plot results (the forecast gets a dated index only if the DatetimeIndex has an explicit frequency)
plt.plot(stock_data.index, stock_data['Close'], label='Actual')
plt.plot(forecast_mean.index, forecast_mean, label='ARIMA Forecast')
plt.legend()
plt.show()
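
If the uncertainty of the forecast matters for the investment decision, get_forecast also exposes confidence intervals. A minimal sketch that reuses forecast_arima and forecast_mean from above (the 95% level is illustrative):

# Shade the 95% confidence interval around the ARIMA forecast
conf_int = forecast_arima.conf_int(alpha=0.05)
plt.plot(stock_data.index, stock_data['Close'], label='Actual')
plt.plot(forecast_mean.index, forecast_mean, label='ARIMA Forecast')
plt.fill_between(conf_int.index, conf_int.iloc[:, 0], conf_int.iloc[:, 1], alpha=0.2, label='95% CI')
plt.legend()
plt.show()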

Code Example (using LSTM):

import pandas as pd
import numpy as np
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import LSTM, Dense
import matplotlib.pyplot as plt
# Load and preprocess data
stock_data = pd.read_csv('stock_prices.csv')
stock_data['Date'] = pd.to_datetime(stock_data['Date'])
stock_data.set_index('Date', inplace=True)
# Normalize data
scaler = MinMaxScaler()
scaled_data = scaler.fit_transform(stock_data['Close'].values.reshape(-1, 1))
# Prepare data for LSTM
train_size = int(len(scaled_data) * 0.80)
train_data, test_data = scaled_data[0:train_size], scaled_data[train_size:]
def create_dataset(dataset, time_steps=1):
    data_x, data_y = [], []
    for i in range(len(dataset) - time_steps):
        a = dataset[i:(i + time_steps), 0]
        data_x.append(a)
        data_y.append(dataset[i + time_steps, 0])
    return np.array(data_x), np.array(data_y)
time_steps = 30
X_train, y_train = create_dataset(train_data, time_steps)
X_test, y_test = create_dataset(test_data, time_steps)
# Reshape input to be [samples, time steps, features]
X_train = X_train.reshape(X_train.shape[0], X_train.shape[1], 1)
X_test = X_test.reshape(X_test.shape[0], X_test.shape[1], 1)
# Build LSTM model
model_lstm = Sequential()
model_lstm.add(LSTM(units=50, return_sequences=True, input_shape=(X_train.shape[1], 1)))
model_lstm.add(LSTM(units=50))
model_lstm.add(Dense(1))
model_lstm.compile(optimizer='adam', loss='mean_squared_error')
model_lstm.fit(X_train, y_train, epochs=1, batch_size=1, verbose=2)  # one epoch and batch size 1 keep the demo fast; increase both for real training
# Make predictions
train_predict = model_lstm.predict(X_train)
test_predict = model_lstm.predict(X_test)
# Inverse transform predictions
train_predict = scaler.inverse_transform(train_predict)
test_predict = scaler.inverse_transform(test_predict)
# Plot results
plt.plot(stock_data.index[time_steps:train_size], stock_data['Close'].values[time_steps:train_size], label='Train Data')
plt.plot(stock_data.index[train_size+time_steps:], stock_data['Close'].values[train_size+time_steps:], label='Actual Test Data')
plt.plot(stock_data.index[time_steps:train_size], train_predict, label='LSTM Train Prediction')
plt.plot(stock_data.index[train_size+time_steps:], test_predict, label='LSTM Test Prediction')
plt.legend()
plt.show()
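
To quantify how well the LSTM tracks the held-out window, a root-mean-squared-error check is a reasonable next step; a minimal sketch, assuming the arrays produced above:

from sklearn.metrics import mean_squared_error
# Compare test predictions against actual closing prices on the original scale
y_test_actual = scaler.inverse_transform(y_test.reshape(-1, 1))
rmse = np.sqrt(mean_squared_error(y_test_actual, test_predict))
print(f'Test RMSE: {rmse:.2f}')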

Case Study 2: Website Traffic Prediction

Objective: Forecast website traffic for capacity planning.

Dataset: Hourly website traffic data.

Code Example (using Exponential Smoothing):

import pandas as pd
from statsmodels.tsa.holtwinters import ExponentialSmoothing
import matplotlib.pyplot as plt
# Load and preprocess data
traffic_data = pd.read_csv('website_traffic.csv')
traffic_data['Datetime'] = pd.to_datetime(traffic_data['Datetime'])
traffic_data.set_index('Datetime', inplace=True)
# Fit Exponential Smoothing model
model_exp_smoothing = ExponentialSmoothing(traffic_data['Traffic'], trend='add', seasonal='add', seasonal_periods=24)
results_exp_smoothing = model_exp_smoothing.fit()
# Forecast future values
forecast_steps = 24
forecast_exp_smoothing = results_exp_smoothing.forecast(steps=forecast_steps)
# Plot results
plt.plot(traffic_data.index, traffic_data['Traffic'], label='Actual')
plt.plot(pd.date_range(traffic_data.index[-1], periods=forecast_steps + 1, freq='H')[1:], forecast_exp_smoothing, label='Exponential Smoothing Forecast')
plt.legend()
plt.show()
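
A quick hold-out check, refitting on everything except the final day and scoring the 24-hour forecast, gives a rough sense of accuracy before relying on the model for capacity planning; a minimal sketch reusing the same data:

# Hold out the last 24 hours, refit, and measure mean absolute error
train = traffic_data['Traffic'].iloc[:-24]
test = traffic_data['Traffic'].iloc[-24:]
fit_holdout = ExponentialSmoothing(train, trend='add', seasonal='add', seasonal_periods=24).fit()
mae = abs(fit_holdout.forecast(24).values - test.values).mean()
print(f'Hold-out MAE: {mae:.2f}')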

Code Example (using Facebook Prophet):

import pandas as pd
from prophet import Prophet  # the package is now published as 'prophet' (formerly fbprophet)
import matplotlib.pyplot as plt
# Load and preprocess data
traffic_data = pd.read_csv('website_traffic.csv')
traffic_data['Datetime'] = pd.to_datetime(traffic_data['Datetime'])
traffic_data.rename(columns={'Datetime': 'ds', 'Traffic': 'y'}, inplace=True)
# Create and fit Prophet model
model_prophet = Prophet()
model_prophet.fit(traffic_data)
# Forecast future values
future = model_prophet.make_future_dataframe(periods=24, freq='H')
forecast_prophet = model_prophet.predict(future)
# Plot results
fig = model_prophet.plot(forecast_prophet)
plt.show()
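
Prophet can also break the forecast down into trend and daily/weekly seasonal components, which is often more useful for capacity planning than the raw forecast alone:

# Decompose the forecast into trend and seasonal components
fig_components = model_prophet.plot_components(forecast_prophet)
plt.show()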

These examples showcase ARIMA, LSTM (deep learning), Exponential Smoothing, and Facebook Prophet for time series forecasting in different scenarios. Adjust the models and parameters to the characteristics of your data; for the ARIMA family, a simple order search is sketched below.
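
One simple way to do that adjustment for ARIMA-family models is a small grid search over candidate orders, keeping the fit with the lowest AIC. A minimal sketch applied to the stock series from Case Study 1 (the grid below is illustrative, not a recommendation):

import itertools
from statsmodels.tsa.arima.model import ARIMA
# Try a small grid of (p, d, q) orders and keep the lowest-AIC fit
best_aic, best_order = float('inf'), None
for p, d, q in itertools.product(range(3), range(2), range(3)):
    try:
        aic = ARIMA(stock_data['Close'], order=(p, d, q)).fit().aic
    except Exception:
        continue
    if aic < best_aic:
        best_aic, best_order = aic, (p, d, q)
print(f'Best order by AIC: {best_order} (AIC={best_aic:.1f})')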

Case Study 3: Energy Consumption Forecasting

Objective: Forecast energy consumption for better resource planning.

Dataset: Hourly energy consumption data.

Code Example (using Seasonal ARIMA):

import pandas as pd
from statsmodels.tsa.statespace.sarimax import SARIMAX
import matplotlib.pyplot as plt
# Load and preprocess data
energy_data = pd.read_csv('energy_consumption.csv')
energy_data['Datetime'] = pd.to_datetime(energy_data['Datetime'])
energy_data.set_index('Datetime', inplace=True)
# Fit Seasonal ARIMA model
model_sarima = SARIMAX(energy_data['Consumption'], order=(1, 1, 1), seasonal_order=(1, 1, 1, 24))
results_sarima = model_sarima.fit()
# Forecast the next 24 hours
forecast_steps = 24
forecast_sarima = results_sarima.get_forecast(steps=forecast_steps)
forecast_mean = forecast_sarima.predicted_mean
# Plot results
plt.plot(energy_data.index, energy_data['Consumption'], label='Actual')
plt.plot(forecast_mean.index, forecast_mean, label='SARIMA Forecast')
plt.legend()
plt.show()
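
Before trusting the seasonal forecast, it is worth looking at the residual diagnostics that statsmodels provides; well-behaved residuals should look roughly like white noise:

# Standardized residuals, histogram, Q-Q plot and correlogram in one figure
results_sarima.plot_diagnostics(figsize=(10, 8))
plt.show()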

Code Example (using Long Short-Term Memory (LSTM)):

import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import LSTM, Dense
import matplotlib.pyplot as plt
# Load and preprocess data
energy_data = pd.read_csv('energy_consumption.csv')
energy_data['Datetime'] = pd.to_datetime(energy_data['Datetime'])
energy_data.set_index('Datetime', inplace=True)
# Normalize data
scaler = MinMaxScaler()
scaled_data = scaler.fit_transform(energy_data['Consumption'].values.reshape(-1, 1))
# Prepare data for LSTM
train_size = int(len(scaled_data) * 0.80)
train_data, test_data = scaled_data[0:train_size], scaled_data[train_size:]
time_steps = 24
X_train, y_train = create_dataset(train_data, time_steps)  # create_dataset as defined in Case Study 1
X_test, y_test = create_dataset(test_data, time_steps)
# Reshape input to be [samples, time steps, features]
X_train = X_train.reshape(X_train.shape[0], X_train.shape[1], 1)
X_test = X_test.reshape(X_test.shape[0], X_test.shape[1], 1)
# Build LSTM model
model_lstm = Sequential()
model_lstm.add(LSTM(units=50, return_sequences=True, input_shape=(X_train.shape[1], 1)))
model_lstm.add(LSTM(units=50))
model_lstm.add(Dense(1))
model_lstm.compile(optimizer='adam', loss='mean_squared_error')
model_lstm.fit(X_train, y_train, epochs=1, batch_size=1, verbose=2)
# Make predictions
train_predict = model_lstm.predict(X_train)
test_predict = model_lstm.predict(X_test)
# Inverse transform predictions
train_predict = scaler.inverse_transform(train_predict)
test_predict = scaler.inverse_transform(test_predict)
# Plot results
plt.plot(energy_data.index[time_steps:train_size], energy_data['Consumption'].values[time_steps:train_size], label='Train Data')
plt.plot(energy_data.index[train_size+time_steps:], energy_data['Consumption'].values[train_size+time_steps:], label='Actual Test Data')
plt.plot(energy_data.index[time_steps:train_size], train_predict, label='LSTM Train Prediction')
plt.plot(energy_data.index[train_size+time_steps:], test_predict, label='LSTM Test Prediction')
plt.legend()
plt.show()

Case Study 4: Demand Forecasting for Retail

Objective: Forecast product demand for inventory management.

Dataset: Daily sales data for various products.

Code Example (using Prophet):

import pandas as pd
from prophet import Prophet  # the package is now published as 'prophet' (formerly fbprophet)
import matplotlib.pyplot as plt
# Load and preprocess data
sales_data = pd.read_csv('product_sales.csv')
sales_data['Date'] = pd.to_datetime(sales_data['Date'])
sales_data.rename(columns={'Date': 'ds', 'Sales': 'y'}, inplace=True)
# Create and fit Prophet model
model_prophet = Prophet()
model_prophet.fit(sales_data)
# Forecast future values
future = model_prophet.make_future_dataframe(periods=30, freq='D')
forecast_prophet = model_prophet.predict(future)
# Plot results
fig = model_prophet.plot(forecast_prophet)
plt.show()
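
Prophet also ships with a rolling-origin cross-validation helper, which is a sensible way to validate a demand forecast before using it for inventory decisions. A minimal sketch (the window sizes are illustrative and assume at least a year of history):

from prophet.diagnostics import cross_validation, performance_metrics
# Evaluate 30-day forecasts made from rolling cutoffs
df_cv = cross_validation(model_prophet, initial='180 days', period='30 days', horizon='30 days')
print(performance_metrics(df_cv)[['horizon', 'mae', 'rmse']].head())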

These examples demonstrate how to use Seasonal ARIMA, LSTM, and Facebook Prophet for energy consumption and demand forecasting. Adjust the models and parameters based on the specific characteristics of your data.

Case Study 5: Stock Price Prediction

Objective: Predict future stock prices for investment decisions.

Dataset: Historical daily stock price data.

Code Example (using Long Short-Term Memory (LSTM)):

import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import LSTM, Dense
import matplotlib.pyplot as plt
# Load and preprocess data
stock_data = pd.read_csv('stock_prices.csv')
stock_data['Date'] = pd.to_datetime(stock_data['Date'])
stock_data.set_index('Date', inplace=True)
# Normalize data
scaler = MinMaxScaler()
scaled_data = scaler.fit_transform(stock_data['Close'].values.reshape(-1, 1))
# Prepare data for LSTM
train_size = int(len(scaled_data) * 0.80)
train_data, test_data = scaled_data[0:train_size], scaled_data[train_size:]
time_steps = 30
X_train, y_train = create_dataset(train_data, time_steps)  # create_dataset as defined in Case Study 1
X_test, y_test = create_dataset(test_data, time_steps)
# Reshape input to be [samples, time steps, features]
X_train = X_train.reshape(X_train.shape[0], X_train.shape[1], 1)
X_test = X_test.reshape(X_test.shape[0], X_test.shape[1], 1)
# Build LSTM model
model_lstm = Sequential()
model_lstm.add(LSTM(units=50, return_sequences=True, input_shape=(X_train.shape[1], 1)))
model_lstm.add(LSTM(units=50))
model_lstm.add(Dense(1))
model_lstm.compile(optimizer='adam', loss='mean_squared_error')
model_lstm.fit(X_train, y_train, epochs=1, batch_size=1, verbose=2)
# Make predictions
train_predict = model_lstm.predict(X_train)
test_predict = model_lstm.predict(X_test)
# Inverse transform predictions
train_predict = scaler.inverse_transform(train_predict)
test_predict = scaler.inverse_transform(test_predict)
# Plot results
plt.plot(stock_data.index[time_steps:train_size], stock_data['Close'].values[time_steps:train_size], label='Train Data')
plt.plot(stock_data.index[train_size+time_steps:], stock_data['Close'].values[train_size+time_steps:], label='Actual Test Data')
plt.plot(stock_data.index[time_steps:train_size], train_predict, label='LSTM Train Prediction')
plt.plot(stock_data.index[train_size+time_steps:], test_predict, label='LSTM Test Prediction')
plt.legend()
plt.show()

Case Study 6: Website Traffic Prediction

Objective: Forecast website traffic to optimize server resources.

Dataset: Hourly website traffic data.

Code Example (using SARIMA):

import pandas as pd
from statsmodels.tsa.statespace.sarimax import SARIMAX
import matplotlib.pyplot as plt
# Load and preprocess data
traffic_data = pd.read_csv('website_traffic.csv')
traffic_data['Datetime'] = pd.to_datetime(traffic_data['Datetime'])
traffic_data.set_index('Datetime', inplace=True)
# Fit SARIMA model
model_sarima = SARIMAX(traffic_data['Visitors'], order=(1, 1, 1), seasonal_order=(1, 1, 1, 24))
results_sarima = model_sarima.fit()
# Forecast the next 24 hours
forecast_steps = 24
forecast_sarima = results_sarima.get_forecast(steps=forecast_steps)
forecast_mean = forecast_sarima.predicted_mean
# Plot results
plt.plot(traffic_data.index, traffic_data['Visitors'], label='Actual')
plt.plot(forecast_mean.index, forecast_mean, label='SARIMA Forecast')
plt.legend()
plt.show()

Case Study 7: Call Volume Prediction for Customer Service

Objective: Predict call volumes to optimize customer service staffing.

Dataset: Daily call volume data.

Code Example (using Seasonal ARIMA):

import pandas as pd
from statsmodels.tsa.statespace.sarimax import SARIMAX
import matplotlib.pyplot as plt
# Load and preprocess data
call_data = pd.read_csv('call_volume.csv')
call_data['Date'] = pd.to_datetime(call_data['Date'])
call_data.set_index('Date', inplace=True)
# Fit Seasonal ARIMA model
model_sarima = SARIMAX(call_data['Volume'], order=(1, 1, 1), seasonal_order=(1, 1, 1, 7))
results_sarima = model_sarima.fit()
# Forecast the next 7 days
forecast_steps = 7
forecast_sarima = results_sarima.get_forecast(steps=forecast_steps)
forecast_mean = forecast_sarima.predicted_mean
# Plot results
plt.plot(call_data.index, call_data['Volume'], label='Actual')
plt.plot(forecast_mean.index, forecast_mean, label='SARIMA Forecast')
plt.legend()
plt.show()
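
A glance at the fitted model's summary helps confirm that the weekly seasonal terms are actually contributing before the forecast is used for staffing:

# Coefficient estimates, standard errors and information criteria
print(results_sarima.summary())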

These examples cover stock price prediction, website traffic forecasting, and call volume prediction using LSTM and SARIMA models. Adjust the parameters to the characteristics of your dataset.

Case Study 8: Energy Consumption Forecasting

Objective: Predict future energy consumption for efficient resource planning.

Dataset: Hourly energy consumption data.

Code Example (using Prophet):

import pandas as pd
from prophet import Prophet  # the package is now published as 'prophet' (formerly fbprophet)
import matplotlib.pyplot as plt
# Load and preprocess data
energy_data = pd.read_csv('energy_consumption.csv')
energy_data['Datetime'] = pd.to_datetime(energy_data['Datetime'])
energy_data.rename(columns={'Datetime': 'ds', 'Consumption': 'y'}, inplace=True)
# Fit Prophet model
model_prophet = Prophet(yearly_seasonality=True)
model_prophet.fit(energy_data)
# Create future dataframe for prediction
future_prophet = model_prophet.make_future_dataframe(periods=365 * 24, freq='H')  # one year ahead at hourly resolution
# Forecast future values
forecast_prophet = model_prophet.predict(future_prophet)
# Plot results
fig = model_prophet.plot(forecast_prophet)
plt.show()
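
Energy demand often shifts on public holidays, and Prophet can include a built-in country holiday calendar as long as it is added before fitting. A minimal variant of the model above (the country code is an assumption about where the data comes from):

# Variant: include public holidays before fitting (country code 'US' is an assumption)
model_prophet = Prophet(yearly_seasonality=True)
model_prophet.add_country_holidays(country_name='US')
model_prophet.fit(energy_data)
forecast_prophet = model_prophet.predict(model_prophet.make_future_dataframe(periods=365 * 24, freq='H'))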

Case Study 9: Monthly Sales Prediction

Objective: Predict monthly sales for inventory management.

Dataset: Monthly sales data.

Code Example (using Exponential Smoothing):

import pandas as pd
from statsmodels.tsa.holtwinters import ExponentialSmoothing
import matplotlib.pyplot as plt
# Load and preprocess data
sales_data = pd.read_csv('monthly_sales.csv')
sales_data['Month'] = pd.to_datetime(sales_data['Month'])
sales_data.set_index('Month', inplace=True)
# Fit Exponential Smoothing model
model_ets = ExponentialSmoothing(sales_data['Sales'], seasonal='add', seasonal_periods=12)
results_ets = model_ets.fit()
# Forecast future values
forecast_ets = results_ets.forecast(steps=12)
# Plot results
plt.plot(sales_data.index, sales_data['Sales'], label='Actual')
plt.plot(forecast_ets.index, forecast_ets, label='ETS Forecast')
plt.legend()
plt.show()
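
If the seasonal swings grow with the overall level of sales, multiplicative seasonality may fit better than additive; comparing information criteria is a simple way to decide (multiplicative seasonality requires strictly positive values):

# Compare additive vs multiplicative seasonality by AIC (lower is better)
fit_add = ExponentialSmoothing(sales_data['Sales'], seasonal='add', seasonal_periods=12).fit()
fit_mul = ExponentialSmoothing(sales_data['Sales'], seasonal='mul', seasonal_periods=12).fit()
print('Additive AIC:', fit_add.aic, 'Multiplicative AIC:', fit_mul.aic)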

Case Study 10: Web Page Traffic Prediction

Objective: Forecast web page traffic for server optimization.

Dataset: Daily web page traffic data.

Code Example (using Long Short-Term Memory (LSTM)):

import pandas as pd
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import LSTM, Dense
import matplotlib.pyplot as plt
# Load and preprocess data
web_traffic_data = pd.read_csv('web_traffic.csv')
web_traffic_data['Date'] = pd.to_datetime(web_traffic_data['Date'])
web_traffic_data.set_index('Date', inplace=True)
# Normalize data
scaler = MinMaxScaler()
scaled_data = scaler.fit_transform(web_traffic_data['Visitors'].values.reshape(-1, 1))
# Prepare data for LSTM
train_size = int(len(scaled_data) * 0.80)
train_data, test_data = scaled_data[0:train_size], scaled_data[train_size:]
time_steps = 30
X_train, y_train = create_dataset(train_data, time_steps)  # create_dataset as defined in Case Study 1
X_test, y_test = create_dataset(test_data, time_steps)
# Reshape input to be [samples, time steps, features]
X_train = X_train.reshape(X_train.shape[0], X_train.shape[1], 1)
X_test = X_test.reshape(X_test.shape[0], X_test.shape[1], 1)
# Build LSTM model
model_lstm = Sequential()
model_lstm.add(LSTM(units=50, return_sequences=True, input_shape=(X_train.shape[1], 1)))
model_lstm.add(LSTM(units=50))
model_lstm.add(Dense(1))
model_lstm.compile(optimizer='adam', loss='mean_squared_error')
model_lstm.fit(X_train, y_train, epochs=1, batch_size=1, verbose=2)
# Make predictions
train_predict = model_lstm.predict(X_train)
test_predict = model_lstm.predict(X_test)
# Inverse transform predictions
train_predict = scaler.inverse_transform(train_predict)
test_predict = scaler.inverse_transform(test_predict)
# Plot results
plt.plot(web_traffic_data.index[time_steps:train_size], web_traffic_data['Visitors'].values[time_steps:train_size], label='Train Data')
plt.plot(web_traffic_data.index[train_size+time_steps:], web_traffic_data['Visitors'].values[train_size+time_steps:], label='Actual Test Data')
plt.plot(web_traffic_data.index[time_steps:train_size], train_predict, label='LSTM Train Prediction')
plt.plot(web_traffic_data.index[train_size+time_steps:], test_predict, label='LSTM Test Prediction')
plt.legend()
plt.show()
