This is the code I used to create one of my images on the results page. The other images there were created with variations on this code (namely, in the plotting sections and the machine-specific constants).
# erebus.py
# Author: Natasha Graham
# Low-Pass Filter Routine
# Adapted from Lyons et al 2012
# online supplement 2
# For Python 3
# Tilt extraction methods
# adapted from Lyons et al 2012
import numpy as np
from scipy.integrate import cumtrapz
from scipy.signal import butter, buttord, filtfilt, lfilter, decimate
from scipy import constants
import matplotlib.pyplot as plt
from obspy import read
import sys
import os
# Taken directly from Lyons 2012
# Adapted from the original MATLAB to Python 3
# Performs lowpass filter on data
def lpfl(x, fs, fl, fh, np):
# Definition of variables
# x : Data vector
# fs : sampling frequency (Hz)
# fl : cut frequency
# fh : stop frequency
# rp : pass (3dB)
# rs : stop (50dB)
# np : order of the Butter filter
wp = 2 * fl / fs # pass band corner
ws = 2 * fh / fs # stop band corner
rp = 3
rs = 50
n, wn = buttord(wp, ws, rp, rs)
b, a = butter(n, wn)
# single-pass, causal filter
if np == 1:
return lfilter(b, a, x)
# double-pass, non-causal filter
else:
return filtfilt(b, a, x)
def remove_mean(data):
    """Return *data* with its arithmetic mean subtracted.

    Parameters
    ----------
    data : numpy array
        Input samples.

    Returns
    -------
    numpy array
        A zero-mean copy of the input (the original is not modified).
    """
    centred = data - data.mean()
    return centred
def integrate_data(data, timescale):
    """Cumulatively integrate *data* over *timescale* (trapezoid rule).

    Parameters
    ----------
    data : array
        Sample values to integrate.
    timescale : array
        Time coordinate of each sample; must match *data* in length,
        otherwise SciPy raises (callers rely on that to skip bad files).

    Returns
    -------
    array
        Running integral of *data*; one element shorter than the input.
    """
    # scipy.integrate.cumtrapz was renamed cumulative_trapezoid in
    # SciPy 1.6 and removed entirely in SciPy 1.14; prefer the new name
    # and fall back so older environments keep working.
    try:
        from scipy.integrate import cumulative_trapezoid as _cumtrapz
    except ImportError:
        from scipy.integrate import cumtrapz as _cumtrapz
    return _cumtrapz(y=data, x=timescale)
def data_filter(f):
    """Filter and stack tilt signals from every data file in directory *f*.

    Each file is demeaned, integrated, decimated from 40 Hz to 4 Hz,
    low-pass filtered, converted to tilt with the instrument constants,
    and added to a running stack.  Files that fail to process (usually
    because of mismatched array sizes) are counted and skipped.

    Parameters
    ----------
    f : str
        Path of a directory containing the waveform files to stack.

    Returns
    -------
    (numpy array, int)
        The stacked (averaged) tilt signal and the number of events
        that went into the stack.

    Raises
    ------
    ValueError
        If no file in the directory could be processed.
    """
    # Instrument constants for a CMG-40T; adjust for other machines.
    S = 800          # seismometer sensitivity
    f0 = 0.033       # natural frequency of the machine (Hz)
    fs = 4           # sampling frequency after decimation (Hz)
    fl = 1 / 30      # low-pass cut frequency
    fh = 1 / 10      # low-pass stop frequency
    # NOTE(review): physically w0 should be 2*pi*f0, but empirically the
    # stack *appears* better with just f0 — probably a filtering issue.
    w0 = f0
    # Running stack: 110 minutes of data at 4 Hz.
    final = np.zeros(110 * 60 * 4)
    files = os.listdir(os.fsencode(f))
    # Count files that fail to process so the average only reflects the
    # events actually stacked.
    no = 0
    # Time axis in minutes for the raw 40 Hz data (110 min + 1 sample).
    # Loop-invariant, so build it once instead of once per file.
    time = np.array([((1 / 40) * x) / 60 for x in range(110 * 60 * 40 + 1)])
    for fi in files:
        d = read(f + "/" + os.fsdecode(fi))
        data_1 = remove_mean(d[0].data)
        # Best-effort stacking: a file whose length does not match the
        # expected time axis raises here; count it and move on.
        try:
            fullsamplerate_data = integrate_data(data_1, time)
        except Exception:
            no += 1
            continue
        # Downsample the integrated data by 10 with a FIR anti-alias
        # filter, then low-pass (single-pass, causal).
        downsampled_data = decimate(fullsamplerate_data, 10, ftype='fir')
        lowpass_filtered_data = lpfl(downsampled_data, fs, fl, fh, 1)
        # Convert the filtered signal to tilt and add it to the stack.
        # Most failures are caught above already, but guard just in case.
        try:
            final = np.add(final, (-1 * S * np.power(w0, 2) / constants.g) * lowpass_filtered_data)
        except Exception:
            no += 1
    n_events = len(files) - no
    if n_events == 0:
        # Previously this divided by zero, silently yielding an
        # inf/nan stack; fail loudly instead.
        raise ValueError("no usable data files found in " + f)
    return final / n_events, n_events
def main(argv):
    """Stack tilt data for every station/channel directory under
    ``argv[0]`` and save a two-panel comparison figure.

    ``argv[0]`` is expected to contain one sub-directory per station,
    each holding one sub-directory per channel.  The panel layout and
    dictionary keys are hard-coded for the NKB Dec-2013 / Jan-2016
    data sets.

    Parameters
    ----------
    argv : list of str
        Command-line arguments (the directory path is ``argv[0]``).
    """
    root = os.fsencode(argv[0])
    station_dirs = os.listdir(root)
    num_of_files = len(station_dirs)
    # Time axis in minutes for plotting: 110 minutes at 4 Hz.
    time = np.array([((10 / 40) * x) / 60 for x in range(110 * 60 * 4)])
    # station name -> {channel name: [stacked tilt, event count]}
    results_dict = {}
    for f in station_dirs:
        sta = os.fsdecode(f)
        # Run every channel directory of this station through the
        # filtering/stacking pipeline.
        for l in os.listdir(root + os.fsencode("/") + f):
            cha = os.fsdecode(l)
            total_data, events = data_filter(argv[0] + "/" + sta + "/" + cha)
            results_dict.setdefault(sta, {})[cha] = [total_data, events]
    # Number of stations — maybe see about automating figure creation.
    num = len(results_dict)

    def draw_panel(index, key, label):
        # One subplot: annotation, East (BH1) and North (BH2) traces,
        # and the grey 25-35 min highlight band.
        ax = plt.subplot(num, 1, index)
        plt.xlim(0, 110)
        plt.xticks(np.arange(0, 111, 10))
        plt.ylabel("Tilt (nrad)")
        ax.annotate(label + ", " + str(results_dict[key]["BH1"][1]) + " event(s)",
                    xy=(0, 1), xycoords="axes fraction")
        plt.plot(time, results_dict[key]["BH1"][0], "--k", label="East")
        plt.plot(time, results_dict[key]["BH2"][0], "-r", label="North")
        plt.axvspan(25, 35, color="grey")

    # Plot the tilt signals and save as an image.
    draw_panel(1, "NKB-dec-2013", "NKB Dec 2013")
    plt.legend(bbox_to_anchor=(1.1, 1.05))
    draw_panel(2, "NKB-2016-jan", "NKB Jan 2016")
    plt.xlabel("Time (min)")
    plt.savefig("results/figure.png")


if __name__ == "__main__":
    main(sys.argv[1:])