import os
import tempfile
import urllib.request

import numpy as np
import pandas as pd
from obspy.core import UTCDateTime, read, Stream


###### Functions and Classes ######

def read_sta_file(filename):
    # read the pipe-separated FDSN station text file and keep a short list of columns
    pd_sta = pd.read_csv(filename, sep='|', header=0, names=RESIF_sta_names)
    short_list = ['NET', 'STA', 'LOCID', 'LAT', 'LON', 'INSTRUM', 'SAMPRATE',
                  'START', 'END']
    values = pd_sta[short_list].values
    # drop duplicate rows (the per-channel lines collapse to one line per station epoch)
    values_unique = np.vstack(list({tuple(row) for row in values}))
    ista = short_list.index('STA')
    for v in values_unique:
        v[ista] = v[ista].strip()
    return values_unique, short_list


def create_station_objects(X, names_list):
    # group the metadata lines by station code and build one SeismicStation per code
    ista = names_list.index('STA')
    stations = X[:, ista]
    unique_stations = np.unique(stations)
    stadict = {}
    for sta in unique_stations:
        lines = X[X[:, ista] == sta, :]
        stadict[sta] = SeismicStation(lines, names_list)
    return stadict


class SeismicStation(object):

    def __init__(self, sta_lines, sta_col_names):
        nr, nc = sta_lines.shape
        ista = sta_col_names.index('STA')
        ilat = sta_col_names.index('LAT')
        ilon = sta_col_names.index('LON')
        istart = sta_col_names.index('START')
        iend = sta_col_names.index('END')

        self.sta = np.unique(sta_lines[:, ista])[0]

        lats = np.empty(nr, dtype=float)
        lons = np.empty(nr, dtype=float)
        start_times = np.empty(nr, dtype=object)
        end_times = np.empty(nr, dtype=object)
        for i in range(nr):
            line = sta_lines[i]
            lats[i] = float(line[ilat])
            lons[i] = float(line[ilon])
            start_times[i] = UTCDateTime(line[istart])
            try:
                end_times[i] = UTCDateTime(line[iend])
            except (TypeError, ValueError):
                # case of missing end time (still-open station epoch)
                end_times[i] = END_TIME
            if end_times[i] > END_TIME:
                end_times[i] = END_TIME

        self.lat = np.mean(lats)
        self.lon = np.mean(lons)

        # sort the operating periods chronologically
        self.n_periods = len(start_times)
        time_indexes = np.argsort(start_times)
        self.start_times = start_times[time_indexes]
        self.end_times = end_times[time_indexes]

    def get_ReNaSS_events(self, radius_km):
        # set up web-request
        request = 'http://renass.unistra.fr/fdsnws/event/1/query?orderby=time&format=tsv'
        request += '&longitude=%.2f' % self.lon
        request += '&latitude=%.2f' % self.lat
        request += '&starttime=%s' % self.start_times[0].isoformat()
        request += '&endtime=%s' % self.end_times[-1].isoformat()
        request += '&maxradius=%.2f' % (radius_km / 111.25)  # km to degrees

        # retrieve catalog data into a temporary file
        f_, fname = tempfile.mkstemp()
        os.close(f_)
        urllib.request.urlretrieve(request, fname)

        # read data and remove file
        pd_ev = pd.read_csv(fname, sep='\t', names=RENASS_evn_names)
        os.unlink(fname)

        # get only the info we want to keep
        short_list = ['ID', 'OTIME', 'MAG']
        values = pd_ev[short_list].values
        iid = short_list.index('ID')
        itime = short_list.index('OTIME')
        imag = short_list.index('MAG')
        nr, nc = values.shape

        # clean data
        ids = values[:, iid]
        otimes = np.empty(nr, dtype=object)
        ev_mags = np.empty(nr, dtype=float)
        for i in range(nr):
            v = values[i]
            otimes[i] = UTCDateTime(v[itime])
            ev_mags[i] = float(v[imag])

        # sort events by origin time
        self.n_events = len(otimes)
        time_indexes = np.argsort(otimes)
        self.ev_ids = ids[time_indexes]
        self.otimes = otimes[time_indexes]
        self.ev_mags = ev_mags[time_indexes]

    def get_RESIF_data(self, nmax=None):
        if nmax is not None:
            n_ev = nmax
        else:
            n_ev = self.n_events
        data = np.empty(n_ev, dtype=object)
        n_data = 0

        # get data one minute before (t1) and 10 minutes after (t2) the origin times
        for otime in self.otimes[0:n_ev]:
            print(otime)
            t1 = otime - 60
            t2 = otime + 10 * 60
            request = ('http://ws.resif.fr/fdsnws/dataselect/1/query?'
                       'station=%s&channel=HN?&starttime=%s&endtime=%s&quality=B'
                       % (self.sta, t1, t2))
            f_, fname = tempfile.mkstemp()
            os.close(f_)
            urllib.request.urlretrieve(request, fname)
            try:
                st = read(fname)
                os.unlink(fname)
                if len(st) > 0:
                    data[n_data] = st
                    n_data += 1
                    st.write("data/%s_%s.sac" % (self.sta, otime), format="SAC")
                    print('Waveform data found!')
            except TypeError:
                # obspy could not read the file: no waveform was returned
                os.unlink(fname)
                print('no RESIF data in this time window')
                continue

        # keep only the windows for which waveforms were found
        self.data = data[0:n_data]


###### Main program ######

END_TIME = UTCDateTime(2016, 1, 1)

RESIF_station_file = 'RESIF_station_file.txt'

# request station metadata at channel level
sta_request = ('http://ws.resif.fr/fdsnws/station/1/query?'
               'network=FR,RA&channel=HN?&format=text&level=channel')
urllib.request.urlretrieve(sta_request, RESIF_station_file)

RESIF_sta_names = ['NET', 'STA', 'LOCID', 'CHA', 'LAT', 'LON', 'ELEV', 'DEP',
                   'AZ', 'DIP', 'INSTRUM', 'SCALE', 'SFREQ', 'SUNIT',
                   'SAMPRATE', 'START', 'END']
RAP_sta, RAP_col = read_sta_file(RESIF_station_file)

# create station objects using class SeismicStation
sta_dict = create_station_objects(RAP_sta, RAP_col)

RENASS_evn_names = ['ID', 'OTIME', 'LAT', 'LON', 'MAG', 'MAGTYPE', 'DEPTH',
                    'DEPTYPE', 'CAT', 'EVAL1', 'NSTA', 'NPHA', 'AZGAP',
                    'EVAL2', 'TOWN', 'DIST']

# make sure the output directory for the SAC files exists
os.makedirs('data', exist_ok=True)

event_radius = 50  # search radius around each station, in km
for sta in sta_dict.values():
    # get event metadata
    sta.get_ReNaSS_events(event_radius)
    # get event waveform data
    sta.get_RESIF_data(nmax=None)

RESIF_response_file = 'RESIF_response_file.xml'

# request station metadata at response level
resp_request = ('http://ws.resif.fr/fdsnws/station/1/query?'
                'network=FR,RA&channel=HN?&level=response')
urllib.request.urlretrieve(resp_request, RESIF_response_file)