Thanks for that obsession. I was wondering how to start learning about 
plotting in python.  I've not got very far.
What I have done in the attached version is:

   - add option to work with a MySQL db
   - plot first 3 pulse times.
   - added a background grey bar for 3 min either side of the expected time.
   - also a wide-scale plot covering them all.
   - as you get closer to the equator, tidal changes dominate the baseline 
   in that timescale - I tried higher order polynomials, but they are next to 
   useless.

This version generates 4 png files.[image: Hunga_Tonga_return.png]
I was even able to see a small dip at the expected time it was making its 
second pass around (as seen in the image). The dip far left is the opposite 
direction pulse.
On Wednesday, 19 January 2022 at 3:40:46 am UTC+10 [email protected] wrote:

> I may be starting to obsess a bit, but give the attached python script a 
> try. You may have to install additional modules, e.g. geopy and sqlite3.
>
> 1. On line 9, specify your location
> 2. On line 14, specify the range of hours you want to examine, around the 
> event.
> 3. On line 25, you may need to adjust the query if your pressure data is 
> not in the barometer column of the archive table
> 3. Move to the directory containing your weewx.sdb database, or on line 
> 22, specify its full path.
> 4. Run the script
>
> The script calculates the distance from the eruption to your location, the 
> travel time to your location at the speed of sound, and then queries and 
> plots, and saves the barometer data for a time range around the estimated 
> arrival time. It also removes any linear trend and plots the difference from 
> that linear trend. 
>
> Follow up with any improvements!
>
> Mine looks like this:
> [image: hunga_tonga.png]
>

-- 
You received this message because you are subscribed to the Google Groups 
"weewx-user" group.
To unsubscribe from this group and stop receiving emails from it, send an email 
to [email protected].
To view this discussion on the web visit 
https://groups.google.com/d/msgid/weewx-user/a6ffa761-afcb-4d12-be4d-776a91d2788fn%40googlegroups.com.
import time
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from geopy import distance

#DB="sqlite3"
DB="mysql"
# Select the database backend. sqlite3 is in the standard library;
# the mysql path needs the mysql-connector-python package installed.
if DB == "sqlite3":
    import sqlite3
else:
    import sys
    import mysql.connector
    from mysql.connector import errorcode as msqlerrcode
    # MySQL credentials -- edit to match your weewx database setup
    db_user="weewx"
    db_pwd="insert-your-password"
    db_name="weewx"

# change as needed
# (latitude, longitude) of the station whose barometer data is queried
home = (44.8032, -63.62038)

# some barometers only change their reading every nth record in the database
# set oversampling = 1 if the barometer can change every archive record.
# set oversampling = 2 if the barometer updates every 10 minutes but you have a 5 minute sample period
oversampling = 1

# order of polynomial fitted to background
background_order = 2

# you should not need to edit anything below here
# ===============================================

def plot_burst( cursor, pulse_time, span, order ):
    """Plot barometer data around one expected pressure-pulse arrival.

    Queries the weewx ``archive`` table for readings within ``span`` hours
    either side of ``pulse_time``, fits and removes a polynomial background
    (order set by the module-level ``background_order``), and saves the plot
    as ``Hunga_Tonga_<order>.png``.

    cursor     -- open DB cursor onto a weewx database
    pulse_time -- expected pulse arrival time (unix epoch seconds)
    span       -- half-width of the plotted window, in hours
    order      -- label used in the output filename (e.g. "primary");
                  NOTE: despite the name this is NOT the polynomial order
    """
    query = "select datetime, barometer from archive where datetime > %.0f and datetime < %.0f order by dateTime;" %\
                    ( pulse_time - span*3600, pulse_time + span*3600)
    cursor.execute(query)

    result = cursor.fetchall()

    xdata = []   # epoch seconds (abscissa for the polynomial fit)
    ydata = []   # barometer readings
    tdata = []   # matplotlib date numbers (abscissa for plotting)

    rowcount = 0
    for row in result:
        rowcount += 1
        # skip records on which the barometer cannot have updated
        if rowcount % oversampling == 0:
            xdata.append(row[0])
            tdata.append(mdates.datestr2num(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(row[0]))))
            ydata.append(row[1])

    if not xdata:
        # no data in range -- np.polyfit would raise on empty input
        print("no archive data found around %.0f; skipping %s plot" % (pulse_time, order))
        return

    peakt = [mdates.datestr2num(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime( pulse_time )))]

    # fit and remove a low-order polynomial background trend
    coeff = np.polyfit(xdata, ydata, background_order )
    trend = np.polyval(coeff, xdata)

    fig, ax = plt.subplots()

    date_formatter = mdates.DateFormatter('%m-%d %H:%M')
    ax.tick_params(axis="x", rotation=90)
    ax.xaxis.set_major_formatter(date_formatter)

    fig.subplots_adjust(bottom=0.2)

    # grey bar marking the expected arrival time, on its own hidden axis
    bar_ax = ax.twinx()
    bar_ax.set_axis_off()
    bar_ax.bar( peakt, [1] , width=0.25/span, color="lightgrey" )

    # raw data on the primary axis, detrended residual on a second axis
    ax.plot(tdata, ydata, color="paleturquoise", linewidth=3)
    resid_ax = ax.twinx()
    resid_ax.plot(tdata, ydata - trend, marker="+", linewidth=1)

    # save the plot as a file
    filename = "./Hunga_Tonga_" + order + ".png"
    fig.savefig( filename, format='png', dpi=100, bbox_inches='tight')
    # free the figure; repeated calls would otherwise accumulate open figures
    plt.close(fig)


def plot_all( cursor, e_time, pulse_time1, pulse_time2, pulse_time3, span ):
    """Plot the full barometer record from the eruption through the third
    expected pulse, and save it as ``Hunga_Tonga_all.png``.

    cursor      -- open DB cursor onto a weewx database
    e_time      -- eruption time (unix epoch seconds); start of the window
    pulse_time1 -- expected arrival of the direct pulse (epoch seconds)
    pulse_time2 -- expected arrival of the opposite-direction pulse
    pulse_time3 -- expected arrival of the second-time-around pulse;
                   end of the window
    span        -- extra hours plotted before e_time and after pulse_time3
    """
    query = "select datetime, barometer from archive where datetime > %.0f and datetime < %.0f order by dateTime;" %\
                    ( e_time - span*3600, pulse_time3 + span*3600)
    cursor.execute(query)

    result = cursor.fetchall()

    xdata = []   # epoch seconds (abscissa for the polynomial fit)
    ydata = []   # barometer readings
    tdata = []   # matplotlib date numbers (abscissa for plotting)

    rowcount = 0
    for row in result:
        rowcount += 1
        # skip records on which the barometer cannot have updated
        if rowcount % oversampling == 0:
            xdata.append(row[0])
            tdata.append(mdates.datestr2num(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(row[0]))))
            ydata.append(row[1])

    if not xdata:
        # no data in range -- np.polyfit would raise on empty input
        print("no archive data found between %.0f and %.0f; skipping wide plot" %
                    (e_time, pulse_time3))
        return

    # grey-bar positions for the three expected arrivals
    peakt = [
            mdates.datestr2num(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime( pulse_time1 ))),
            mdates.datestr2num(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime( pulse_time2 ))),
            mdates.datestr2num(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime( pulse_time3 )))
            ]

    # Fit and remove an order-5 polynomial background: over this multi-day
    # window tidal/synoptic pressure changes dominate, so a higher order
    # than background_order is used here.
    coeff = np.polyfit(xdata, ydata, 5 )
    trend = np.polyval(coeff, xdata)

    fig, ax = plt.subplots()

    date_formatter = mdates.DateFormatter('%m-%d %H:%M')
    ax.tick_params(axis="x", rotation=90)
    ax.xaxis.set_major_formatter(date_formatter)

    fig.subplots_adjust(bottom=0.2)

    # grey bars marking the expected arrival times, on their own hidden axis
    bar_ax = ax.twinx()
    bar_ax.set_axis_off()
    bar_ax.bar( peakt, [1,1,1] , width=0.25/span, color="lightgrey" )

    # raw data on the primary axis, detrended residual on a second axis
    ax.plot(tdata, ydata, color="paleturquoise", linewidth=3)
    resid_ax = ax.twinx()
    resid_ax.plot(tdata, ydata - trend, marker="+", linewidth=1)

    # save the plot as a file
    filename = "./Hunga_Tonga_" + "all" + ".png"
    fig.savefig( filename, format='png', dpi=100, bbox_inches='tight')
    # free the figure; repeated calls would otherwise accumulate open figures
    plt.close(fig)



# Eruption parameters and expected pulse arrival times.
hunga_tonga = (-20.5452074472518, -175.38715105641674)  # eruption lat/lon
#speed_of_sound = 0.32 # km/s
speed_of_sound = 0.315 # km/s - derived value from fitting first and 2nd pulses to several points across Australia.
eruption_time = 1642220085 # 2022-01-15 04:14:45 UTC
earth_circumference=40040     # km, near enough
hour_span = 6                 # hours plotted either side of each pulse

# Use a distinct name so the imported geopy `distance` module is not
# shadowed by the result (the original rebinding clobbered the module).
distance_km = distance.distance(hunga_tonga, home).km
travel_time = distance_km / speed_of_sound          # seconds, direct path
arrival_time = eruption_time + travel_time
once_around = earth_circumference  / speed_of_sound # seconds for one full circuit
opposite_wave_time = eruption_time + once_around - travel_time  # pulse going the other way round
return_wave_time = eruption_time + once_around + travel_time    # direct pulse, second pass

print("distance to eruption %0.1f km arrival at %.0f (%s)" %
            (distance_km, arrival_time, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(arrival_time))))
print("      opposite pulse arrival at %.0f (%s)" %
            ( opposite_wave_time , time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(opposite_wave_time ))))
print("      second time around pulse arrival at %.0f (%s)" %
            ( return_wave_time , time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(return_wave_time ))))
      
# Open the database connection selected by DB at the top of the file.
if DB == "sqlite3":
    # assumes weewx.sdb is in the current directory; edit the path if not
    connection = sqlite3.connect('weewx.sdb')
else:
    try:
        connection = mysql.connector.connect( user=db_user, password=db_pwd,
                host='127.0.0.1', database=db_name)
    except mysql.connector.Error as err:
        # report the common failure modes clearly, then bail out
        # (indentation normalized to 4 spaces for PEP 8 consistency)
        if err.errno == msqlerrcode.ER_ACCESS_DENIED_ERROR:
            print("Bad user name or password")
        elif err.errno == msqlerrcode.ER_BAD_DB_ERROR:
            print("Database does not exist")
        else:
            print(err)
        sys.exit( 2)

cursor = connection.cursor()

# One zoomed-in plot per expected pulse ...
plot_burst( cursor, arrival_time, hour_span, "primary" )
plot_burst( cursor, opposite_wave_time, hour_span, "secondary" )
plot_burst( cursor, return_wave_time, hour_span, "return" )

# ... plus one wide-scale plot covering the eruption through the third pulse.
plot_all( cursor, eruption_time, arrival_time, opposite_wave_time, return_wave_time, hour_span )

connection.close()


Reply via email to