I am currently writing my master's thesis on the connection between a generalization of the Reidemeister torsion (called the \(\ell^2\)-Alexander torsion) and the Thurston norm of 3-manifolds.
# Clone (or reuse) the thesis repository and summarize commit activity
# since the cutoff date.
import os
from datetime import date, datetime

import git

REPO_URL = "https://github.com/quoteme/masterarbeit.git"
REPO_DIR = "/tmp/quoteme-masterarbeit"
CUTOFF_DATE = date(2025, 10, 8)

# Reuse a previously cloned bare repo when present; clone otherwise.
repo: git.Repo = (
    git.Repo(REPO_DIR)
    if os.path.exists(REPO_DIR)
    else git.Repo.clone_from(REPO_URL, REPO_DIR, bare=True)
)
# Force-update all local branch refs so iter_commits sees the latest history.
repo.remotes.origin.fetch("+refs/heads/*:refs/heads/*")

# iter_commits('--all') yields commits newest-first, so commits[0] is the
# most recent commit in range and commits[-1] the oldest.
commits = [
    commit
    for commit in repo.iter_commits("--all")
    if commit.committed_datetime.date() >= CUTOFF_DATE
]
if not commits:
    raise ValueError(f"No commits found on or after {CUTOFF_DATE.isoformat()}")

# Summary of the total number of commits.
print(f"Total commits: {len(commits)}")
print(f"First commit: {datetime.fromtimestamp(commits[-1].committed_date)}")
print(f"Last commit: {datetime.fromtimestamp(commits[0].committed_date)}")
# Fix: n commits span n - 1 gaps, so divide by len(commits) - 1 (guarded
# against a single commit) rather than len(commits), which understated the
# average gap length.
interval_count = max(len(commits) - 1, 1)
span_seconds = commits[0].committed_date - commits[-1].committed_date
print(f"Avg. time between commits: {span_seconds / interval_count / 3600:.2f} hours")
Total commits: 187
First commit: 2025-10-08 10:56:26
Last commit: 2026-03-17 22:45:47
Avg. time between commits: 20.60 hours
Code
# Scatter plot: each commit since the cutoff, plotted by date against its
# running index (1 = oldest commit in range).
import plotly.graph_objects as go

commits_sorted = sorted(commits, key=lambda c: c.committed_datetime)
when = [c.committed_datetime for c in commits_sorted]
ordinal = list(range(1, len(commits_sorted) + 1))

scatter = go.Scatter(
    x=when,
    y=ordinal,
    mode="markers",
    marker=dict(size=7, color="#4C78A8", opacity=0.85),
    hovertemplate="Date: %{x|%Y-%m-%d %H:%M:%S}<br>Commit #: %{y}<extra></extra>",
    showlegend=False,
)
fig = go.Figure(data=[scatter])
fig.update_layout(
    title=f"Commits since {CUTOFF_DATE.isoformat()} · quoteme/masterarbeit",
    xaxis_title="Date",
    yaxis_title="Commit number",
    template="plotly_white",
    height=450,
)
fig.show()
Code
# Fit a straight line to the cumulative commit count and extrapolate it to
# the forecast end date.
from collections import Counter
from datetime import timedelta

import numpy as np
import plotly.graph_objects as go

forecast_end_date = date(2026, 4, 6)

# Commits per calendar day; days with no commits count as zero.
per_day = Counter(c.committed_datetime.date() for c in commits_sorted)
observed_end_date = max(per_day)
n_observed_days = (observed_end_date - CUTOFF_DATE).days + 1
observed_dates = [CUTOFF_DATE + timedelta(days=i) for i in range(n_observed_days)]
observed_daily = np.array([per_day.get(d, 0) for d in observed_dates], dtype=float)
observed_cumulative = np.cumsum(observed_daily)

# Least-squares line through the observed cumulative counts.
t_obs = np.arange(n_observed_days, dtype=float)
slope, intercept = np.polyfit(t_obs, observed_cumulative, deg=1)

n_all_days = (forecast_end_date - CUTOFF_DATE).days + 1
all_dates = [CUTOFF_DATE + timedelta(days=i) for i in range(n_all_days)]
t_all = np.arange(n_all_days, dtype=float)
# Clamp at zero: a cumulative count can never be negative.
linear_cumulative = np.clip(slope * t_all + intercept, a_min=0.0, a_max=None)

# Plot 1: linear model forecast
linear_fig = go.Figure()
linear_fig.add_trace(
    go.Scatter(
        x=observed_dates,
        y=observed_cumulative,
        mode="lines+markers",
        name="Observed cumulative",
        marker=dict(size=5),
        line=dict(width=2),
    )
)
linear_fig.add_trace(
    go.Scatter(
        x=all_dates,
        y=linear_cumulative,
        mode="lines",
        name="Linear regression",
        line=dict(width=2, dash="dash"),
    )
)
# Mark where observation ends and extrapolation begins.
linear_fig.add_vline(
    x=observed_end_date,
    line_width=1,
    line_dash="dash",
    line_color="gray",
)
linear_fig.update_layout(
    title=f"Linear model extrapolation until {forecast_end_date.isoformat()}",
    xaxis_title="Date",
    yaxis_title="Cumulative commits since cutoff",
    template="plotly_white",
    height=460,
)
linear_fig.show()

expected_linear = float(linear_cumulative[-1])
print(f"Expected cumulative commits by {forecast_end_date.isoformat()} (linear): {expected_linear:.1f}")
Expected cumulative commits by 2026-04-06 (linear): 173.8
Expected cumulative commits by 2026-04-06 (tda/ripser-model): 240.5
Code
# Fit y(x) = p6(x) + a*sin(x) + b*sin(x)^2 on observed cumulative commits
# and extrapolate — a deliberately flexible model for comparison.
import numpy as np
import plotly.graph_objects as go

x_obs = np.arange(len(observed_dates), dtype=float)
y_obs = observed_cumulative.astype(float)

# Standardize x so the degree-6 powers stay numerically well-conditioned.
x_mean = float(x_obs.mean())
x_std = float(x_obs.std()) if float(x_obs.std()) > 0 else 1.0


def _design(z):
    # Columns: 1, z, ..., z^6, sin(z), sin(z)^2.
    powers = [z ** k for k in range(7)]
    return np.column_stack(powers + [np.sin(z), np.sin(z) ** 2])


z_obs = (x_obs - x_mean) / x_std
design_obs = _design(z_obs)
coeffs, *_ = np.linalg.lstsq(design_obs, y_obs, rcond=None)
y_fit_obs = design_obs @ coeffs

x_all = np.arange(len(all_dates), dtype=float)
z_all = (x_all - x_mean) / x_std
# Clamp at zero: a cumulative count can never be negative.
y_poly_trig_all = np.clip(_design(z_all) @ coeffs, a_min=0.0, a_max=None)

expected_poly_trig = float(y_poly_trig_all[-1])
print(
    f"Expected cumulative commits by {forecast_end_date.isoformat()} "
    f"(poly6 + sin + sin^2): {expected_poly_trig:.1f}"
)

poly_trig_fig = go.Figure()
poly_trig_fig.add_trace(
    go.Scatter(
        x=observed_dates,
        y=y_obs,
        mode="lines+markers",
        name="Observed cumulative",
        marker=dict(size=5),
        line=dict(width=2),
    )
)
poly_trig_fig.add_trace(
    go.Scatter(
        x=observed_dates,
        y=y_fit_obs,
        mode="lines",
        name="Fit on observed",
        line=dict(width=2, dash="dash"),
    )
)
poly_trig_fig.add_trace(
    go.Scatter(
        x=all_dates,
        y=y_poly_trig_all,
        mode="lines",
        name="Poly6 + sin + sin² extrapolation",
        line=dict(width=2, dash="dot"),
    )
)
# Mark where observation ends and extrapolation begins.
poly_trig_fig.add_vline(
    x=observed_end_date,
    line_width=1,
    line_dash="dash",
    line_color="gray",
)
poly_trig_fig.update_layout(
    title=f"Polynomial-trigonometric extrapolation until {forecast_end_date.isoformat()}",
    xaxis_title="Date",
    yaxis_title="Cumulative commits since cutoff",
    template="plotly_white",
    height=460,
)
poly_trig_fig.show()
Expected cumulative commits by 2026-04-06 (poly6 + sin + sin^2): 0.0
Conclusion
From the commit graph one can see that I was pretty lazy for the first three months. Admittedly, during the Christmas break I was mostly occupied with my family and friends. Starting January 5th, however, I have been very productive and have made a lot of commits in a short time.
One can clearly see that on January 25th my laptop broke and my work laptop was confiscated by the university because of some bureaucratic nonsense. This left me without any working computer for around one month, which is reflected in the complete lack of commits during that time. Luckily, I was able to repair my laptop and get back to work around one month later, which led to the second spike in commits.