Compare commits

..

3 Commits

Author SHA1 Message Date
b59842899b replace status history with db 2026-02-12 21:00:47 -06:00
9553e77b2f summary 2026-02-12 18:46:00 -06:00
2ae714db48 misc 2026-02-12 17:57:30 -06:00
14 changed files with 347 additions and 221 deletions

View File

@@ -19,9 +19,9 @@ Server-side monitoring system that checks the availability of asimonson.com serv
**Features**:
- Tracks response times and HTTP status codes
- Stores check history (up to 720 checks = 60 days of data)
- Calculates uptime percentages for multiple time periods (24h, 7d, 30d, all-time)
- Persists data to `static/json/status_history.json`
- Persists data to PostgreSQL (`service_checks` table) via `DATABASE_URL` env var
- Gracefully degrades when no database is configured (local dev)
- Runs in a background thread
#### 2. `app.py` - Flask Integration
@@ -57,32 +57,22 @@ Server-side monitoring system that checks the availability of asimonson.com serv
## Data Storage
Status history is stored in `src/static/json/status_history.json`:
Check history is stored in a PostgreSQL `service_checks` table. The connection is configured via the `DATABASE_URL` environment variable (e.g. `postgresql://user:pass@host:5432/dbname`).
```json
{
"last_check": "2026-02-11T14:30:00",
"services": {
"main": {
"name": "asimonson.com",
"url": "https://asimonson.com",
"status": "online",
"response_time": 156,
"status_code": 200,
"last_online": "2026-02-11T14:30:00",
"checks": [
{
"timestamp": "2026-02-11T14:30:00",
"status": "online",
"response_time": 156,
"status_code": 200
}
]
}
}
}
```sql
CREATE TABLE service_checks (
id SERIAL PRIMARY KEY,
service_id VARCHAR(50) NOT NULL,
timestamp TIMESTAMPTZ NOT NULL DEFAULT NOW(),
status VARCHAR(20) NOT NULL,
response_time INTEGER,
status_code INTEGER,
error TEXT
);
```
The table and index are created automatically on startup. If `DATABASE_URL` is not set, the monitor runs without persistence (useful for local development).
## Status Types
- **online**: HTTP status 2xx-4xx, service responding
@@ -142,8 +132,7 @@ SERVICES = [
## Notes
- First deployment will show limited uptime data until enough checks accumulate
- Historical data is preserved across server restarts
- Maximum 720 checks stored per service (60 days at 2-hour intervals)
- Historical data is preserved across server restarts (stored in PostgreSQL)
- Page auto-refreshes every 5 minutes to show latest server data
- Manual refresh button available for immediate updates
- All checks performed server-side (no client-side CORS issues)

View File

@@ -7,3 +7,26 @@ services:
restart: 'no'
ports:
- 8080:8080
environment:
DATABASE_URL: postgresql://portfolio:portfolio@db:5432/portfolio
depends_on:
db:
condition: service_healthy
db:
image: postgres:16-alpine
restart: 'no'
environment:
POSTGRES_USER: portfolio
POSTGRES_PASSWORD: portfolio
POSTGRES_DB: portfolio
volumes:
- pgdata:/var/lib/postgresql/data
healthcheck:
test: ["CMD-SHELL", "pg_isready -U portfolio"]
interval: 5s
timeout: 3s
retries: 5
volumes:
pgdata:

View File

@@ -2,13 +2,14 @@
Service monitoring module
Checks service availability and tracks uptime statistics
"""
import os
import requests
import time
import json
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime, timedelta
from threading import Thread, Lock
from pathlib import Path
import psycopg2
# Service configuration
SERVICES = [
@@ -35,52 +36,138 @@ SERVICES = [
# Check interval: 30 mins
CHECK_INTERVAL = 1800
# File to store status history
STATUS_FILE = Path(__file__).parent / 'static' / 'json' / 'status_history.json'
DATABASE_URL = os.environ.get('DATABASE_URL')
# Expected columns (besides id) — name: SQL type
_EXPECTED_COLUMNS = {
'service_id': 'VARCHAR(50) NOT NULL',
'timestamp': 'TIMESTAMPTZ NOT NULL DEFAULT NOW()',
'status': 'VARCHAR(20) NOT NULL',
'response_time': 'INTEGER',
'status_code': 'INTEGER',
'error': 'TEXT',
}
class ServiceMonitor:
def __init__(self):
self.status_data = {}
self.lock = Lock()
self.load_history()
def load_history(self):
"""Load status history from file"""
if STATUS_FILE.exists():
try:
with open(STATUS_FILE, 'r') as f:
self.status_data = json.load(f)
except Exception as e:
print(f"Error loading status history: {e}")
self.initialize_status_data()
else:
self.initialize_status_data()
def initialize_status_data(self):
"""Initialize empty status data structure"""
self.status_data = {
'last_check': None,
'services': {}
}
# Lightweight in-memory cache of latest status per service
self._current = {}
for service in SERVICES:
self.status_data['services'][service['id']] = {
self._current[service['id']] = {
'name': service['name'],
'url': service['url'],
'status': 'unknown',
'response_time': None,
'status_code': None,
'last_online': None,
'checks': [] # List of check results
}
self._last_check = None
self._ensure_schema()
def save_history(self):
"""Save status history to file"""
# ── database helpers ──────────────────────────────────────────
@staticmethod
def _get_conn():
"""Return a new psycopg2 connection, or None if DATABASE_URL is unset."""
if not DATABASE_URL:
return None
return psycopg2.connect(DATABASE_URL)
def _ensure_schema(self):
"""Create the service_checks table (and index) if needed, then
reconcile columns with _EXPECTED_COLUMNS."""
if not DATABASE_URL:
print("DATABASE_URL not set — running without persistence")
return
# Retry connection in case DB is still starting (e.g. Docker)
conn = None
for attempt in range(5):
try:
conn = psycopg2.connect(DATABASE_URL)
break
except psycopg2.OperationalError:
if attempt < 4:
print(f"Database not ready, retrying in 2s (attempt {attempt + 1}/5)...")
time.sleep(2)
else:
print("Could not connect to database — running without persistence")
return
try:
STATUS_FILE.parent.mkdir(parents=True, exist_ok=True)
with open(STATUS_FILE, 'w') as f:
json.dump(self.status_data, f, indent=2)
except Exception as e:
print(f"Error saving status history: {e}")
with conn, conn.cursor() as cur:
cur.execute("""
CREATE TABLE IF NOT EXISTS service_checks (
id SERIAL PRIMARY KEY,
service_id VARCHAR(50) NOT NULL,
timestamp TIMESTAMPTZ NOT NULL DEFAULT NOW(),
status VARCHAR(20) NOT NULL,
response_time INTEGER,
status_code INTEGER,
error TEXT
);
""")
cur.execute("""
CREATE INDEX IF NOT EXISTS idx_service_checks_service_timestamp
ON service_checks (service_id, timestamp DESC);
""")
# Introspect existing columns
cur.execute("""
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'service_checks'
""")
existing = {row[0] for row in cur.fetchall()}
# Add missing columns
for col, col_type in _EXPECTED_COLUMNS.items():
if col not in existing:
# Strip NOT NULL / DEFAULT for ALTER ADD (can't enforce
# NOT NULL on existing rows without a default)
bare_type = col_type.split('NOT NULL')[0].split('DEFAULT')[0].strip()
cur.execute(
f'ALTER TABLE service_checks ADD COLUMN {col} {bare_type}'
)
print(f"Added column {col} to service_checks")
# Drop unexpected columns (besides 'id')
expected_names = set(_EXPECTED_COLUMNS) | {'id'}
for col in existing - expected_names:
cur.execute(
f'ALTER TABLE service_checks DROP COLUMN {col}'
)
print(f"Dropped column {col} from service_checks")
print("Database schema OK")
finally:
conn.close()
def _insert_check(self, service_id, result):
"""Insert a single check result into the database."""
conn = self._get_conn()
if conn is None:
return
try:
with conn, conn.cursor() as cur:
cur.execute(
"""INSERT INTO service_checks
(service_id, timestamp, status, response_time, status_code, error)
VALUES (%s, %s, %s, %s, %s, %s)""",
(
service_id,
result['timestamp'],
result['status'],
result.get('response_time'),
result.get('status_code'),
result.get('error'),
),
)
finally:
conn.close()
# ── service checks ────────────────────────────────────────────
def check_service(self, service):
"""Check a single service and return status"""
@@ -136,107 +223,124 @@ class ServiceMonitor:
results[service['id']] = result
print(f" {service['name']}: {result['status']} ({result['response_time']}ms)")
# Only acquire lock when updating the shared data structure
# Persist to database (outside lock — DB has its own concurrency)
for service_id, result in results.items():
self._insert_check(service_id, result)
# Update lightweight in-memory cache under lock
with self.lock:
for service in SERVICES:
result = results[service['id']]
service_data = self.status_data['services'][service['id']]
# Update current status
service_data['status'] = result['status']
service_data['response_time'] = result['response_time']
service_data['status_code'] = result['status_code']
cached = self._current[service['id']]
cached['status'] = result['status']
cached['response_time'] = result['response_time']
cached['status_code'] = result['status_code']
if result['status'] == 'online':
service_data['last_online'] = result['timestamp']
cached['last_online'] = result['timestamp']
self._last_check = datetime.now().isoformat()
# Add to check history (keep last 2880 checks = 60 days at 2hr intervals)
service_data['checks'].append(result)
if len(service_data['checks']) > 2880:
service_data['checks'] = service_data['checks'][-2880:]
self.status_data['last_check'] = datetime.now().isoformat()
self.save_history()
# ── uptime calculations ───────────────────────────────────────
def _calculate_uptime_unlocked(self, service_id, hours=None):
"""Calculate uptime percentage for a service (assumes lock is held)"""
service_data = self.status_data['services'].get(service_id)
if not service_data or not service_data['checks']:
"""Calculate uptime percentage for a service by querying the DB."""
conn = self._get_conn()
if conn is None:
return None
try:
with conn.cursor() as cur:
if hours:
cutoff = datetime.now() - timedelta(hours=hours)
cur.execute(
"""SELECT
COUNT(*) FILTER (WHERE status = 'online'),
COUNT(*)
FROM service_checks
WHERE service_id = %s AND timestamp > %s""",
(service_id, cutoff),
)
else:
cur.execute(
"""SELECT
COUNT(*) FILTER (WHERE status = 'online'),
COUNT(*)
FROM service_checks
WHERE service_id = %s""",
(service_id,),
)
checks = service_data['checks']
online_count, total_count = cur.fetchone()
# Filter by time period if specified
if hours:
cutoff = datetime.now() - timedelta(hours=hours)
checks = [
c for c in checks
if datetime.fromisoformat(c['timestamp']) > cutoff
]
if total_count == 0:
return None
if not checks:
return None
# Minimum-data thresholds
if hours:
expected_checks = (hours * 3600) / CHECK_INTERVAL
minimum_checks = max(3, expected_checks * 0.5)
if total_count < minimum_checks:
return None
else:
if total_count < 3:
return None
# Require minimum data coverage for the time period
# Calculate expected number of checks for this period
expected_checks = (hours * 3600) / CHECK_INTERVAL
# Require at least 50% of expected checks to show this metric
minimum_checks = max(3, expected_checks * 0.5)
if len(checks) < minimum_checks:
return None
else:
# For all-time, require at least 3 checks
if len(checks) < 3:
return None
online_count = sum(1 for c in checks if c['status'] == 'online')
uptime = (online_count / len(checks)) * 100
return round(uptime, 2)
return round((online_count / total_count) * 100, 2)
finally:
conn.close()
def calculate_uptime(self, service_id, hours=None):
"""Calculate uptime percentage for a service"""
with self.lock:
return self._calculate_uptime_unlocked(service_id, hours)
return self._calculate_uptime_unlocked(service_id, hours)
def get_status_summary(self):
"""Get current status summary with uptime statistics"""
with self.lock:
summary = {
'last_check': self.status_data['last_check'],
'last_check': self._last_check,
'next_check': None,
'services': []
}
# Calculate next check time
if self.status_data['last_check']:
last_check = datetime.fromisoformat(self.status_data['last_check'])
if self._last_check:
last_check = datetime.fromisoformat(self._last_check)
next_check = last_check + timedelta(seconds=CHECK_INTERVAL)
summary['next_check'] = next_check.isoformat()
for service_id, service_data in self.status_data['services'].items():
for service_id, cached in self._current.items():
service_summary = {
'id': service_id,
'name': service_data['name'],
'url': service_data['url'],
'status': service_data['status'],
'response_time': service_data['response_time'],
'status_code': service_data['status_code'],
'last_online': service_data['last_online'],
'name': cached['name'],
'url': cached['url'],
'status': cached['status'],
'response_time': cached['response_time'],
'status_code': cached['status_code'],
'last_online': cached['last_online'],
'uptime': {
'24h': self._calculate_uptime_unlocked(service_id, 24),
'7d': self._calculate_uptime_unlocked(service_id, 24 * 7),
'30d': self._calculate_uptime_unlocked(service_id, 24 * 30),
'all_time': self._calculate_uptime_unlocked(service_id)
},
'total_checks': len(service_data['checks'])
'total_checks': self._get_total_checks(service_id),
}
summary['services'].append(service_summary)
return summary
def _get_total_checks(self, service_id):
"""Return the total number of checks for a service."""
conn = self._get_conn()
if conn is None:
return 0
try:
with conn.cursor() as cur:
cur.execute(
'SELECT COUNT(*) FROM service_checks WHERE service_id = %s',
(service_id,),
)
return cur.fetchone()[0]
finally:
conn.close()
def start_monitoring(self):
"""Start background monitoring thread"""
def monitor_loop():

View File

@@ -20,3 +20,4 @@ six==1.16.0
urllib3==2.2.2
Werkzeug==3.0.3
xxhash==3.4.1
psycopg2-binary==2.9.9

View File

@@ -174,7 +174,6 @@ tr {
margin-bottom: 0px;
margin-left: 0rem;
background-color: #1a1a1a;
width: 100vw;
z-index: 99;
position: sticky;
top: 0;
@@ -522,7 +521,6 @@ tr {
}
.skill {
display: none;
padding: .2em;
color: rgb(235, 235, 235);
}
@@ -535,19 +533,25 @@ tr {
color: rgb(170, 170, 170);
}
#skillTree > .skill {
display: block;
.skill-children {
display: grid;
grid-template-rows: 0fr;
transition: grid-template-rows 0.3s ease;
}
.skill > .skill {
.skill.activeSkill > .skill-children {
grid-template-rows: 1fr;
}
.skill-children-inner {
overflow: hidden;
}
.skill-children-inner > .skill {
font-size: large !important;
padding-left: .5em;
}
.skill.activeSkill > .skill {
display: block !important;
}
.skill > .skillname {
text-decoration: underline;
cursor: pointer;

View File

@@ -27,15 +27,13 @@ function toggleCheckbox(dir) {
}
function activeSkill(obj) {
if (obj.parentElement.classList.contains("activeSkill")) {
obj.parentElement.classList.remove("activeSkill");
let skill = obj.closest(".skill");
if (skill.classList.contains("activeSkill")) {
skill.classList.remove("activeSkill");
return;
}
// document.querySelectorAll(".skill").forEach((x) => {
// x.classList.remove("activeSkill");
// });
while (obj.parentElement.classList.contains("skill")) {
obj = obj.parentElement;
obj.classList.add("activeSkill");
while (skill) {
skill.classList.add("activeSkill");
skill = skill.parentElement.closest(".skill");
}
}

View File

@@ -1,5 +1,5 @@
const balls = [];
const density = 0.00003;
const density = 0.00005;
let screenWidth = window.innerWidth + 10;
let screenHeight = window.innerHeight + 10;
@@ -74,35 +74,40 @@ function draw() {
balls[i].update();
}
// Optimize line drawing with early distance checks
// Draw lines with additive blending so overlaps increase brightness
blendMode(ADD);
strokeWeight(2);
const maxDist = 150;
const maxDistSquared = maxDist * maxDist; // Avoid sqrt in distance calculation
const maxDistSquared = maxDist * maxDist;
for (let i = 0; i < balls.length - 1; i++) {
const ball1 = balls[i];
for (let j = i + 1; j < balls.length; j++) {
const ball2 = balls[j];
// Quick rejection test using squared distance (faster than sqrt)
const dx = ball2.x - ball1.x;
const dy = ball2.y - ball1.y;
const distSquared = dx * dx + dy * dy;
if (distSquared < maxDistSquared) {
const distance = Math.sqrt(distSquared); // Only calculate sqrt if needed
const distance = Math.sqrt(distSquared);
if (distance < 100) {
stroke(150);
if (distance < 75) {
stroke(255, 85);
line(ball1.x, ball1.y, ball2.x, ball2.y);
} else {
stroke(100);
const chance = 0.3 ** (((random(0.2) + 0.8) * distance) / 150);
if (chance < 0.5) {
stroke(50);
stroke(255, 40);
} else {
stroke(255, 75);
}
line(ball1.x, ball1.y, ball2.x, ball2.y);
}
}
}
}
blendMode(BLEND);
}

View File

@@ -1,41 +1,35 @@
{
"Tools": {
"Microsoft Azure": {
"Databricks": {},
"Data Factory": {},
"Stream Analytics": {}
},
"Databricks": {},
"Apache Spark": {},
"Visual Basic for Applications (Excel)": {}
},
"Data and AI": {
"Python": {
"PyTorch/TensorFlow": {},
"Numpy/Pandas": {},
"Scikit/Sklearn": {},
"Selenium/BS4": {},
"Pyspark": {}
"ML": {
"PySpark ML": {},
"Numpy/Pandas/Polars": {},
"TensorFlow": {},
"Scikit": {}
},
"R": {},
"SQL": {}
"PySpark": {},
"Selenium/BS4 Web Hacking": {},
"SQL": {},
"Declarative Pipelines": {},
"ArcGIS": {}
},
"DevOps": {
"Docker": {},
"Microsoft Azure": {},
"Databricks": {},
"Kubernetes/Openshift": {},
"Cloudflare": {},
"Bash": {}
},
"Frontend": {
"Flask (Python)": {},
"React (Javascript)": {},
"SASS/SCSS": {}
"REST APIs": {},
"Web Scraping": {}
},
"Backend & DevOps": {
"Backend": {
"Rust": {},
"C#": {}
},
"DevOps": {
"Docker": {},
"Microsoft Azure": {},
"Kubernetes/Openshift": {},
"Cloudflare": {},
"Bash": {}
}
"Offline Skills": {
"Circuitry": {},
"Skiing": {},
"Chess": {},
"Plinking": {},
"Building something with trash that solves my problems": {}
}
}

View File

@@ -5,7 +5,7 @@
<loc>https://asimonson.com/projects</loc>
<loc>https://asimonson.com/Resume</loc>
<loc>https://asimonson.com/duck</loc>
<loc>https://asimonson.com/books</loc>
<lastmod>2024-07-24</lastmod>
<loc>https://asimonson.com/status</loc>
<lastmod>2026-02-12</lastmod>
</url>
</urlset>

View File

@@ -20,7 +20,7 @@
property="og:image"
content="{{ url_for('static', filename='icons/rasterLogoCircle.png') }}"
/>
<meta property="og:url" content="{{ var['canonical'] }}" />
<meta property="og:url" content="{{ request.url_root | trim('/') }}{{ var['canonical'] }}" />
<meta property="twitter:title" content="Andrew Simonson" />
<meta name="twitter:description" content="{{ var['description'] }}" />
<meta name="twitter:card" content="summary_large_image" />
@@ -50,7 +50,7 @@
rel="stylesheet"
href="{{ url_for('static', filename='css/App.css') }}"
/>
<link rel="canonical" href="{{ var['canonical'] }}" />
<link rel="canonical" href="{{ request.url_root | trim('/') }}{{ var['canonical'] }}" />
<script defer src="{{ url_for('static', filename='js/checkbox.js') }}"></script>
<script defer src="{{ url_for('static', filename='js/responsive.js') }}"></script>
<script src="{{ url_for('static', filename='js/chessbed.js') }}"></script>

View File

@@ -20,28 +20,27 @@
<!--<INSERT SMALL BANNER HERE FOR PROJECT IMAGECARD CAROUSEL>-->
<div id="desktopSpacer"></div>
<div class="homeSubContent">
<img class='blinkies' alt='My Brain is Glowing' src="{{ url_for('static', filename='photos/blinkies/brainglow.gif') }}" loading="lazy" />
<img class='blinkies' alt='Pepsi Addict' src="{{ url_for('static', filename='photos/blinkies/pepsiaddict.gif') }}" loading="lazy" />
<img class='blinkies' alt='I Fear No Beer' src="{{ url_for('static', filename='photos/blinkies/fearnobeer.gif') }}" loading="lazy" />
<img class='blinkies' alt='Secret Message' src="{{ url_for('static', filename='photos/blinkies/tooclose.gif') }}" loading="lazy" />
<img class='blinkies' alt="They took my blood but it wasn't DNA, it was USA" src="{{ url_for('static', filename='photos/blinkies/usa.gif') }}" loading="lazy" />
<img class='blinkies' alt='Bob the Builder gif' src="{{ url_for('static', filename='photos/blinkies/bobthebuilder.gif') }}" loading="lazy" />
<div>
<br />
<strong> You've reached the website for Andrew Simonson's personal online shenanigans.</strong>
<strong> You've reached the website for Andrew Simonson's digital shenanigans.</strong>
<h3>Now What?</h3>
<p>
Go back and find the link that I originally shared. Or poke around. Be your own person.<br />
I guess I'll grant myself some titles while I'm at it:
I'll grant myself some titles while I'm at it:
</p>
<ul>
<li>Load-Bearing Coconut</li>
<li>Wicked Wizard of the West</li>
<li>Enemy of Node.js, Hater of Bloat</li>
<li>Load-Bearing Coconut</li>
<li>Creator and Harnesser of Energy</li>
</ul>
</div>
<br />
{#
<div id="aboutCards" class="flex">
<div class="chess">
{% from 'partials/chess.html' import chess %} {{
@@ -53,6 +52,7 @@
</div>
<br />
</div>
#}
</div>
{% endblock %}
</div>

View File

@@ -1,9 +1,15 @@
{% macro expandSkill(dict, name, classes="") %}
{% macro expandSkill(dict, name, classes="") %}
<div class='skill {{ classes }}' data-length='{{ dict[name]|length }}'>
<div onclick='activeSkill(this)' class='skillname'>{{ name }}</div>
{% if dict[name]|length > 0 %}
<div class='skill-children'>
<div class='skill-children-inner'>
{% for child in dict[name] %}
{{ expandSkill(dict[name], child) }}
{% endfor %}
{% endfor %}
</div>
</div>
{% endif %}
</div>
{% endmacro %}

View File

@@ -1,6 +1,5 @@
<div class='socials'>
<a href='https://github.com/asimonson1125'><img alt='Github' src="{{ url_for('static', filename='icons/github.svg') }}" /></a>
<a href='https://www.instagram.com/an_a.simonson/'><img alt='Instagram' src="{{ url_for('static', filename='icons/instagram.svg') }}" /></a>
<a href='https://www.linkedin.com/in/simonsonandrew/'><img alt='LinkedIn' src="{{ url_for('static', filename='icons/linkedin.svg') }}" /></a>
<a href='mailto:asimonson1125@gmail.com'><img alt='E-mail' src="{{ url_for('static', filename='icons/email.svg') }}" /></a>
<div id='vertLine'></div>

View File

@@ -3,39 +3,42 @@
<div class="foregroundContent">
<div class="flex equalitems vertOnMobile">
<div>
<h2 class="concentratedHead">About Me</h2>
<p>
I'm Andrew Simonson<!--, CEO of the anti-thermodynamics syndicate.-->,
a <strong>Data Scientist at Ecolab</strong> and a graduate Data
Science student at
<strong>Rochester Institute of Technology</strong>, having
recently completed the <b>Computer Science BS</b> program
(international relations minor) with a focus on probability
theory.
<br />
<br />
I started in ~2017, reverse engineering probabilistic logic
models in games and developing interfaces to recreate my
findings for friends. Now I develop traceable AI built on
deductive reasoning, maintaining scientific methodology in an
industry obsessed with implicit rules and exclusive empiricism.
<!-- As the analysis grew more sophisticated, so too did the tech
stack - to the point that I now manage most services, like this
website, end to end, container image to insight visual. -->
<br />
<br />
I get bored and throw random stuff on this website. It's a form
of unprofessional development and I swear by this form of
learning.
</p>
<h3 class='concentratedHead'>
<div>
<h2 class="concentratedHead">About Me</h2>
<p>
I'm Andrew Simonson<!--, CEO of the anti-thermodynamics syndicate.-->,
a <strong>Data Scientist at Ecolab</strong> and a graduate Data
Science student at
<strong>Rochester Institute of Technology</strong>, having
recently completed the <b>Computer Science BS</b> program
(international relations minor) with a focus on probability
theory.
<!-- <br />
<br />
I started in ~2017, reverse engineering probabilistic logic
models in games and developing interfaces to recreate my
findings for friends. Now I develop traceable AI built on
deductive reasoning, maintaining scientific methodology in an
industry obsessed with implicit rules and exclusive empiricism.
As the analysis grew more sophisticated, so too did the tech
stack - to the point that I now manage most services, like this
website, end to end, container image to insight visual. -->
<br />
<br />
I get bored and throw random stuff on this website.<br/>
This is what unprofessional development looks like.
</p>
</div>
<br/>
<br/>
<h4 class='concentratedHead'>
I also have a
<a href="Resume_Simonson_Andrew.pdf" target="_blank">resume</a>
for some reason.
</h3>
for unexplained reasons.
</h4>
</div>
<div id="skills">
<h2 id="skillstag">Skills</h2>
<h2 id="skillstag">Technologies</h2>
{% from 'partials/skills.html' import skills %} {{
skills(var['skillList']) }}
</div>