Initial import
Some checks failed
Build & Release / build-docker-image (push) Failing after 3m15s
Build & Release / deploy-to-production (push) Has been skipped

This commit is contained in:
2026-03-31 19:09:37 +02:00
commit da7e881311
24 changed files with 2990 additions and 0 deletions

7
.dockerignore Normal file
View File

@@ -0,0 +1,7 @@
.git
.venv
.ollamassist
**/*.db
Dockerfile
**/__pycache__/**
.streamlit/secrets.toml

View File

@@ -0,0 +1,72 @@
name: Build & Release

# Release pipeline: on every version tag, build & push the Docker image,
# then bump the image tag in the infrastructure repo to deploy it.
on:
  push:
    tags:
      - '[0-9]+.[0-9]+.[0-9]+'

env:
  ENDPOINT: services-3
  STACK: misc
  IMAGE: john/daily-counter
  # The pushed tag (e.g. "1.2.3") becomes the image tag.
  TAG: ${{ gitea.ref_name }}
  CACHE_NAME: cache-python-dependencies-daily-counter
  RUNNER_TOOL_CACHE: /toolcache

jobs:
  build-docker-image:
    runs-on: node20
    container:
      image: catthehacker/ubuntu:act-24.04
    steps:
      - name: Checkout Docker file
        uses: actions/checkout@v4
        with:
          ssh-key: ${{ secrets.SSH_JOHN_PRIVATE_KEY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Devsoap Container Registry
        uses: docker/login-action@v2
        with:
          registry: ${{ secrets.DOCKER_REGISTRY }}
          username: ${{ secrets.DOCKER_REGISTRY_USER }}
          password: ${{ secrets.DOCKER_REGISTRY_PASSWORD }}
          # NOTE(review): `scope` is not a documented docker/login-action input;
          # confirm the runner honors it, otherwise drop it.
          scope: ${{ secrets.DOCKER_REGISTRY }}/${{ env.IMAGE }}@push
          logout: true

      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ./Dockerfile
          push: true
          build-args: |
            version=${{ env.TAG }}
          tags: |
            ${{ secrets.DOCKER_REGISTRY }}/${{ env.IMAGE }}:${{ env.TAG }}
            ${{ secrets.DOCKER_REGISTRY }}/${{ env.IMAGE }}:latest

  deploy-to-production:
    if: ${{ always() && needs.build-docker-image.result == 'success' }}
    needs: build-docker-image
    runs-on: node20
    steps:
      - name: Checkout infrastructure config
        # NOTE(review): GIT_REPO_USER / GIT_REPO_INTERNAL / DEVSOAP_INFRA_GIT_REPO
        # are not defined in this workflow's `env` — presumably repository- or
        # runner-level variables. Confirm; otherwise the URL expands empty.
        run: |
          git clone -v --depth=1 ${{ env.REPOSITORY_URL }} infra
        env:
          REPOSITORY_URL: ${{ env.GIT_REPO_USER }}@${{ env.GIT_REPO_INTERNAL }}:${{ env.DEVSOAP_INFRA_GIT_REPO }}

      - name: Setup Git config
        working-directory: infra
        run: |
          git config user.email "code@devsoap.com"
          git config user.name "Devsoap Code CI/CD"

      - name: Update image version
        working-directory: infra
        run: |
          # FIX: POSIX ERE has no lazy quantifier — the original "(.*?)" parses
          # as an optional greedy group and can swallow the rest of the line.
          # Match a single non-whitespace run (the tag) instead.
          sed -i -r "s|/$IMAGE:[^[:space:]]*|/$IMAGE:$TAG|g" $ENDPOINT/$STACK/docker-compose.yml
          git diff -U0

      - name: Push changes
        working-directory: infra
        run: |
          git commit -am "Updated $ENDPOINT/$STACK/$IMAGE to $TAG"
          git push origin master

6
.gitignore vendored Normal file
View File

@@ -0,0 +1,6 @@
.idea
.ollamassist
.venv
**/*.db
**/__pycache__/**
.streamlit/secrets.toml

23
.streamlit/config.toml Normal file
View File

@@ -0,0 +1,23 @@
[server]
port = 8501
address = "0.0.0.0"
[browser]
gatherUsageStats = false
[logger]
level = "info"
[client]
toolbarMode = "viewer"
showSidebarNavigation = false
[theme]
base="light"
backgroundColor = "#eee"
secondaryBackgroundColor = "#fff"
primaryColor = "black"
baseRadius = "none"
[runner]
magicEnabled = false

34
Dockerfile Normal file
View File

@@ -0,0 +1,34 @@
# Python minor version for the base image; override with --build-arg.
ARG PYTHON_VERSION=3.13
FROM python:${PYTHON_VERSION}-alpine

LABEL author="John Ahlroos <john@ahlroos.me>"

ARG POETRY_VERSION=2.3.3

# Python/pip/poetry behavior: unbuffered logs, no pip cache, and poetry
# installing into the system interpreter (no virtualenv inside the container).
ENV PYTHONFAULTHANDLER=1 \
    PYTHONUNBUFFERED=1 \
    PYTHONHASHSEED=random \
    PIP_NO_CACHE_DIR=off \
    PIP_DISABLE_PIP_VERSION_CHECK=on \
    PIP_DEFAULT_TIMEOUT=100 \
    POETRY_NO_INTERACTION=1 \
    POETRY_VIRTUALENVS_CREATE=false \
    POETRY_CACHE_DIR='/var/cache/pypoetry' \
    POETRY_HOME='/usr/local'

# tini: minimal init for signal handling / zombie reaping (used in ENTRYPOINT).
RUN apk add --no-cache tini
RUN pip install -Iv --no-cache-dir "poetry==${POETRY_VERSION}"

WORKDIR /app

# Install dependencies before copying sources so this layer caches across
# source-only changes.
COPY poetry.lock pyproject.toml ./
RUN poetry install --only=main --no-interaction --no-ansi

COPY . /app

# SQLite database lives under /app/data; mount a volume here to persist it.
VOLUME /app/data

# Point Streamlit's "sqlite" connection at the data volume via secrets.toml
# (the `toml` CLI comes from the toml-cli dependency installed above).
RUN touch .streamlit/secrets.toml \
    && toml add_section --toml-path='.streamlit/secrets.toml' 'connections.sqlite' \
    && toml set --toml-path='.streamlit/secrets.toml' 'connections.sqlite.type' 'sql' \
    && toml set --toml-path='.streamlit/secrets.toml' 'connections.sqlite.url' 'sqlite:///data/daily-counter.db'

# Probe Streamlit's built-in health endpoint.
HEALTHCHECK --interval=60s --retries=5 CMD wget -qO- http://127.0.0.1:8501/_stcore/health || exit 1

ENTRYPOINT ["/sbin/tini", "--"]
CMD ["/app/entrypoint.sh"]

36
README.md Normal file
View File

@@ -0,0 +1,36 @@
# Daily Habit Counter App
This is a simple habit tracking web app that allows the user to track occurrences of events in a day.
## Screenshots
| | |
|:----------------------:|:------------------------:|
| ![](docs/counters.png) | ![](docs/statistics.png) |
| Main Page | Statistics page |
## Features
* Create any number of counters for tracking events
* Color encode every counter
* View statistics about how often each event occurs
## Run project (Locally with Python)
**Pre-requisites:** [Pip](https://pypi.org/project/pip/) and [Poetry](https://pypi.org/project/poetry/)
1. Clone this repository
2. Install Poetry ``pip install -Iv --no-cache-dir "poetry==2.3.3"``
3. Install project dependencies ``poetry install``
4. Run database migrations ``alembic upgrade head``
5. Run project ``streamlit run app``
6. Project will be running at http://localhost:8501
## Run project (Locally with Docker)
**Pre-requisites:** [Docker](https://www.docker.com)
1. Clone this repository
2. Build Docker Image ``docker build -t habit-counter:latest .``
3. Run Docker container ``docker run habit-counter`` or to preserve the data ``docker run -v ./data:/app/data habit-counter``

36
alembic.ini Normal file
View File

@@ -0,0 +1,36 @@
[alembic]
sqlalchemy.url = sqlite:///data/daily-counter.db
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

40
app/enums.py Normal file
View File

@@ -0,0 +1,40 @@
from enum import IntEnum, Enum
class CounterType(IntEnum):
    """Kinds of counters, distinguished by the period their stats are grouped over."""
    SIMPLE = 1
    DAILY = 2
    WEEKLY = 3
    MONTHLY = 4
    YEARLY = 5

    def current_unit_text(self):
        """Human-readable label for the ongoing period ('times' for plain counters)."""
        current_labels = {
            CounterType.DAILY: 'today',
            CounterType.WEEKLY: 'this week',
            CounterType.MONTHLY: 'this month',
            CounterType.YEARLY: 'this year',
        }
        return current_labels.get(self, 'times')

    def previous_unit_text(self):
        """Human-readable label for the preceding period ('times' for plain counters)."""
        previous_labels = {
            CounterType.DAILY: 'yesterday',
            CounterType.WEEKLY: 'last week',
            CounterType.MONTHLY: 'last month',
            CounterType.YEARLY: 'last year',
        }
        return previous_labels.get(self, 'times')
class Tabs(Enum):
    # Display labels for the app's two top-level tabs.
    COUNTERS = "Counters"
    STATISTICS = "Stats"

8
app/logger.py Normal file
View File

@@ -0,0 +1,8 @@
import logging
import sys
def init_logger() -> None:
    """Configure the root logger: INFO level, timestamped format, stdout stream."""
    log_format = "%(asctime)s %(levelname)s %(name)s: %(message)s"
    logging.basicConfig(level=logging.INFO, format=log_format, stream=sys.stdout)

96
app/pages/counters.py Normal file
View File

@@ -0,0 +1,96 @@
import streamlit as st
import sql
from enums import CounterType
@st.dialog("Add New Counter", icon=":material/add_box:")
def _add_counter():
    """Modal dialog: create a counter with a title, a type, and a palette color."""
    colors = sql.get_colors(1)  # default palette (id 1); columns color1..color5
    with st.form(key="add_counter", border=False, clear_on_submit=True):
        title = st.text_input("Title:")
        counter_type_name = st.selectbox("Type", options=[e.name for e in CounterType])
        # One radio option per palette column; the option value is the first
        # row's bare hex digits, shown to the user with a '#' prefix.
        color = st.radio("Color",
                         key="color-selector",
                         width="stretch",
                         options=[colors[key][0] for key in colors],
                         format_func=lambda c: f"#{c}")
        with st.container(horizontal=True, width="stretch", horizontal_alignment="center"):
            if st.form_submit_button(label="Create", icon=":material/save:"):
                # Persist, then rerun so the new counter appears in the list.
                sql.create_counter(title, CounterType[counter_type_name], color)
                st.rerun()
@st.dialog("Remove Counter", icon=":material/delete:")
def _remove_counter(counter_id:int):
    """Modal confirmation dialog; deletes the counter when confirmed."""
    with st.form(key="remove_counter", border=False, clear_on_submit=True):
        st.subheader("Are you sure?")
        with st.container(horizontal=True, width="stretch", horizontal_alignment="center"):
            if st.form_submit_button("Confirm", icon=":material/delete:"):
                sql.remove_counter(counter_id)
                st.rerun()
df = sql.get_counters()
with st.container(key="counter-table"):
for counter_id, name, counter_type_str, color in zip(df['id'], df['name'], df['type'], df['color']):
with st.container(width="stretch", key=f"counter_{counter_id}"):
with st.container(horizontal=True, width="stretch"):
st.header(f":material/calendar_clock: {name}", width="stretch")
if st.button("", icon=":material/exposure_plus_1:", key=f"increment_counter_{counter_id}"):
sql.increment_counter(counter_id)
st.rerun()
if st.button("", icon=":material/delete_forever:", key=f"remove_counter_{counter_id}"):
_remove_counter(counter_id)
with st.container(horizontal=True, width="stretch"):
counter_type = CounterType(counter_type_str)
stats_current_unit = counter_type.current_unit_text()
stats_prev_unit = counter_type.previous_unit_text()
match counter_type:
case CounterType.DAILY.value | CounterType.SIMPLE.value:
stats = sql.get_daily_analytics(counter_id)
stats_current = stats.iloc[0]["count"]
stats_prev = stats.iloc[1]["count"]
case CounterType.WEEKLY.value:
stats = sql.get_weekly_analytics(counter_id)
stats_current = stats.iloc[-1]["count"]
stats_prev = stats.iloc[-2]["count"]
case CounterType.MONTHLY.value:
stats = sql.get_monthly_analytics(counter_id)
stats_current = stats.iloc[-1]["count"]
stats_prev = stats.iloc[-2]["count"]
case CounterType.YEARLY.value:
stats = sql.get_yearly_analytics(counter_id)
stats_current = stats.iloc[-1]["count"]
stats_prev = stats.iloc[-2]["count"]
if counter_type is CounterType.SIMPLE.value:
st.markdown(f"**{stats_current} {stats_current_unit}**")
else:
st.markdown(f"""
**{stats_current} {stats_current_unit}**
*{stats_prev} {stats_prev_unit}*
""")
with st.container(horizontal=True, width="stretch", horizontal_alignment="right"):
st.page_link("pages/stats.py", icon=":material/bar_chart:", icon_position="right", label="", query_params={"counter_id": str(counter_id)})
st.html(f"""
<style>
div:has(> .st-key-counter_{counter_id}) {{
background-color: {color};
}}
</style>
""")
if st.button("Add Counter", width="stretch", icon=":material/add_box:"):
_add_counter()

44
app/pages/stats.py Normal file
View File

@@ -0,0 +1,44 @@
import logging
import streamlit as st
import json
import sql
import pandas as pd
from enums import CounterType
logger = logging.getLogger(__name__)

if "counter_id" in st.query_params.keys():
    # Detail view for one counter, selected via ?counter_id=<id>.
    counter_id = int(st.query_params["counter_id"])
    df = sql.get_counter(counter_id)  # single counter row
    st.header('Counter: ' + df['name'])
    color = '#' + df['color']  # DB stores bare hex digits, no '#'
    # Chart the analytics window matching the counter's period.
    match df['type']:
        case CounterType.DAILY.value | CounterType.SIMPLE.value:
            st.bar_chart(sql.get_daily_analytics(int(df['id'])), x="date", y="count", color=color)
        case CounterType.WEEKLY.value:
            st.bar_chart(sql.get_weekly_analytics(int(df['id'])), x="week", y="count", color=color)
        case CounterType.MONTHLY.value:
            st.bar_chart(sql.get_monthly_analytics(int(df['id'])), x="month", y="count", color=color)
        case CounterType.YEARLY.value:
            st.bar_chart(sql.get_yearly_analytics(int(df['id'])), x="year", y="count", color=color)
else:
    # Overview: one bar chart with a series per counter.
    st.header("Statistics")
    entries = sql.get_analytics()
    # `counters` holds a JSON string per day like {"name": count, ...};
    # expand it into one column per counter name, 0 where a counter had
    # no entries that day.
    entries_norm = pd.json_normalize(entries.counters.apply(json.loads)).fillna(0)
    entries_full = pd.concat([entries, entries_norm], axis=1).drop(['counters'], axis=1)
    selected_counters = [c for c in entries_full.columns if c != "date"]
    all_counters = sql.get_counters()
    # Order each counter's color to match the chart's column order by turning
    # the name column into a category ordered like selected_counters.
    colors = all_counters.loc[all_counters['name'].isin(selected_counters), ["name", "color"]]
    colors.name = colors.name.astype("category")
    colors.name = colors.name.cat.set_categories(selected_counters)
    colors = colors.sort_values(["name"])
    colors = colors.color.apply(lambda c: "#" + c).tolist()
    st.bar_chart(entries_full, x="date", x_label="Date", y_label="Count", color=colors)

229
app/sql.py Normal file
View File

@@ -0,0 +1,229 @@
import logging
import streamlit as st
from sqlalchemy.sql import text
from enums import CounterType
logger = logging.getLogger(__name__)

# Streamlit SQL connection named "sqlite"; its URL is presumably supplied via
# .streamlit/secrets.toml under [connections.sqlite] — confirm deployment config.
connection = st.connection("sqlite")

# SQLite ships with foreign-key enforcement off; enable it so counter deletes
# cascade to entries. NOTE(review): PRAGMAs apply per connection — verify this
# covers every pooled connection the app uses.
with connection.session as configure_session:
    configure_session.execute(text('PRAGMA foreign_keys=ON'))
def create_counter(title:str, counter_type:CounterType, counter_color) -> None:
    """Insert a new counter row; failures are logged and rolled back."""
    insert_stmt = text('INSERT INTO counters (name, type, color) VALUES (:title, :type, :color)')
    bind_values = {'title': title, 'type': counter_type, 'color': counter_color}
    with connection.session as session:
        try:
            session.execute(insert_stmt, bind_values)
            session.commit()
        except Exception as error:
            logger.error(error)
            session.rollback()
def get_counters():
    """Return a DataFrame of all counters (columns: id, name, type, color).

    On query failure returns an EMPTY DataFrame with the same columns so
    callers that iterate df['id'] etc. simply render nothing.
    BUGFIX: the original returned st.dataframe() — a UI rendering call, not a
    DataFrame — which crashed callers on the first column access.
    """
    try:
        return connection.query('SELECT id, name, type, color FROM counters', ttl=0)
    except Exception as e:
        logger.error(e)
        import pandas as pd  # local import keeps module-level deps unchanged
        return pd.DataFrame(columns=['id', 'name', 'type', 'color'])
def increment_counter(counter_id:int) -> None:
    """Record one +1 entry for the given counter (failures logged, rolled back)."""
    insert_stmt = text('INSERT INTO entries (counter_id) VALUES (:id)')
    with connection.session as session:
        try:
            session.execute(insert_stmt, {'id': counter_id})
            session.commit()
        except Exception as error:
            logger.error(error)
            session.rollback()
def remove_counter(counter_id:int) -> None:
    """Delete a counter by id (failures logged, rolled back).

    With PRAGMA foreign_keys=ON, the schema's ON DELETE CASCADE removes the
    counter's entries as well.
    """
    delete_stmt = text('DELETE FROM counters WHERE id = :id')
    with connection.session as session:
        try:
            session.execute(delete_stmt, {'id': counter_id})
            session.commit()
        except Exception as error:
            logger.error(error)
            session.rollback()
def get_counter(counter_id:int):
    """Fetch one counter row by id, or None if the lookup fails."""
    try:
        rows = connection.query('SELECT * FROM counters WHERE id = :id', params={'id': counter_id}, ttl=0)
        return rows.iloc[0]
    except Exception as error:
        logger.error(error)
        return None
def get_analytics(end_date:str = 'now'):
    """Per-day counts for ALL counters over the 30 days up to end_date.

    Returns a DataFrame with columns `date` and `counters`, where `counters`
    is a JSON object string mapping counter name -> summed increments for
    that day ({} for days with no entries). Returns None on query failure.
    `end_date` is any SQLite date() argument (default 'now').
    """
    try:
        return connection.query('''
            -- generate one row per calendar day in the window
            WITH RECURSIVE timeseries(d) AS (
                VALUES(date(:end_date))
                UNION ALL
                SELECT date(d, '-1 day') as d
                FROM timeseries
                WHERE d > date(:end_date, '-30 days')
            ),
            -- aggregate all entries by counter and day; the join to the
            -- timeseries below restricts output to the window
            stats AS (
                SELECT
                    date(timestamp) as d,
                    counter_id,
                    sum(increment) as count
                FROM entries
                group by counter_id, date(timestamp)
            )
            select
                s.d as date,
                case
                    when counter_id is null then json_object()
                    else json_group_object(name, count)
                end as counters
            FROM timeseries s
            left outer join stats t on s.d = t.d
            left join counters c on t.counter_id = c.id
            GROUP by s.d
        ''', params={"end_date": end_date}, ttl=0)
    except Exception as e:
        logger.error(e)
        return None
def get_daily_analytics(counter_id:int, end_date:str = 'now'):
    """Entry counts per day for one counter over the week up to end_date.

    Returns a DataFrame with columns `date` and `count` (0 for days without
    entries), or None on query failure.

    NOTE(review): callers treat iloc[0] as "today" — rows come out in the
    recursive CTE's generation order (newest first) with no ORDER BY; confirm
    that ordering is stable for the deployed SQLite.
    """
    try:
        return connection.query('''
            -- one row per day, walking backwards from end_date
            WITH RECURSIVE timeseries(d) AS (
                VALUES(date(:end_date))
                UNION ALL
                SELECT date(d, '-1 day') as d
                FROM timeseries
                WHERE d > date(:end_date, '-7 days')
            ),
            stats AS (
                SELECT
                    date(timestamp) as d,
                    sum(increment) as count
                FROM entries
                where counter_id = :id
                group by date(timestamp)
            )
            SELECT
                t.d as "date",
                coalesce(s.count, 0) as count
            FROM timeseries as t
            LEFT JOIN stats as s on s.d = t.d
        ''', params={'id': counter_id, "end_date": end_date}, ttl=0)
    except Exception as e:
        logger.error(e)
        return None
def get_weekly_analytics(counter_id:int, end_date:str = 'now'):
    """Entry counts per week for one counter, over the weeks touching the
    30 days before end_date.

    Returns a DataFrame with columns `week` (strftime('%W') week number) and
    `count`, or None on query failure.

    NOTE(review): weeks are keyed by week number only, not year — entries from
    the same week number of an earlier year would be merged in; confirm this
    is acceptable for the 30-day window.
    """
    try:
        return connection.query('''
            -- one row per week-start (Sunday via 'weekday 0'), walking back
            WITH RECURSIVE timeseries(d) AS (
                VALUES(date(:end_date, 'weekday 0'))
                UNION ALL
                SELECT date(d, '-7 day')
                FROM timeseries
                WHERE d > date(:end_date, '-30 days')
            ),
            weeks AS (
                SELECT strftime('%W',d) as w
                FROM timeseries
            ),
            stats AS (
                SELECT
                    strftime('%W', timestamp) as w,
                    sum(increment) as count
                FROM entries
                where counter_id = :id
                group by strftime('%W', timestamp)
            )
            SELECT
                w.w as "week",
                coalesce(s.count, 0) as count
            FROM weeks as w
            LEFT JOIN stats as s on s.w = w.w
        ''', params={'id': counter_id, "end_date": end_date}, ttl=0)
    except Exception as e:
        logger.error(e)
        return None
def get_monthly_analytics(counter_id:int, end_date:str = 'now'):
    """Entry counts per month for one counter, from the start of end_date's year.

    Returns a DataFrame with columns `month` (formatted "MM, YYYY") and
    `count`, or None on query failure.

    NOTE(review): concat() is only available in newer SQLite releases —
    confirm the deployed SQLite supports it.
    """
    try:
        return connection.query('''
            -- one row per month from Jan 1st of end_date's year
            WITH RECURSIVE timeseries(d) AS (
                VALUES( date(:end_date, 'start of year'))
                UNION ALL
                SELECT date(d, '+1 month')
                FROM timeseries
                WHERE d < date(:end_date, '-1 month')
            ),
            months AS (
                SELECT
                    strftime('%m',d) as m,
                    strftime('%Y',d) as y
                FROM timeseries
            ),
            stats AS (
                SELECT
                    strftime('%m', timestamp) as m,
                    strftime('%Y', timestamp) as y,
                    sum(increment) as count
                FROM entries
                where counter_id = :id
                group by strftime('%m', timestamp), strftime('%Y', timestamp)
            )
            SELECT
                concat(m.m,', ',m.y) as "month",
                coalesce(s.count, 0) as count
            FROM months as m
            LEFT JOIN stats as s on s.m = m.m and s.y = m.y
        ''', params={'id': counter_id, "end_date": end_date}, ttl=0)
    except Exception as e:
        logger.error(e)
        return None
def get_yearly_analytics(counter_id:int, end_date:str = 'now'):
    """Entry counts per year for one counter, starting four years before
    end_date's year.

    Returns a DataFrame with columns `year` (strftime('%Y')) and `count`,
    or None on query failure.
    """
    try:
        return connection.query('''
            -- one row per year from (end_date's year - 4) forward
            WITH RECURSIVE timeseries(d) AS (
                VALUES( date(:end_date, 'start of year', '-4 years'))
                UNION ALL
                SELECT date(d, '+1 year')
                FROM timeseries
                WHERE d < date(:end_date, '-1 year')
            ),
            years AS (
                SELECT strftime('%Y',d) as y
                FROM timeseries
            ),
            stats AS (
                SELECT
                    strftime('%Y', timestamp) as y,
                    sum(increment) as count
                FROM entries
                where counter_id = :id
                group by strftime('%Y', timestamp)
            )
            SELECT
                m.y as "year",
                coalesce(s.count, 0) as count
            FROM years as m
            LEFT JOIN stats as s on s.y = m.y
        ''', params={'id': counter_id, "end_date": end_date}, ttl=0)
    except Exception as e:
        logger.error(e)
        return None
def get_colors(palette_id:int):
    """Fetch one palette row (columns color1..color5), or None on failure."""
    try:
        palette_query = '''SELECT color1,color2,color3,color4,color5 FROM color_palettes WHERE id = :id'''
        return connection.query(palette_query, params={'id': palette_id})
    except Exception as error:
        logger.error(error)
        return None

20
app/streamlit_app.py Normal file
View File

@@ -0,0 +1,20 @@
import streamlit as st
from logger import init_logger
from styles import init_styles
init_logger()
init_styles()

# Gate the app behind login when Streamlit auth is configured
# (st.user is falsy/anonymous when no [auth] section exists).
if st.user and not st.user.is_logged_in:
    with st.container(width="stretch", height="stretch", horizontal_alignment="center"):
        st.title("Daily Counter", width="stretch", text_alignment="center")
        st.text("Please log in to use this app", width="stretch", text_alignment="center")
        st.space()
        if st.button("Log in"):
            st.login()
else:
    # Logged in (or auth disabled): top navigation between the two pages.
    counters = st.Page("pages/counters.py", title="Counters", icon=":material/update:")
    stats = st.Page("pages/stats.py", title="Statistics", icon=":material/chart_data:")
    pg = st.navigation(position="top", pages=[counters, stats])
    pg.run()

25
app/styles.py Normal file
View File

@@ -0,0 +1,25 @@
import streamlit as st
import sql
def _load_css(filepath):
    """Inject the CSS file at *filepath* into the page inside a <style> tag."""
    with open(filepath) as css_file:
        contents = css_file.read()
    st.html(f"<style>{contents}</style>")
def _load_color_selector_styles():
    """Color each option of the 'color-selector' radio with its palette color."""
    colors = sql.get_colors(1) #FIXME Change to use user profile color palette
    for idx, c in enumerate(colors.keys()):
        # Column c's first row holds bare hex digits; prefix '#' for CSS.
        css_color = '#' + colors[c][0]
        # Radio inputs carry their option index as `value`; match on it to
        # style the corresponding label.
        st.html(f"""
<style>
.st-key-color-selector label:has(> input[value='{idx}']) {{
    background-color: {css_color};
}}
</style>
""")
def init_styles():
    """Inject the global CSS theme and the color-picker option styles."""
    _load_css("css/theme.css")
    _load_color_selector_styles()

56
css/theme.css Normal file
View File

@@ -0,0 +1,56 @@
#MainMenu {
#display: none;
}
.stApp {
min-width: 360px;
}
.stAppHeader {
width: 100%;
max-width: 736px;
margin-left: auto;
margin-right: auto;
}
.stMainBlockContainer{
padding-top: 4rem;
}
.stPageLink a {
background: whitesmoke;
height: 40px;
width: 45px;
border: 1px solid silver;
padding-left: 12px;
}
.stPageLink a > span:first-child {
display: none;
}
.st-key-selected_tab div[role="tablist"] > button{
width: 100%;
}
.st-key-counter-table > div {
border: 1px solid gray;
padding: 10px;
border-radius: 5px;
background-color: whitesmoke;
}
.st-key-color-selector div[role = "radiogroup"] {
display: flex;
flex-direction: row;
}
.st-key-color-selector div[role = "radiogroup"] > label {
flex: 1
}
.st-key-color-selector div[role = "radiogroup"] > label > div:first-child {
display: none;
}
.st-key-color-selector div[role = "radiogroup"] > label:has(> input[tabindex="0"]) {
outline: 3px solid blue;
}
.st-key-color-selector div[role = "radiogroup"] p {
visibility: hidden;
}

0
data/.keep Normal file
View File

BIN
docs/counters.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 76 KiB

BIN
docs/statistics.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 38 KiB

21
entrypoint.sh Executable file
View File

@@ -0,0 +1,21 @@
#!/usr/bin/env sh
# Container entrypoint: optionally wire OIDC settings into Streamlit's
# secrets file, run DB migrations, then start the app.

STREAMLIT_SECRETS_LOCATION=".streamlit/secrets.toml"

if [ "$OIDC_ENABLED" = "true" ]; then
    echo "INFO [entrypoint] OIDC configuration detected. Configuring app..."
    # Write each [auth] key Streamlit reads for OIDC login.
    toml add_section --toml-path=$STREAMLIT_SECRETS_LOCATION 'auth'
    toml set --toml-path=$STREAMLIT_SECRETS_LOCATION 'auth.redirect_uri' "$OIDC_PUBLIC_URL/oauth2callback"
    toml set --toml-path=$STREAMLIT_SECRETS_LOCATION 'auth.cookie_secret' "$OIDC_COOKIE_SECRET"
    toml set --toml-path=$STREAMLIT_SECRETS_LOCATION 'auth.client_id' "$OIDC_CLIENT_ID"
    toml set --toml-path=$STREAMLIT_SECRETS_LOCATION 'auth.client_secret' "$OIDC_CLIENT_SECRET"
    toml set --toml-path=$STREAMLIT_SECRETS_LOCATION 'auth.server_metadata_url' "$OIDC_METADATA_URL"
else
    echo "INFO [entrypoint] No OIDC configuration detected."
fi

echo "INFO [entrypoint] Running database migrations..."
alembic upgrade head

echo "INFO [entrypoint] Running application..."
streamlit run app

37
migrations/env.py Normal file
View File

@@ -0,0 +1,37 @@
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
config = context.config

# Configure Python logging from the [loggers]/[handlers] sections of alembic.ini.
if config.config_file_name is not None:
    fileConfig(config.config_file_name, disable_existing_loggers=False)

# Migrations are hand-written, so there is no model metadata for autogenerate.
target_metadata = None


def run_migrations_offline() -> None:
    """Emit migration SQL without a live DB connection ('offline' mode)."""
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations against an engine built from alembic.ini ('online' mode)."""
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

28
migrations/script.py.mako Normal file
View File

@@ -0,0 +1,28 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,63 @@
"""initial version
Revision ID: 4ee21f978e6c
Revises:
Create Date: 2026-04-05 18:13:28.735859
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy import ForeignKeyConstraint
# revision identifiers, used by Alembic.
revision: str = '4ee21f978e6c'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial schema: counters, entries, color_palettes (+ seed data)."""
    op.create_table(
        "counters",
        sa.Column('id', sa.Integer, primary_key=True, autoincrement=True),
        sa.Column('name', sa.String(20), nullable=False, unique=True),
        # CounterType value; defaults to 1 (SIMPLE).
        sa.Column('type', sa.Integer(), nullable=False, server_default=sa.text("1")),
        # Bare hex color digits, stored WITHOUT a leading '#'.
        sa.Column('color', sa.String(6), nullable=False)
    )
    op.create_table(
        "entries",
        sa.Column('id', sa.Integer, primary_key=True, autoincrement=True),
        sa.Column('timestamp', sa.DateTime, nullable=False, server_default=sa.func.now()),
        sa.Column('counter_id', sa.Integer, nullable=False,),
        # Each row contributes `increment` (default 1) to its counter's totals.
        sa.Column('increment', sa.Integer, nullable=False, server_default=sa.text("1")),
        # Cascade so deleting a counter also deletes its entries
        # (requires PRAGMA foreign_keys=ON at runtime in SQLite).
        ForeignKeyConstraint(["counter_id"], ["counters.id"], onupdate="CASCADE", ondelete="CASCADE")
    )
    # Five-color palettes, seeded with five preset rows below.
    table = op.create_table(
        "color_palettes",
        sa.Column('id', sa.Integer, primary_key=True, autoincrement=True),
        sa.Column('color1', sa.String(6), nullable=False),
        sa.Column('color2', sa.String(6), nullable=False),
        sa.Column('color3', sa.String(6), nullable=False),
        sa.Column('color4', sa.String(6), nullable=False),
        sa.Column('color5', sa.String(6), nullable=False),
    )
    op.bulk_insert(table, [
        {"color1": "F2F3AE", "color2": "EDD382", "color3": "FC9E4F", "color4": "FF521B", "color5": "020122"},
        {"color1": "2B4141", "color2": "0EB1D2", "color3": "34E4EA", "color4": "8AB9B5", "color5": "C8C2AE"},
        {"color1": "181F1C", "color2": "274029", "color3": "315C2B", "color4": "60712F", "color5": "9EA93F"},
        {"color1": "A3A380", "color2": "D6CE93", "color3": "EFEBCE", "color4": "D8A48F", "color5": "BB8588"},
        {"color1": "32292F", "color2": "99E1D9", "color3": "F0F7F4", "color4": "70ABAF", "color5": "705D56"}
    ])
def downgrade() -> None:
    """Drop all tables, children before parents to respect the FK."""
    op.drop_table("color_palettes")
    op.drop_table("entries")
    op.drop_table("counters")

2083
poetry.lock generated Normal file

File diff suppressed because it is too large Load Diff

26
pyproject.toml Normal file
View File

@@ -0,0 +1,26 @@
[project]
name = "daily-counter"
description = "A daily counter for any habit tracking"
version = "0.1"
dynamic = ["version"]
requires-python = ">= 3.10"
dependencies = [
"alembic (==1.18.4)",
"streamlit (==1.56.0)",
"toml-cli (==0.8.2)",
"authlib (==1.6.9)"
]
[virtualenvs]
in-project = true
[tool.alembic]
script_location = "%(here)s/migrations"
truncate_slug_length = 10
file_template = "%%(year)d%%(month).2d%%(day).2d%%(hour)2d%%(minute)2d%%(second).2d_%%(slug)s"
prepend_sys_path = [
"."
]
[tool.poetry]
package-mode = false