This commit is contained in:
cash
2026-03-29 23:50:49 -05:00
commit eb5e194331
56 changed files with 4010 additions and 0 deletions

10
.dockerignore Normal file
View File

@@ -0,0 +1,10 @@
.env
.idea/
.venv/
.wslvenv/
.legacy/
__pycache__/
backend/data/
dev.db
dev.db-shm
dev.db-wal

12
.gitignore vendored Normal file
View File

@@ -0,0 +1,12 @@
.env
.idea/
.venv/
.wslvenv/
.legacy/
.50.txt
50.txt
__pycache__/
backend/data/
dev.db
dev.db-shm
dev.db-wal

0
.venv Normal file
View File

14
MANIFEST.in Normal file
View File

@@ -0,0 +1,14 @@
# core project docs
include README.md
include LICENSE
# python typing markers
recursive-include backend *.pyi
recursive-include backend py.typed
# frontend assets (templates + static)
recursive-include frontend *
# alembic migrations
recursive-include backend/alembic *

1
README.md Normal file
View File

@@ -0,0 +1 @@
s1ne v3.1.500

0
__init__.py Normal file
View File

119
alembic.ini Normal file
View File

@@ -0,0 +1,119 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = backend/alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
version_path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

0
backend/__init__.py Normal file
View File

1
backend/alembic/README Normal file
View File

@@ -0,0 +1 @@
Generic single-database configuration.

97
backend/alembic/env.py Normal file
View File

@@ -0,0 +1,97 @@
"""Alembic migration environment.

Wires the project's SQLAlchemy metadata and database URL into Alembic's
runtime ``context`` so ``alembic upgrade`` / ``revision --autogenerate`` work.
"""
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
import os
from dotenv import load_dotenv
from alembic import context
from backend.core.settings import SQLALCHEMY_DATABASE_URI
from backend.core.db import metadata

# Load .env manually (if not loaded by your settings.py or entry point)
load_dotenv()

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Get environment (default to development if not set)
# NOTE(review): no default is actually applied — APP_ENV is None when unset,
# which simply falls through to the non-production branch below.
APP_ENV = os.getenv("APP_ENV")

# Override whatever sqlalchemy.url the .ini file had with the project setting.
config.set_main_option("sqlalchemy.url", SQLALCHEMY_DATABASE_URI)

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.

# Determine DB URL
# db_url is only consumed as a fallback default by run_migrations_offline();
# online mode reads sqlalchemy.url from the config section instead.
if APP_ENV == "production":
    db_url = os.getenv("DATABASE_URL")
    if not db_url:
        raise RuntimeError("DATABASE_URL is required in production.")
else:
    # fallback to local SQLite db
    db_url = os.getenv("SQLALCHEMY_DATABASE_URI", "sqlite:///data/local.db")
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL and not an Engine; by
    skipping Engine creation we don't even need a DBAPI to be available.
    Calls to context.execute() here emit the given string to the script
    output.
    """
    # Prefer the URL pushed into the config at import time; fall back to the
    # env-derived db_url so offline mode still works without an ini entry.
    url = config.get_main_option("sqlalchemy.url", db_url)
    # Fix: the previous version printed db_url *after* the migrations had
    # already run, even though the context was configured with `url`, which
    # can differ. Log the URL actually used, before running.
    print(f"→ Alembic using DB URL: {url}")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a live
    connection with the migration context.
    """
    # Fix: sslmode was passed unconditionally, which breaks non-PostgreSQL
    # targets (the SQLite DBAPI rejects the kwarg). Guard on the configured
    # URL, mirroring the identical check in backend/core/db.py.
    url = config.get_main_option("sqlalchemy.url") or ""
    connect_args = {"sslmode": "require"} if url.startswith("postgresql") else {}
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        connect_args=connect_args,
        poolclass=pool.NullPool,
    )
    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )
        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,28 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,28 @@
"""merge user-branch fixes
Revision ID: 03568bb37289
Revises: user_counters_defaults, user_counters_defaults_old
Create Date: 2025-05-01 16:01:28.514674
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '03568bb37289'
down_revision: Union[str, None] = ('user_counters_defaults', 'user_counters_defaults_old')
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
pass
def downgrade() -> None:
"""Downgrade schema."""
pass

View File

@@ -0,0 +1,67 @@
"""tier text not int
Revision ID: 05d6342e2105
Revises: 20250506abcd
Create Date: 2025-05-06 16:42:38.378374
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
# revision identifiers, used by Alembic.
revision: str = '05d6342e2105'
down_revision: Union[str, None] = '20250506abcd'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# 1) make sure every numeric tier is inside the allowed range
# (optional safety; no type change yet)
op.execute("""
UPDATE users
SET tier = 0
WHERE pg_typeof(tier)::text = 'integer'
AND tier NOT IN (0,1,2,3);
""")
# 2) ALTER COLUMN to TEXT first numeric values become '0','1',...
op.alter_column(
"users", "tier",
existing_type=sa.Integer(),
type_=sa.Text(),
postgresql_using="tier::text",
nullable=False,
server_default=sa.text("'Online'")
)
# 3) now map the stringified numbers to names
op.execute("""
UPDATE users SET tier =
CASE tier
WHEN '0' THEN 'Online'
WHEN '1' THEN 'Rank 1'
WHEN '2' THEN 'Rank 2'
WHEN '3' THEN 'Rank 3'
ELSE tier
END;
""")
def downgrade() -> None:
# revert to integer, mapping back Online→0 etc. if you really need it
op.alter_column(
"users", "tier",
existing_type=sa.Text(),
type_=sa.Integer(),
postgresql_using="""
CASE tier
WHEN 'Rank 1' THEN 1
WHEN 'Rank 2' THEN 2
WHEN 'Rank 3' THEN 3
ELSE 0
END::integer
""",
server_default=sa.text("0"),
nullable=False,
)

View File

@@ -0,0 +1,37 @@
"""add bonus_active_until to users
Revision ID: 175f03f1c9f7
Revises: ff38ddad43af
Create Date: 2025-05-01 16:58:15.855501
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '175f03f1c9f7'
down_revision: Union[str, None] = 'ff38ddad43af'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade():
if not _has_column("users", "bonus_active_until"):
op.add_column(
"users",
sa.Column("bonus_active_until", sa.DateTime(), nullable=True),
)
def downgrade():
op.drop_column("users", "bonus_active_until")
# helper
from sqlalchemy.engine.reflection import Inspector
def _has_column(table, column):
bind = op.get_bind()
insp = Inspector.from_engine(bind)
return column in [c["name"] for c in insp.get_columns(table)]

View File

@@ -0,0 +1,55 @@
"""Drop XP / tier related columns (+data) if they exist
Revision ID: 20250521_remove_xp_system
Revises: 05d6342e2105
Create Date: 2025-05-21
"""
from alembic import op
import sqlalchemy as sa
revision = "20250521_remove_xp_system"
down_revision = "05d6342e2105"
branch_labels = None
depends_on = None
# added "data" here ↓↓↓
XP_COLUMNS = (
"videos_downloaded",
"mb_usage",
"level",
"xp",
"tier",
"admin",
"vip_badge",
"bonus_active_until",
"score",
"data", # ← drop the NOT-NULL JSON/profile blob
)
def upgrade() -> None:
conn = op.get_bind()
existing = {c["name"] for c in sa.inspect(conn).get_columns("users")}
with op.batch_alter_table("users") as batch:
for col in XP_COLUMNS:
if col in existing:
batch.drop_column(col)
def downgrade() -> None:
conn = op.get_bind()
existing = {c["name"] for c in sa.inspect(conn).get_columns("users")}
with op.batch_alter_table("users") as batch:
# Only re-add if missing; "data" comes back as JSON/Text and NULL-able
add = lambda name, *args, **kw: (
batch.add_column(sa.Column(name, *args, **kw))
if name not in existing else None
)
add("videos_downloaded", sa.Integer(), server_default="0", nullable=False)
add("mb_usage", sa.Float(), server_default="0", nullable=False)
add("level", sa.Integer(), server_default="1", nullable=False)
add("xp", sa.Integer(), server_default="0", nullable=False)
add("tier", sa.Text(), server_default="Online")
add("admin", sa.Boolean(), server_default="false", nullable=False)
add("vip_badge", sa.Text())
add("bonus_active_until", sa.DateTime(timezone=True))
add("score", sa.Float(), server_default="0")
add("data", sa.JSON(), server_default="{}") # <<<

View File

@@ -0,0 +1,36 @@
"""default zeros for new user counters"""
from alembic import op
import sqlalchemy as sa
revision = "user_counters_defaults"
down_revision = "add_ok_to_dl_stats"
branch_labels = None
depends_on = None
def _add(column: str, coltype, default_sql: str):
# Add column nullable=True with default, then make NOT NULL & drop default
op.add_column(
"users",
sa.Column(column, coltype, nullable=True, server_default=sa.text(default_sql)),
)
op.alter_column("users", column, server_default=None, nullable=False)
def upgrade():
_add("videos_downloaded", sa.Integer(), "0")
_add("mb_usage", sa.Float(), "0")
_add("level", sa.Integer(), "1")
_add("xp", sa.Integer(), "0")
_add("tier", sa.Integer(), "0")
_add("ban_status", sa.Boolean(), "false")
_add("soft_banned", sa.Boolean(), "false")
def downgrade():
for col in (
"soft_banned", "ban_status", "tier",
"xp", "level", "mb_usage", "videos_downloaded",
):
op.drop_column("users", col)

View File

@@ -0,0 +1,64 @@
"""drop url column, make ip the primary key on users
Revision ID: 55327cbf08df
Revises: 70e118917866
Create Date: 2025-05-01 07:26:50.279482
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.engine.reflection import Inspector
# revision identifiers, used by Alembic.
revision: str = '55327cbf08df'
down_revision: Union[str, None] = '70e118917866'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def _has_column(conn, table, column):
return column in [
c["name"] for c in Inspector.from_engine(conn).get_columns(table)
]
def upgrade():
conn = op.get_bind()
insp = Inspector.from_engine(conn)
# 1) Drop any UNIQUE constraints on url
for uc in insp.get_unique_constraints("users"):
if "url" in uc["column_names"]:
op.drop_constraint(uc["name"], "users", type_="unique")
# 2) Drop whatever PRIMARY KEY exists today
pk = insp.get_pk_constraint("users")["name"]
if pk:
op.drop_constraint(pk, "users", type_="primary")
# 3) Remove the old url column
if "url" in {c["name"] for c in insp.get_columns("users")}:
op.drop_column("users", "url")
# 4) Create a new PK on ip
op.create_primary_key("users_pkey", "users", ["ip"])
def downgrade():
conn = op.get_bind()
insp = Inspector.from_engine(conn)
# 1) Drop the ip PK
pk = insp.get_pk_constraint("users")["name"]
if pk:
op.drop_constraint(pk, "users", type_="primary")
# 2) Re-create url column
op.add_column("users", sa.Column("url", sa.Text(), nullable=False))
# 3) Restore the PK on url
op.create_primary_key("users_pkey", "users", ["url"])

View File

@@ -0,0 +1,22 @@
"""add ok column to dl_stats"""
from alembic import op
import sqlalchemy as sa
revision = "add_ok_to_dl_stats"
down_revision = "55327cbf08df" # or latest hash
branch_labels = None
depends_on = None
def upgrade():
op.add_column(
"dl_stats",
sa.Column("ok", sa.Boolean(), nullable=False, server_default=sa.text("false"))
)
# drop default if you like
op.alter_column("dl_stats", "ok", server_default=None)
def downgrade():
op.drop_column("dl_stats", "ok")

View File

@@ -0,0 +1,43 @@
"""add default to users.videos_downloaded
Revision ID: 20250506abcd
Revises: previous_revision_id
Create Date: 2025-05-06 22:10:00.000000
"""
from typing import Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "20250506abcd"
down_revision = 'abe00f7f8f72'
branch_labels = None
depends_on = None
def upgrade():
# 1) backfill existing NULLs
op.execute(
"UPDATE users SET videos_downloaded = 0 "
"WHERE videos_downloaded IS NULL;"
)
# 2) give the column a serverside default
op.alter_column(
"users",
"videos_downloaded",
existing_type=sa.Integer(),
server_default=sa.text("0"),
nullable=False,
)
def downgrade():
op.alter_column(
"users",
"videos_downloaded",
existing_type=sa.Integer(),
server_default=None,
nullable=True,
)

View File

@@ -0,0 +1,27 @@
"""add ok column to dl_stats
Revision ID: 7064708f684e
Revises: 86141e89fea3
Create Date: 2025-05-01 16:29:31.009976
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '7064708f684e'
down_revision: Union[str, None] = '86141e89fea3'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade():
# if “success” exists, rename it; otherwise just add ok
op.add_column("dl_stats", sa.Column("ok", sa.Boolean(), server_default="false", nullable=False))
def downgrade():
op.drop_column("dl_stats", "ok")

View File

@@ -0,0 +1,48 @@
"""make ip the primary key on users, drop obsolete url
Revision ID: 70e118917866
Revises: e03269ce4058
Create Date: 2025-05-01 07:03:06.119500
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.engine import reflection
# revision identifiers, used by Alembic.
revision: str = '70e118917866'
down_revision: Union[str, None] = 'e03269ce4058'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def _column_exists(conn, table, column):
insp = reflection.Inspector.from_engine(conn)
return column in [c["name"] for c in insp.get_columns(table)]
def upgrade():
conn = op.get_bind()
# 1) add ip column if the old table never had it
if not _column_exists(conn, "users", "ip"):
op.add_column("users", sa.Column("ip", sa.Text(), nullable=False))
# 2) drop the old PK that was on url (or any other column mix)
op.drop_constraint("users_pkey", "users", type_="primary")
# 3) create new PK on ip
op.create_primary_key("users_pkey", "users", ["ip"])
# 4) drop obsolete url column if its still there
if _column_exists(conn, "users", "url"):
op.drop_column("users", "url")
def downgrade():
# reverse: recreate url, restore old PK
op.add_column("users", sa.Column("url", sa.Text(), nullable=False))
op.drop_constraint("users_pkey", "users", type_="primary")
op.create_primary_key("users_pkey", "users", ["url"])
op.drop_column("users", "ip")

View File

@@ -0,0 +1,28 @@
"""merge heads
Revision ID: 86141e89fea3
Revises: 03568bb37289, ffae4495003d
Create Date: 2025-05-01 16:08:14.791524
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '86141e89fea3'
down_revision: Union[str, None] = ('03568bb37289', 'ffae4495003d')
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
pass
def downgrade() -> None:
"""Downgrade schema."""
pass

View File

@@ -0,0 +1,40 @@
"""add banned and updated_at columns to proxies
Revision ID: 957c893a8a67
Revises:
Create Date: 2025-05-01 06:45:41.546150
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '957c893a8a67'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ── Add the new 'banned' column with default FALSE ──────────
op.add_column(
'proxies',
sa.Column('banned', sa.Boolean(), nullable=False, server_default=sa.text('FALSE'))
)
# ── (Optional) Add an 'updated_at' timestamp column ─────────
op.add_column(
'proxies',
sa.Column('updated_at', sa.DateTime(), nullable=True)
)
# ── (Optional) Remove server_default if you dont need it going forward ─
op.alter_column('proxies', 'banned', server_default=None)
def downgrade() -> None:
# ── Drop in reverse order ───────────────────────────────
op.drop_column('proxies', 'updated_at')
op.drop_column('proxies', 'banned')

View File

@@ -0,0 +1,51 @@
"""proxy leasing columns
Revision ID: abe00f7f8f72
Revises: 175f03f1c9f7
Create Date: 20250505 18:12:44
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
revision = "abe00f7f8f72"
down_revision = "175f03f1c9f7"
branch_labels = None
depends_on = None
def _has_column(bind, table: str, column: str) -> bool:
insp = inspect(bind)
return column in {c["name"] for c in insp.get_columns(table)}
def upgrade() -> None:
bind = op.get_bind()
# add only the columns that are missing
add_in_use = not _has_column(bind, "proxies", "in_use")
add_last_fail = not _has_column(bind, "proxies", "last_fail")
if add_in_use or add_last_fail:
with op.batch_alter_table("proxies") as batch:
if add_in_use:
batch.add_column(sa.Column("in_use", sa.Integer(), server_default="0"))
if add_last_fail:
batch.add_column(sa.Column("last_fail", sa.DateTime()))
def downgrade() -> None:
# downgrade assumes the columns exist, so drop them only if present
bind = op.get_bind()
drop_in_use = _has_column(bind, "proxies", "in_use")
drop_last_fail = _has_column(bind, "proxies", "last_fail")
if drop_in_use or drop_last_fail:
with op.batch_alter_table("proxies") as batch:
if drop_last_fail:
batch.drop_column("last_fail")
if drop_in_use:
batch.drop_column("in_use")

View File

@@ -0,0 +1,37 @@
"""add first_visit column to users
Revision ID: e03269ce4058
Revises: 957c893a8a67
Create Date: 2025-05-01 06:58:05.118501
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'e03269ce4058'
down_revision: Union[str, None] = '957c893a8a67'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade():
# 1) add nullable column with UTC default
op.add_column(
"users",
sa.Column(
"first_visit",
sa.TIMESTAMP(timezone=True),
nullable=True,
server_default=sa.text("timezone('utc', now())")
),
)
# 2) once its there, make it NOT NULL and drop the default
op.alter_column("users", "first_visit", nullable=False, server_default=None)
def downgrade():
op.drop_column("users", "first_visit")

View File

@@ -0,0 +1,41 @@
"""dl_stats: guarantee an integer id primary key and an ok boolean."""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.engine.reflection import Inspector

revision = "ff38ddad43af"
down_revision = "7064708f684e"
branch_labels = None
depends_on = None


def upgrade():
    conn = op.get_bind()
    insp = Inspector.from_engine(conn)
    cols = {c["name"]: c for c in insp.get_columns("dl_stats")}
    # ── 1. guarantee "id" exists and is the PK ──────────────────────
    if "id" not in cols:
        # NOTE(review): primary_key=True on add_column does not create the
        # constraint — the explicit create_primary_key below does. Existing
        # rows would have NULL id and make that fail; confirm dl_stats is
        # empty (or id gets populated) whenever this path runs.
        op.add_column("dl_stats", sa.Column("id", sa.Integer(), primary_key=True))
        # rely on PostgreSQL's implicit sequence; no ALTER ... ADD GENERATED
        op.create_primary_key("dl_stats_pkey", "dl_stats", ["id"])
    else:
        pk_cols = insp.get_pk_constraint("dl_stats")["constrained_columns"]
        if "id" not in pk_cols:
            op.drop_constraint("dl_stats_pkey", "dl_stats", type_="primary")
            op.create_primary_key("dl_stats_pkey", "dl_stats", ["id"])
        # do **not** attempt to alter the column's default/identity
    # ── 2. add "ok" boolean if missing, back-fill from "success" ───
    if "ok" not in cols:
        op.add_column(
            "dl_stats",
            sa.Column("ok", sa.Boolean(), nullable=False,
                      server_default=sa.text("false")),
        )
        if "success" in cols:
            op.execute("UPDATE dl_stats SET ok = success")


def downgrade():
    # NOTE(review): unconditionally drops id and its PK even when upgrade()
    # found them pre-existing — downgrade can remove a column this revision
    # never added.
    op.drop_column("dl_stats", "ok")
    op.drop_constraint("dl_stats_pkey", "dl_stats", type_="primary")
    op.drop_column("dl_stats", "id")

View File

@@ -0,0 +1,10 @@
# No-op placeholder revision, kept so the migration history chain stays intact.
revision = 'ffae4495003d'
down_revision = '55327cbf08df'  # or whatever its real parent was
branch_labels = None
depends_on = None


def upgrade():
    """No schema changes."""
    return None


def downgrade():
    """No schema changes."""
    return None

View File

@@ -0,0 +1,45 @@
"""default zeros for new user counters"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.engine.reflection import Inspector
revision = "user_counters_defaults_old"
down_revision = "add_ok_to_dl_stats"
branch_labels = None
depends_on = None
def _has_column(table: str, column: str, conn) -> bool:
insp = Inspector.from_engine(conn)
return column in [c["name"] for c in insp.get_columns(table)]
def _add(column: str, coltype, default_sql: str, conn):
if not _has_column("users", column, conn):
op.add_column(
"users",
sa.Column(column, coltype, nullable=True, server_default=sa.text(default_sql)),
)
# Whether it was just added or already existed, be sure it is NOT NULL and no default remains
op.alter_column("users", column, nullable=False, server_default=None)
def upgrade():
conn = op.get_bind()
_add("videos_downloaded", sa.Integer(), "0", conn)
_add("mb_usage", sa.Float(), "0", conn)
_add("level", sa.Integer(), "1", conn)
_add("xp", sa.Integer(), "0", conn)
_add("tier", sa.Integer(), "0", conn)
_add("ban_status", sa.Boolean(), "false", conn)
_add("soft_banned", sa.Boolean(), "false", conn)
def downgrade():
for col in (
"soft_banned", "ban_status", "tier",
"xp", "level", "mb_usage", "videos_downloaded",
):
op.drop_column("users", col)

0
backend/core/__init__.py Normal file
View File

73
backend/core/db.py Normal file
View File

@@ -0,0 +1,73 @@
"""
Shared SQLAlchemy engine / session + schema bootstrap
"""
from __future__ import annotations
import importlib, datetime
import os
from sqlalchemy import (
create_engine, event, Table, Column, Text, DateTime, Index, make_url
)
from sqlalchemy.engine import url
from sqlalchemy.orm import sessionmaker, registry
from backend.core.settings import (
SQLALCHEMY_DATABASE_URI, DB_POOL_SIZE, DB_ECHO, SKIP_SCHEMA_BOOTSTRAP
)
IS_PG = SQLALCHEMY_DATABASE_URI.startswith("postgresql")
parsed_url = make_url(SQLALCHEMY_DATABASE_URI) # string into URL object
# engine & session
connect_args = {"sslmode": "require"} if parsed_url.drivername.startswith("postgresql") else {}
engine = create_engine(
SQLALCHEMY_DATABASE_URI,
pool_size = DB_POOL_SIZE,
max_overflow = 20,
pool_timeout = 30,
echo = DB_ECHO,
future = True,
pool_pre_ping = True,
pool_recycle=3600,
connect_args = connect_args
)
# SQLite -> WAL for concurrency
if SQLALCHEMY_DATABASE_URI.startswith("sqlite:///"):
@event.listens_for(engine, "connect")
def _set_wal(dbapi_conn, _):
dbapi_conn.execute("PRAGMA journal_mode=WAL;")
SessionLocal = sessionmaker(bind=engine, autoflush=False,
expire_on_commit=False, future=True)
# metadata (tables from every module)
mapper_registry = registry()
metadata = mapper_registry.metadata
# download-cache table
download_cache = Table(
"download_cache", metadata,
Column("key", Text, primary_key=True),
Column("path", Text, nullable=False),
Column("ext", Text, nullable=False),
Column("created_at", DateTime, default=datetime.datetime.utcnow,
nullable=False, index=True),
)
Index("ix_download_cache_created", download_cache.c.created_at)
# auto-bootstrap all
def _bootstrap_schema() -> None:
"""Import modules then create."""
table_modules = (
"backend.core.db_xp",
"backend.web.db_extra",
"backend.core.formats",
)
for mod in table_modules:
importlib.import_module(mod)
metadata.create_all(engine)
if SKIP_SCHEMA_BOOTSTRAP != "1":
_bootstrap_schema()

14
backend/core/db_cache.py Normal file
View File

@@ -0,0 +1,14 @@
"""
compat layer - exposes getconn used by older code
"""
from contextlib import contextmanager
from backend.core.db import engine
@contextmanager
def getconn():
conn = engine.raw_connection()
try:
yield conn
finally:
conn.commit()
conn.close()

31
backend/core/db_utils.py Normal file
View File

@@ -0,0 +1,31 @@
# backend/core/db_utils.py
"""Dialect-aware UPSERT statement builder shared by the data-access layer."""
from sqlalchemy import insert as sa_insert
from sqlalchemy.dialects.postgresql import insert as pg_insert

# NOTE(review): sibling modules import "backend.core.db"; confirm the bare
# "core.db" path also resolves in every entry point that loads this module.
from core.db import engine

# True when the configured engine talks to PostgreSQL.
_IS_PG = engine.url.get_backend_name().startswith("postgres")


def upsert(
    tbl,
    insert_values: dict,
    conflict_cols: list[str],
    update_values: dict | None = None,
):
    """Build (but do not execute) an insert-or-update statement for *tbl*.

    PostgreSQL: INSERT ... ON CONFLICT (*conflict_cols*) DO UPDATE SET
    *update_values* (falling back to *insert_values* when None).

    Other dialects: INSERT OR REPLACE (SQLite syntax).
    NOTE(review): this branch ignores conflict_cols/update_values, and
    OR REPLACE deletes and re-inserts the whole row rather than updating
    selected columns — semantics differ from the PostgreSQL path; confirm
    callers tolerate that.
    """
    if _IS_PG:
        stmt = (
            pg_insert(tbl)
            .values(**insert_values)
            .on_conflict_do_update(
                index_elements=conflict_cols,
                set_=update_values or insert_values,
            )
        )
    else:
        stmt = (
            sa_insert(tbl)
            .values(**insert_values)
            .prefix_with("OR REPLACE")  # SQLite
        )
    return stmt

70
backend/core/db_xp.py Normal file
View File

@@ -0,0 +1,70 @@
"""db_xp.py minimal user helper"""
from __future__ import annotations
from datetime import datetime, timezone
from typing import Dict, Any
from sqlalchemy import (
Table, MetaData, select, func, insert as sa_insert, text, inspect
)
from sqlalchemy.dialects.postgresql import insert as pg_insert
from backend.core.db import SessionLocal, engine
_NOW = lambda: datetime.now(timezone.utc)
metadata = MetaData()
_IS_PG = engine.url.get_backend_name().startswith("postgres")
def _get_users_table() -> Table:
return Table("users", metadata, autoload_with=engine)
def _get_column_info():
try:
insp = inspect(engine)
cols = {c["name"]: c for c in insp.get_columns("users")}
return cols
except Exception:
return {}
def _insert_ignore(**vals):
    """Build an INSERT that silently skips rows whose ``ip`` already exists."""
    users = _get_users_table()
    if not _IS_PG:
        # SQLite spelling: INSERT OR IGNORE.
        return sa_insert(users).values(**vals).prefix_with("OR IGNORE")
    # PostgreSQL spelling: INSERT ... ON CONFLICT (ip) DO NOTHING.
    stmt = pg_insert(users).values(**vals)
    return stmt.on_conflict_do_nothing(index_elements=["ip"])
def ensure_user(ip: str) -> None:
    """Insert a row for *ip* if absent; no-op when the user already exists.

    Tolerates schema drift: the legacy NOT NULL ``data`` column is only
    populated when the live table still has it (see _get_column_info).
    """
    cols = _get_column_info()
    has_data = (
        "data" in cols and
        not cols["data"].get("nullable", True)  # NOT NULL
    )
    vals = dict(
        ip=ip,
        first_visit=_NOW(),
        ban_status=False,
        soft_banned=False,
    )
    if has_data:
        vals["data"] = {}
    stmt = _insert_ignore(**vals)
    # begin() commits on success, rolls back on error.
    with SessionLocal.begin() as s:
        s.execute(stmt)


def is_ip_banned(ip: str) -> bool:
    """True when the users row for *ip* has ban_status set (False if absent)."""
    users = _get_users_table()
    with SessionLocal() as s:
        return bool(s.scalar(select(users.c.ban_status).where(users.c.ip == ip)))


def get_status(ip: str) -> Dict[str, Any]:
    """Return the soft-ban flag for *ip*, creating the user row on first call."""
    ensure_user(ip)
    users = _get_users_table()
    with SessionLocal() as s:
        soft = s.scalar(select(users.c.soft_banned).where(users.c.ip == ip))
        return {"soft_banned": bool(soft)}

287
backend/core/download.py Normal file
View File

@@ -0,0 +1,287 @@
"""backend/core/download.py — patched 2025-06-03"""
from __future__ import annotations
import asyncio
import contextvars
import datetime
import hashlib
import os
import random
import re
from pathlib import Path
from typing import Dict
import yt_dlp
from sqlalchemy import select, delete
from sqlalchemy.exc import NoResultFound
from core.db_utils import upsert
from core.settings import (
DOWNLOAD_DIR,
TMP_DIR,
PER_IP_CONCURRENCY,
DOWNLOAD_CACHE_TTL_SEC,
)
from core.network import get_proxy, record_proxy
from core.db_xp import ensure_user
from core.db import SessionLocal, download_cache
from core.progress_bus import update as set_progress
from core.formats import _cached_metadata_fetch, _clean_proxy
os.makedirs(DOWNLOAD_DIR, exist_ok=True)
os.makedirs(TMP_DIR, exist_ok=True)
EST_MB = contextvars.ContextVar("est_mb", default=0)
MAX_GLOBAL_DOWNLOADS = PER_IP_CONCURRENCY * 4
_global_semaphore = asyncio.Semaphore(MAX_GLOBAL_DOWNLOADS)
_ip_semaphores: Dict[str, asyncio.BoundedSemaphore] = {}
_inflight: Dict[str, asyncio.Task[str]] = {}
_ip_cache: Dict[str, set[str]] = {}
def _get_ip_cache(ip: str) -> set[str]:
    """Return (creating on first use) the per-IP set of seen dedup keys."""
    try:
        return _ip_cache[ip]
    except KeyError:
        fresh: set[str] = set()
        _ip_cache[ip] = fresh
        return fresh
def _url_fmt_hash(url: str, fmt: str) -> str:
return hashlib.blake2s(f"{url}::{fmt}".encode(), digest_size=16).hexdigest()
_ansi_escape = re.compile(r"\x1B\[[0-?]*[ -/]*[@-~]")
async def download(url: str, fmt_id: str, ip: str, sid: str) -> str:
    """Download *url* in format *fmt_id* for client *ip*, reporting to *sid*.

    Returns the absolute path of the finished file. Layers, in order:
    DB-backed result cache, per-(url, fmt) in-flight de-duplication, a per-IP
    duplicate-request set, global + per-IP concurrency semaphores, and up to
    3 retry rounds that each shuffle 5 scored proxies plus a DIRECT fallback.
    """
    fmt = fmt_id or "bestaudio"
    key = _url_fmt_hash(url, fmt)
    dup_key = f"{ip}::{url}::{fmt}"
    # 1) already on disk and fresh? serve instantly.
    cached = await asyncio.to_thread(_lookup_cache_sync, key)
    if cached:
        set_progress(sid, status="cached", pct=100, progress="Instant served from cache")
        return cached
    # 2) identical download already running? piggy-back on its task.
    if key in _inflight:
        return await _inflight[key]
    # 3) same client re-submitted recently — re-check the cache before starting over.
    ip_cache_set = _get_ip_cache(ip)
    if dup_key in ip_cache_set:
        cached2 = await asyncio.to_thread(_lookup_cache_sync, key)
        if cached2:
            set_progress(sid, status="cached", pct=100, progress="Instant served from cache")
            return cached2
    ip_cache_set.add(dup_key)
    sem = _ip_semaphores.setdefault(ip, asyncio.BoundedSemaphore(PER_IP_CONCURRENCY))
    async def _run() -> str:
        # The actual work, bounded by the global and per-IP semaphores.
        async with _global_semaphore, sem:
            ensure_user(ip)
            set_progress(sid, status="starting", pct=0, progress="Starting…")
            try:
                info = await asyncio.to_thread(_cached_metadata_fetch, url)
            except Exception as e:
                set_progress(sid, status="error", progress=f"Metadata fetch failed: {e}")
                raise
            attempt = 0
            last_exc: Exception | None = None
            while attempt < 3:
                attempt += 1
                # Fresh shuffled batch of proxies per round; DIRECT is the last resort.
                proxies = [get_proxy() for _ in range(5)]
                random.shuffle(proxies)
                proxies.append("DIRECT")
                for proxy_url in proxies:
                    try:
                        final_path = await _single_download(
                            url,
                            fmt,
                            key,
                            sid,
                            proxy_url,
                            info,
                        )
                    except asyncio.CancelledError:
                        raise  # propagate client cancellation untouched
                    except Exception as exc:
                        last_exc = exc
                        record_proxy(proxy_url, False)
                        clean_proxy = _clean_proxy(proxy_url)
                        set_progress(
                            sid,
                            status="retrying",
                            progress=f"Retry {attempt} failed (proxy {clean_proxy})",
                        )
                        await asyncio.sleep(1 + random.random())  # jittered back-off
                        continue
                    else:
                        record_proxy(proxy_url, True)
                        await asyncio.to_thread(_store_cache_sync, key, final_path)
                        set_progress(sid, status="finished", pct=100, progress="Done")
                        return final_path
            set_progress(sid, status="error", progress="Download failed")
            raise RuntimeError(f"All download attempts failed: {last_exc!r}")
    task = asyncio.create_task(_run())
    _inflight[key] = task
    try:
        return await task
    finally:
        _inflight.pop(key, None)
        # Allow this client to re-request the same (url, fmt) after a grace period.
        asyncio.create_task(_expire_ip_cache_entry(ip, dup_key))
async def _single_download(
    url: str,
    fmt: str,
    _unused_cache_key: str,
    sid: str,
    proxy_url: str,
    info: dict,
) -> str:
    """Run one yt-dlp subprocess attempt and return the produced file's path.

    Builds a sanitised "<title> - <artist> - <hash>" base name, short-circuits
    when that file already exists, then shells out to yt-dlp (audio-only →
    mp3 extraction, otherwise "<fmt>+bestaudio"), relaying stderr lines to the
    progress bus. Raises RuntimeError on non-zero exit, stall, or missing
    output; partial files are removed on any failure.

    Fix: the original assigned a throwaway ``cmd = ["yt-dlp", "-f", fmt, ...]``
    that the following if/else immediately overwrote — dead assignment removed.
    """
    title = info.get("title") or "unknown"
    artist = info.get("artist") or info.get("uploader") or "unknown"
    def _clean(s: str) -> str:
        # Strip characters that are illegal in most filesystems' names.
        return re.sub(r'[\\/*?:"<>|]', "", s)
    safe_title = _clean(title)
    safe_artist = _clean(artist)
    short_id = hashlib.blake2s(f"{url}::{fmt}".encode(), digest_size=8).hexdigest()
    base = f"{safe_title} - {safe_artist} - {short_id}"
    fmt_entry = next((f for f in info.get("formats", []) if f.get("format_id") == fmt), None)
    is_audio_only = bool(fmt_entry and fmt_entry.get("vcodec") == "none")
    if "soundcloud.com" in url.lower():
        is_audio_only = True
    # force .mp3 for audio-only, .mp4 otherwise
    ext_guess = "mp3" if is_audio_only else "mp4"
    outtmpl_path = DOWNLOAD_DIR / f"{base}.%(ext)s"
    final_path_expected = DOWNLOAD_DIR / f"{base}.{ext_guess}"
    if final_path_expected.exists() and final_path_expected.stat().st_size > 0:
        return str(final_path_expected)
    if is_audio_only:
        cmd = ["yt-dlp", "-x", "--audio-format", "mp3", "-o", str(outtmpl_path), url]
    else:
        cmd = ["yt-dlp", "-f", f"{fmt}+bestaudio", "-o", str(outtmpl_path), url]
    if proxy_url and proxy_url.upper() != "DIRECT":
        cmd.insert(1, f"--proxy={proxy_url}")
    proc = await asyncio.create_subprocess_exec(
        *cmd,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.PIPE,
    )
    stderr_buffer: list[str] = []
    try:
        # Relay stderr to the progress bus; keep only the last 10 lines for errors.
        # NOTE(review): 10 s of stderr silence stops relaying and falls through
        # to the 15 s wait below — long, quiet downloads may be killed; confirm.
        while True:
            try:
                line = await asyncio.wait_for(proc.stderr.readline(), timeout=10)
            except asyncio.TimeoutError:
                break
            if not line:
                break
            decoded = line.decode(errors="ignore").strip()
            if decoded:
                stderr_buffer.append(decoded)
                if len(stderr_buffer) > 10:
                    stderr_buffer.pop(0)
                set_progress(sid, status="running", pct=None, progress=decoded)
        try:
            rc = await asyncio.wait_for(proc.wait(), timeout=15)
        except asyncio.TimeoutError:
            proc.kill()
            await proc.wait()
            raise RuntimeError("yt-dlp stalled and was killed (timeout)")
        if rc != 0:
            raise RuntimeError(f"yt-dlp exited with code {rc}. Last lines: {' | '.join(stderr_buffer)}")
        candidates = [
            p for p in DOWNLOAD_DIR.glob(f"{base}.*")
            if p.is_file() and p.stat().st_size > 0
        ]
        if not candidates:
            raise RuntimeError("No output file produced")
        # Newest file wins (actual extension may differ from ext_guess).
        candidates.sort(key=lambda p: p.stat().st_mtime, reverse=True)
        return str(candidates[0])
    except asyncio.CancelledError:
        proc.kill()
        await proc.wait()
        raise
    except Exception:
        # Remove partial output so a retry starts clean.
        for f in DOWNLOAD_DIR.glob(f"{base}.*"):
            f.unlink(missing_ok=True)
        raise
def _lookup_cache_sync(key: str) -> str | None:
    """Return the cached file path for *key*, or None when absent/expired/gone.

    Expired or vanished entries are pruned from the table as a side effect;
    an expired entry's file is also removed from disk (best effort).
    """
    now = datetime.datetime.now(datetime.timezone.utc)
    with SessionLocal() as session:
        row = session.execute(
            select(download_cache.c.path, download_cache.c.created_at)
            .where(download_cache.c.key == key)
        ).first()
        if row is None:
            return None
        path_on_disk, created_at = row
        # SQLite hands back naive datetimes; pin them to UTC before comparing.
        if created_at.tzinfo is None:
            created_at = created_at.replace(tzinfo=datetime.timezone.utc)
        expired = (now - created_at).total_seconds() > DOWNLOAD_CACHE_TTL_SEC
        if expired or not os.path.exists(path_on_disk):
            session.execute(delete(download_cache).where(download_cache.c.key == key))
            session.commit()
            if expired:
                try:
                    os.remove(path_on_disk)
                except OSError:
                    pass
            return None
        return path_on_disk
def _store_cache_sync(key: str, path: str) -> None:
    """Insert or refresh the download-cache row for *key* pointing at *path*."""
    now = datetime.datetime.now(datetime.timezone.utc)
    ext = Path(path).suffix.lstrip(".")
    fresh = {"key": key, "path": path, "ext": ext, "created_at": now}
    stmt = upsert(
        download_cache,
        insert_values=fresh,
        conflict_cols=["key"],
        update_values={"path": path, "ext": ext, "created_at": now},
    )
    with SessionLocal.begin() as session:
        session.execute(stmt)
async def _expire_ip_cache_entry(ip: str, dup_key: str, delay: int = 300) -> None:
    """After *delay* seconds, drop *dup_key* from the per-IP duplicate set."""
    await asyncio.sleep(delay)
    bucket = _get_ip_cache(ip)
    bucket.discard(dup_key)

265
backend/core/formats.py Normal file
View File

@@ -0,0 +1,265 @@
"""backend/core/formats.py — patched 2025-06-03"""
from __future__ import annotations
import asyncio
import os
import re
import urllib.parse as _url
from datetime import datetime, timezone
from functools import lru_cache
from pathlib import Path
from urllib.parse import urlparse
import yt_dlp
import structlog
from sqlalchemy import select, delete, Table, Column, Text, DateTime, JSON
from sqlalchemy.dialects.postgresql import insert as pg_insert
from sqlalchemy.exc import NoResultFound
from core.db import SessionLocal, metadata
from core.network import get_proxy, record_proxy, stealth_headers
from core.settings import FORMAT_CACHE_TTL_SEC
log = structlog.get_logger()
# Cross-process cache of yt-dlp metadata, keyed by canonical URL.
# "info" stores a trimmed JSON document: {title, formats, platform}.
format_cache = Table(
    "format_cache",
    metadata,
    Column("url", Text, primary_key=True),
    Column("cached_at", DateTime, nullable=False),  # UTC; may round-trip naive on SQLite
    Column("info", JSON, nullable=False),
)
# URL classifiers for the platforms given special canonicalisation below.
_YT_PAT = re.compile(r"(youtu\.be/|youtube\.com/(?:watch|shorts))", re.I)
_BC_PAT = re.compile(r"\.bandcamp\.com", re.I)
_SC_PAT = re.compile(r"(?:soundcloud\.com|on\.soundcloud\.com|m\.soundcloud\.com)", re.I)
_TW_PAT = re.compile(r"(?:twitter\.com|x\.com|mobile\.twitter\.com)", re.I)
# Strips ANSI escape sequences from yt-dlp error text before logging.
_ansi_escape = re.compile(r"\x1B\[[0-?]*[ -/]*[@-~]")
# resolve cookie file path from env or fallback to root-relative path
COOKIE_FILE = Path(os.getenv("YT_COOKIE_FILE", Path(__file__).resolve().parents[2] / "playwright_cookies.txt"))
log.info("cookie_file_resolved", path=str(COOKIE_FILE), exists=COOKIE_FILE.exists())
def _canonical_url(u: str) -> str:
    """Normalise *u* to the canonical form used as the format-cache key.

    YouTube links collapse to https://www.youtube.com/watch?v=<id>; SoundCloud
    mobile/short hosts collapse to soundcloud.com; Twitter hosts collapse to
    x.com. Query strings and fragments are dropped for every http(s) URL.
    Non-http(s) input is returned as-is after stripping whitespace.
    """
    u = u.strip()
    if not u.lower().startswith(("http://", "https://")):
        return u

    def _strip_extras(raw: str) -> str:
        # Drop query string and fragment, keep everything else.
        parts = _url.urlparse(raw)
        return _url.urlunparse(parts._replace(query="", fragment=""))

    if _YT_PAT.search(u):
        parts = _url.urlparse(u)
        if "youtu.be" in parts.netloc:
            vid = parts.path.lstrip("/")
        else:
            vid = (_url.parse_qs(parts.query).get("v") or [None])[0]
            if not vid and parts.path.startswith("/shorts/"):
                vid = parts.path.split("/")[2]
        return f"https://www.youtube.com/watch?v={vid}" if vid else u
    if _BC_PAT.search(u):
        return _strip_extras(u)
    if _SC_PAT.search(u):
        unified = u.replace("m.soundcloud.com", "soundcloud.com").replace(
            "on.soundcloud.com", "soundcloud.com"
        )
        return unified.split("?")[0].split("#")[0]
    if _TW_PAT.search(u):
        unified = u.replace("mobile.twitter.com", "x.com").replace("twitter.com", "x.com")
        return _strip_extras(unified)
    return _strip_extras(u)
def _clean_proxy(proxy: str) -> str:
if not proxy or proxy.upper() == "DIRECT":
return "DIRECT"
parsed = urlparse(proxy)
return (
f"{parsed.scheme}://{parsed.hostname}{f':{parsed.port}' if parsed.port else ''}"
if parsed.hostname
else proxy
)
def platform_badge(u: str) -> str:
    """Classify *u* into a UI badge: youtube/soundcloud/twitterx/bandcamp/other."""
    lowered = u.lower()
    checks = (
        ("youtube", ("youtu",)),
        ("soundcloud", ("soundcloud",)),
        ("twitterx", ("twitter", "x.com")),
        ("bandcamp", ("bandcamp",)),
    )
    for badge, needles in checks:
        if any(needle in lowered for needle in needles):
            return badge
    return "other"
def user_facing_formats(fmts: list[dict]) -> list[dict]:
    """Condense yt-dlp's raw format list into the short menu shown to users.

    Picks the highest-bitrate audio-only stream (labelled "Audio (.mp3)"),
    followed by the best video stream at each preferred height, descending.
    """
    menu: list[dict] = []

    def _best(candidates: list[dict]) -> dict:
        # Highest total bitrate wins; a missing tbr counts as 0.
        return max(candidates, key=lambda f: f.get("tbr") or 0)

    audio = [f for f in fmts if f.get("vcodec") == "none" and f.get("acodec") != "none"]
    if audio:
        pick = _best(audio)
        menu.append(
            {
                "format_id": pick["format_id"],
                "ext": pick.get("ext", "mp3"),
                "label": "Audio (.mp3)",
            }
        )
    for height in (1440, 1080, 720, 480, 360):
        video = [f for f in fmts if f.get("height") == height and f.get("vcodec") != "none"]
        if video:
            pick = _best(video)
            menu.append(
                {
                    "format_id": pick["format_id"],
                    "ext": pick.get("ext", "mp4"),
                    "label": f"{height}p",
                }
            )
    return menu
@lru_cache(maxsize=1024)
def _cached_metadata_fetch(url: str) -> dict:
    """Fetch yt-dlp metadata for *url* directly (no proxy), memoised per process.

    Only successful results are cached — a raising call is retried on the next
    invocation (lru_cache does not store exceptions).
    """
    opts = {"quiet": True, "skip_download": True}
    try:
        with yt_dlp.YoutubeDL(opts) as ydl:
            return ydl.extract_info(url, download=False)
    except Exception as e:
        # Strip ANSI colour codes that yt-dlp embeds in error strings.
        msg = _ansi_escape.sub("", str(e)).strip()
        log.warning("metadata_fail_direct", url=url, err=msg)
        raise
def _fetch_metadata_sync(url: str, proxy_url: str = "DIRECT") -> dict:
    """Fetch yt-dlp metadata for *url* through *proxy_url* ("DIRECT" = no proxy).

    Sends randomised stealth headers and the cookie jar at COOKIE_FILE.
    Re-raises whatever yt-dlp raises after logging a sanitised message.
    """
    opts = {
        "quiet": True,
        "skip_download": True,
        "proxy": None if proxy_url == "DIRECT" else proxy_url,
        "http_headers": stealth_headers(),
        "cookiefile": str(COOKIE_FILE),
    }
    if not COOKIE_FILE.exists():
        # yt-dlp is still invoked; the warning flags likely auth failures.
        log.warning("cookie_file_missing", path=str(COOKIE_FILE))
    try:
        with yt_dlp.YoutubeDL(opts) as ydl:
            return ydl.extract_info(url, download=False)
    except Exception as e:
        clean_proxy = _clean_proxy(proxy_url)
        msg = _ansi_escape.sub("", str(e)).strip()
        log.warning("metadata_fail_proxy", url=url, proxy=clean_proxy, err=msg)
        raise
async def _fetch_metadata(url: str) -> dict:
    """Resolve yt-dlp metadata for *url*, proxying and retrying when needed.

    YouTube and Bandcamp go straight to the memoised direct fetch; everything
    else gets up to three attempts through scored proxies, feeding each
    outcome back into the proxy scoreboard. Raises RuntimeError once all
    attempts fail.
    """
    direct_hosts = ("youtube.com", "youtu.be", "bandcamp.com")
    if any(host in url.lower() for host in direct_hosts):
        return await asyncio.to_thread(_cached_metadata_fetch, url)
    attempt = 0
    while attempt < 3:
        attempt += 1
        proxy = get_proxy()
        try:
            info = await asyncio.to_thread(_fetch_metadata_sync, url, proxy)
            if not info.get("formats"):
                raise ValueError("No formats found")
        except Exception as e:
            record_proxy(proxy, False)
            log.warning(
                "metadata_retry_fail",
                attempt=attempt,
                proxy=_clean_proxy(proxy),
                err=_ansi_escape.sub("", str(e)).strip(),
            )
        else:
            record_proxy(proxy, True)
            return info
    raise RuntimeError("Format fetch failed after 3 attempts")
async def choose_format(url: str) -> dict:
    """Return the format-selection payload for *url*.

    Canonicalises the URL, short-circuits platforms that always auto-download
    audio (SoundCloud, X), serves from the DB cache when warm, and otherwise
    fetches metadata, caches a trimmed copy, and returns the user-facing menu.
    """
    url = _canonical_url(url)
    if not re.match(r"^https?://", url, re.I):
        return {"error": "Invalid URL"}
    if any(host in url.lower() for host in ("soundcloud.com", "x.com")):
        return {"auto_download": True, "fmt_id": "bestaudio", "url": url}
    cached = await asyncio.to_thread(_lookup_cache_sync, url)
    if cached:
        return {
            "formats": user_facing_formats(cached["formats"]),
            "title": cached.get("title", "Unknown"),
            "platform": cached.get("platform", ""),
            "url": url,
        }
    info = await _fetch_metadata(url)
    doc = {
        "title": info.get("title", "Unknown"),
        "formats": info.get("formats", []),
        "platform": platform_badge(url),
    }
    await asyncio.to_thread(_store_cache_sync, url, doc)
    return {
        "formats": user_facing_formats(doc["formats"]),
        "title": doc["title"],
        "platform": doc["platform"],
        "url": url,
    }
def _lookup_cache_sync(url: str) -> dict | None:
    """Return the cached metadata doc for *url*, pruning stale rows past the TTL."""
    now = datetime.now(timezone.utc)
    with SessionLocal() as session:
        row = session.execute(
            select(format_cache.c.info, format_cache.c.cached_at).where(
                format_cache.c.url == url
            )
        ).first()
        if row is None:
            return None
        info, cached_at = row
        # SQLite hands back naive datetimes; pin them to UTC before comparing.
        if cached_at.tzinfo is None:
            cached_at = cached_at.replace(tzinfo=timezone.utc)
        if (now - cached_at).total_seconds() > FORMAT_CACHE_TTL_SEC:
            session.execute(delete(format_cache).where(format_cache.c.url == url))
            session.commit()
            return None
        return info
def _store_cache_sync(url: str, info: dict) -> None:
    """Insert or refresh the metadata-cache row for *url*.

    Fix: the original used the PostgreSQL-only ``pg_insert`` ON CONFLICT
    statement unconditionally, which fails on the SQLite database that
    core.settings falls back to when DATABASE_URL is unset. Use the shared
    dialect-aware ``upsert`` helper instead — the same pattern
    core.download._store_cache_sync already uses.
    """
    from core.db_utils import upsert  # local import: keeps module import graph unchanged

    now = datetime.now(timezone.utc)
    stmt = upsert(
        format_cache,
        insert_values={"url": url, "cached_at": now, "info": info},
        conflict_cols=["url"],
        update_values={"cached_at": now, "info": info},
    )
    with SessionLocal.begin() as session:
        session.execute(stmt)

40
backend/core/logging.py Normal file
View File

@@ -0,0 +1,40 @@
"""
Logging - 16 May 2025
Dev - colored console
Prod - structured JSON
"""
from __future__ import annotations
import logging, os, structlog
from core.settings import LOG_LEVEL, ENV
def init_logging() -> None:
    """Configure structlog: JSON output in production, coloured console in dev.

    Also quietens SQLAlchemy's engine logger down to WARNING.
    """
    level = getattr(logging, LOG_LEVEL.upper(), logging.INFO)
    if ENV == "production":
        chain = [
            structlog.processors.TimeStamper(fmt="%Y-%m-%dT%H:%M:%S", utc=True),
            structlog.processors.add_log_level,
            _add_path,
            structlog.processors.JSONRenderer(),
        ]
    else:
        chain = [
            structlog.processors.TimeStamper(fmt="%H:%M:%S"),
            structlog.processors.add_log_level,
            structlog.dev.ConsoleRenderer(colors=True),
        ]
    structlog.configure(
        wrapper_class=structlog.make_filtering_bound_logger(level),
        processors=chain,
    )
    logging.getLogger("sqlalchemy.engine").setLevel(logging.WARNING)
def _add_path(_, __, event_dict):
    """structlog processor: attach the current HTTP path to the event dict.

    quart is imported lazily so logging setup has no hard web-framework
    dependency. NOTE(review): relies on the request proxy being falsy outside
    a request context instead of raising — confirm against the quart version
    in use.
    """
    from quart import request
    if request:
        event_dict["path"] = request.path
    return event_dict

49
backend/core/network.py Normal file
View File

@@ -0,0 +1,49 @@
"""
network.py - 16 May 2025
"""
from __future__ import annotations
import random, structlog
from functools import lru_cache
from typing import Optional
from fake_useragent import UserAgent
from tls_client import Session as TLSSession
from backend.web.db_extra import acquire_proxy, release_proxy, queue_proxy_result
log = structlog.get_logger()
@lru_cache(maxsize=128)
def stealth_headers(rotate: bool = False) -> dict[str, str]:
    """Return randomised browser-like HTTP headers, memoised per *rotate* value.

    Because of the lru_cache, repeated calls with the same *rotate* argument
    return the identical header dict; passing rotate=True clears the cache
    first so a fresh set is generated on the next miss.

    Fix: the original also built a browser→client-id map and instantiated a
    ``tls_client.Session`` whose result was immediately discarded — dead work
    on every cache miss, removed here.
    """
    if rotate:
        stealth_headers.cache_clear()
    browser = random.choice(["chrome", "firefox", "edge"])
    return {
        "User-Agent": UserAgent()[browser],
        "Accept-Language": random.choice(
            ["en-US,en;q=0.9", "en-GB,en;q=0.9", "en;q=0.8"]
        ),
    }
def get_proxy() -> str:
    """Lease a proxy URL from the pool; fall back to "DIRECT" when none qualify."""
    leased = acquire_proxy()
    if not leased:
        log.debug("proxy.none", msg="DIRECT fallback")
        return "DIRECT"
    return leased
def record_proxy(px: str, ok: bool) -> None:
    """Report a proxy outcome: queue the score update and release the lease."""
    if px and px != "DIRECT":
        queue_proxy_result(px, ok)
        release_proxy(px, ok)

View File

@@ -0,0 +1,62 @@
"""
progress_bus.py - 07 May 2025
"""
from __future__ import annotations
import asyncio, json, time
from typing import Dict, Any
# in-mem state
_progress: Dict[str, Dict[str, Any]] = {}
_watchers: Dict[str, list[asyncio.Queue[str]]] = {}
_TTL = 60 * 60 # keep finished/error records 1 h
def _now() -> float: return time.time()
def register(sid: str) -> None:
    """Create/reset the progress record for *sid* and notify subscribers."""
    _progress[sid] = {"pct": 0, "progress": "", "status": "running", "ts": _now()}
    _broadcast(sid)
def update(sid: str, *, pct: float | None = None,
           progress: str | None = None, status: str | None = None) -> None:
    """Merge the given fields into *sid*'s record (auto-registering) and broadcast."""
    if sid not in _progress:
        register(sid)
    record = _progress[sid]
    for field, value in (("pct", pct), ("progress", progress), ("status", status)):
        if value is not None:
            record[field] = value
    record["ts"] = _now()
    _broadcast(sid)
def get(sid: str) -> Dict[str, Any]:
    """Return *sid*'s progress record after sweeping stale entries.

    Unknown sids yield {"status": "idle"}.
    """
    _gc()
    try:
        return _progress[sid]
    except KeyError:
        return {"status": "idle"}
def clear(sid: str) -> None:
    """Forget *sid*'s progress record and drop its subscriber queues."""
    for registry in (_progress, _watchers):
        registry.pop(sid, None)
# ── SSE integration ──────────────────────────────────────
def subscribe(sid: str) -> asyncio.Queue[str]:
    """Attach a new SSE subscriber queue for *sid*, pre-seeded with current state."""
    queue: asyncio.Queue[str] = asyncio.Queue(maxsize=16)
    _watchers.setdefault(sid, []).append(queue)
    # Push the present snapshot immediately so clients render without waiting.
    queue.put_nowait(json.dumps({"sid": sid, **get(sid)}))
    return queue
def _broadcast(sid: str) -> None:
    """Fan the current record for *sid* out to every subscriber queue."""
    queues = _watchers.get(sid)
    if not queues:
        return
    payload = json.dumps({"sid": sid, **_progress[sid]})
    for queue in list(queues):
        try:
            queue.put_nowait(payload)
        except asyncio.QueueFull:
            # Slow consumer: drop this frame rather than block the producer.
            pass
# ── garbage collector ────────────────────────────────────
def _gc() -> None:
    """Drop finished/error records older than _TTL (and their watchers)."""
    cutoff = _now()
    expired = [
        sid for sid, rec in _progress.items()
        if rec["status"] in ("finished", "error") and cutoff - rec["ts"] > _TTL
    ]
    for sid in expired:
        clear(sid)

48
backend/core/settings.py Normal file
View File

@@ -0,0 +1,48 @@
import os
from pathlib import Path
from functools import lru_cache
from dotenv import load_dotenv
load_dotenv()
# ─── Paths ───────────────────────────────────────────────
ROOT_DIR = Path(__file__).resolve().parent.parent   # backend/
DATA_DIR = ROOT_DIR / "data"
TMP_DIR = Path("/tmp")  # NOTE(review): POSIX-only — confirm no Windows target
DOWNLOAD_DIR = DATA_DIR / "downloads"
USERS_DIR = DATA_DIR / "users"
ENV = os.getenv("APP_ENV", "development")
PROXY_LIST_FILE = Path(os.getenv("PROXY_LIST_FILE", ".50.txt"))
# ─── Additional ───────────────────────────────────────────────
PROXY_USERNAME = os.getenv("PROXY_USERNAME")
PROXY_PASSWORD = os.getenv("PROXY_PASSWORD")
# ─── DB and SQLAlchemy ───────────────────────────────────────────
SQLALCHEMY_DATABASE_URI = os.getenv("DATABASE_URL") or f"sqlite:///{DATA_DIR / 'local.db'}"
DB_POOL_SIZE = int(os.getenv("DB_POOL_SIZE", 20))
# Fix: bool(os.getenv("DB_ECHO", False)) was True for ANY non-empty value,
# including "0" and "false". Parse the usual textual spellings instead.
DB_ECHO = os.getenv("DB_ECHO", "").strip().lower() in ("1", "true", "yes", "on")
# ─── Concurrency ───────────────────────────────────────────────
CPU_COUNT = os.cpu_count() or 2
THREADS_MAX = min(32, CPU_COUNT * 4)
PROCS_MAX = min(CPU_COUNT, 4)
PER_IP_CONCURRENCY = int(os.getenv("PER_IP_CONCURRENCY", 2))
# ─── Cache and Tuning knobs ──────────────────────────────────────
FORMAT_CACHE_TTL_SEC = int(os.getenv("FORMAT_CACHE_TTL_SEC", 8_000))
DOWNLOAD_CACHE_TTL_SEC = int(os.getenv("DOWNLOAD_CACHE_TTL_SEC", 86_400))  # 24h
PARALLEL_CHUNK_MB = int(os.getenv("PARALLEL_CHUNK_MB", 2))
MAX_CONCURRENT_FRAG = int(os.getenv("MAX_CONCURRENT_FRAG", 4))
ARIA2C_THRESHOLD_MB = int(os.getenv("ARIA2C_THRESHOLD_MB", 512))
MIN_SCORE = float(os.getenv("PROXY_MIN_SCORE", "0.05"))
MAX_IN_USE = int(os.getenv("PROXY_CONCURRENCY_LIMIT", "4"))
FAIL_COOLDOWN_SEC = int(os.getenv("PROXY_FAIL_COOLDOWN", "600"))
_MAX_LOGIN_FAILS = int(os.getenv("MAX_LOGIN_FAILS", "12"))
_MAX_INVALID_URLS = int(os.getenv("MAX_INVALID_URLS", "20"))
_WINDOW_MINUTES = int(os.getenv("WINDOW_MINUTES", "60"))
# ─── Logging ───────────────────────────────────────────────────
LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
# ─── db ───────────────────────────────────────────────────
SKIP_SCHEMA_BOOTSTRAP = int(os.getenv("SKIP_SCHEMA_BOOTSTRAP", "0"))
@lru_cache
def ensure_dirs() -> None:
    """Create the data directories; lru_cache makes repeat calls no-ops."""
    for directory in (DATA_DIR, USERS_DIR, DOWNLOAD_DIR):
        directory.mkdir(parents=True, exist_ok=True)
ensure_dirs()

0
backend/web/__init__.py Normal file
View File

303
backend/web/app.py Normal file
View File

@@ -0,0 +1,303 @@
"""app.py Jul 15 2025"""
from __future__ import annotations
import asyncio
import mimetypes
import os
import secrets
import shutil
import signal
import sysconfig
import threading
from pathlib import Path
from typing import Dict
import aiofiles
import structlog
from quart import (
Quart,
Response,
jsonify,
redirect,
render_template,
request,
session,
url_for, render_template_string,
)
from backend.core.logging import init_logging
from core.settings import TMP_DIR, DOWNLOAD_DIR
from core.formats import choose_format, _lookup_cache_sync, _cached_metadata_fetch
from core.download import download, EST_MB
from core.formats import choose_format as choose_format_logic
from core.db_xp import is_ip_banned, ensure_user, get_status
from core.web.db_extra import invalid_over_limit, init_proxy_seed, start_background_tasks
from core.db import metadata, engine
from core import progress_bus
init_logging()
log = structlog.get_logger()
def _frontend_root() -> Path:
here = Path(__file__).resolve().parent
dev = here.parent.parent / "frontend"
return dev if dev.exists() else Path(sysconfig.get_path("data")) / "share" / "s1ne" / "frontend"
FRONTEND_ROOT = _frontend_root()
# Quart app serves templates and static assets straight from the frontend tree.
app = Quart(
    __name__,
    template_folder=str(FRONTEND_ROOT / "templates"),
    static_folder=str(FRONTEND_ROOT / "static"),
)
# NOTE(review): os.getenv may return None here, leaving the app without a
# secret key — confirm SECRET_KEY_WORD is always set in deployment.
app.secret_key = os.getenv("SECRET_KEY_WORD")
# sid -> in-flight download task (used by /download_file and /cancel_download)
_tasks: Dict[str, asyncio.Task] = {}
async def _cleanup_temp(interval: int = 900) -> None:
    """Background janitor: every *interval* seconds, delete stale lock files
    and temp directories older than 12 hours.

    Fix: the original computed the cutoff from ``asyncio.get_event_loop().time()``
    — a monotonic clock with an arbitrary epoch — and compared it against
    ``st_mtime``, which is seconds since the Unix epoch. The two clocks are not
    comparable, so the age test was meaningless. Use wall-clock time instead.
    """
    import time  # local import: app.py does not import time at module level

    while True:
        cutoff = time.time() - 60 * 60 * 12
        for p in (TMP_DIR / "ytlocks").glob("*.lock"):
            if p.stat().st_mtime < cutoff:
                p.unlink(missing_ok=True)
        for pattern in ("yt_*", "tmp*"):
            for p in TMP_DIR.glob(pattern):
                if p.is_dir() and p.stat().st_mtime < cutoff:
                    shutil.rmtree(p, ignore_errors=True)
        await asyncio.sleep(interval)
async def _file_iter(path: Path, chunk: int = 1 << 15):
    """Async generator yielding *path*'s bytes in *chunk*-sized pieces."""
    async with aiofiles.open(path, "rb") as handle:
        piece = await handle.read(chunk)
        while piece:
            yield piece
            piece = await handle.read(chunk)
async def _shutdown_waiter():
    """Brief grace pause during shutdown, then log that cancellation completed."""
    await asyncio.sleep(0.1)
    log.info("shutdown.tasks_cancelled")
def _graceful_exit() -> None:
    """Signal handler: cancel in-flight downloads, then force-exit if hung.

    Cancels every pending task in _tasks, schedules a short waiter coroutine,
    and arms a daemon thread that hard-kills the process (os._exit) after 5 s
    in case the event loop never drains.
    """
    log.info("shutdown.initiated")
    for t in list(_tasks.values()):
        if not t.done():
            t.cancel()
    asyncio.create_task(_shutdown_waiter())
    def force_exit():
        # Last-resort watchdog: if graceful shutdown stalls, exit with status 1.
        import time
        time.sleep(5)
        os._exit(1)
    threading.Thread(target=force_exit, daemon=True).start()
@app.before_serving
async def _launch_tasks() -> None:
    """Startup hook: create DB tables, seed proxies, start janitors, wire signals."""
    metadata.create_all(engine)
    await init_proxy_seed()
    start_background_tasks(asyncio.get_running_loop())
    asyncio.create_task(_cleanup_temp())
    loop = asyncio.get_running_loop()
    # Route SIGTERM/SIGINT through the graceful-shutdown path.
    for sig in (signal.SIGTERM, signal.SIGINT):
        loop.add_signal_handler(sig, _graceful_exit)
@app.route("/")
async def home():
    """Render the main page, or the login page when the auth cookie is absent."""
    if not request.cookies.get("auth"):
        return await render_template("login.html")
    client_ip = request.remote_addr or "0.0.0.0"
    ensure_user(client_ip)
    status = get_status(client_ip)
    return await render_template("index.html", soft_banned=status["soft_banned"])
@app.route("/login", methods=["GET", "POST"])
async def login():
    """Password gate: sets the "auth" cookie when MASTER_PASSWORD matches."""
    if request.method == "GET":
        return await render_template("login.html")
    submitted = (await request.form).get("password")
    if submitted != os.getenv("MASTER_PASSWORD"):
        return await render_template("login.html", error_badge="Incorrect password")
    resp = redirect(url_for("home"))
    # NOTE(review): secure=False lets the cookie travel over plain HTTP —
    # confirm this is intended outside local development.
    resp.set_cookie("auth", "1", httponly=True, secure=False)
    return resp
@app.route("/logout")
async def logout():
    """Clear the session and auth cookie, then bounce back to the login page."""
    session.clear()
    response = redirect(url_for("login"))
    response.delete_cookie("auth")
    return response
@app.route("/choose_format", methods=["POST"])
async def handle_choose_format() -> Response:
    """Resolve the submitted URL into a format menu (JSON or form body).

    Assigns a per-session run id ("sid") that the later /download_file call
    must echo back.
    """
    try:
        if request.content_type and "application/json" in request.content_type:
            payload = await request.get_json(silent=True) or {}
            url = (payload.get("url") or "").strip()
        else:
            url = ((await request.form).get("url") or "").strip()
        if not url:
            return jsonify({"error": "url field required"}), 422
        run_id: str = session.get("run_id") or secrets.token_urlsafe(10)
        session["run_id"] = run_id
        res: dict = await choose_format_logic(url)
        res["sid"] = run_id
        if "error" in res:
            return jsonify(res), 400
        log.info("choose_format.success", url=url, title=res.get("title"), platform=res.get("platform"))
        return jsonify(res)
    except Exception as e:
        log.exception("choose_format.exception", err=str(e))
        return jsonify({"error": "Internal error during format selection"}), 500
@app.route("/download_file")
async def dl():
    """Stream the requested media file to the client.

    Validates ban status (skipped for localhost dev), query parameters, the
    per-session sid handed out by /choose_format, and single-flight per sid;
    then runs the download task and streams the result as an attachment.

    Fixes: the original ran ``is_ip_banned`` a second time after the dev-mode
    bypass, which re-applied the ban to 127.0.0.1 and made the bypass dead
    code; it also contained a no-op ``if run_id is None: pass``. Both removed.
    """
    ip = request.remote_addr or "0.0.0.0"
    if ip == "127.0.0.1":
        log.info("dev_mode.skip_ban_check", ip=ip)
    elif is_ip_banned(ip):
        log.warning("download.reject.banned", ip=ip)
        return jsonify({"error": "Banned"}), 403
    url = request.args.get("url", "").strip()
    fmt = request.args.get("format_id", "").strip()
    sid = request.args.get("sid", "").strip()
    run_id = session.get("run_id")
    if not url or not fmt:
        log.warning("download.reject.missing_params", url=url, fmt=fmt)
        return jsonify({"error": "Missing URL or format"}), 400
    if sid in _tasks and not _tasks[sid].done():
        log.warning("download.reject.already_running", sid=sid)
        return jsonify({"error": "download already running"}), 409
    if sid != run_id:
        log.warning("download.reject.sid_mismatch", sid=sid, session_run_id=run_id)
        return jsonify({
            "error": "Session mismatch please refresh the page and select a format again."
        }), 403
    progress_bus.register(sid)
    async def _run_download() -> Path:
        # Estimate the file size from cached metadata (for progress UI),
        # then run the actual download pipeline.
        try:
            meta = await asyncio.to_thread(_lookup_cache_sync, url)
            if meta:
                chosen = next((f for f in meta["formats"] if f["format_id"] == fmt), None)
                est = (
                    chosen.get("filesize")
                    or chosen.get("filesize_approx")
                    or 0
                ) if chosen else 0
                EST_MB.set(int(est / 1_048_576))
            log.info("download.starting", sid=sid, url=url, fmt=fmt)
            path_str = await download(url, fmt, ip, sid)
            return Path(path_str)
        finally:
            _tasks.pop(sid, None)
    task = asyncio.create_task(_run_download())
    _tasks[sid] = task
    try:
        tmp_path = await task
        mime = mimetypes.guess_type(tmp_path.name)[0] or "application/octet-stream"
        log.info("download.success", file=str(tmp_path), sid=sid)
        resp = Response(
            _file_iter(tmp_path),
            headers={
                "Content-Type": mime,
                "Content-Disposition": f'attachment; filename="{tmp_path.name}"',
            },
        )
        if hasattr(resp, "call_after_response"):
            def _after():
                # Mark done, then clean temp dirs once the response has flushed.
                progress_bus.update(sid, status="finished", pct=100, progress="Done")
                progress_bus.clear(sid)
                if str(tmp_path.parent).startswith(str(TMP_DIR)):
                    shutil.rmtree(tmp_path.parent, ignore_errors=True)
            resp.call_after_response(_after)
        return resp
    except asyncio.CancelledError:
        log.warning("download.cancelled", sid=sid)
        progress_bus.update(sid, status="cancelled", progress="Cancelled")
        return jsonify({"error": "Download cancelled"}), 499
    except Exception as e:
        log.exception("download.failed", sid=sid, err=str(e))
        progress_bus.update(sid, status="error", progress="Error")
        return jsonify({"error": "Download failed"}), 500
@app.route("/cancel_download", methods=["POST"])
async def cancel_dl():
    """Cancel the in-flight download for ?sid=… (no-op when unknown/finished)."""
    sid = request.args.get("sid", "").strip()
    if sid:
        pending = _tasks.get(sid)
        if pending is not None and not pending.done():
            pending.cancel()
            progress_bus.update(sid, status="cancelled", progress="Cancelled")
    return jsonify({"status": "cancelled"})
@app.route("/api/progress/<sid>")
async def progress_stream(sid: str):
    """Server-Sent Events stream of progress updates for *sid*.

    Subscribes a queue on the progress bus and relays each JSON payload as an
    SSE "data:" frame. NOTE(review): the generator loops forever and never
    unsubscribes — a disconnecting client likely leaks its queue; confirm how
    Quart tears down streamed responses.
    """
    q = progress_bus.subscribe(sid)
    async def gen():
        while True:
            msg = await q.get()
            yield f"data: {msg}\n\n"
    return Response(
        gen(),
        content_type="text/event-stream",
        headers={"Cache-Control": "no-store"},
    )
@app.before_serving
async def _on_startup():
    """Reserved startup hook — real startup work happens in _launch_tasks."""
    pass

320
backend/web/db_extra.py Normal file
View File

@@ -0,0 +1,320 @@
"""
backend/web/db_extra.py - 16 May 2025
"""
from __future__ import annotations
import asyncio
import datetime as dt
from datetime import timezone
import structlog
from typing import List
from sqlalchemy import (
Table, Column, Text, Float, Integer, Boolean, DateTime, func,
select, insert, update, delete, or_, inspect, text
)
from sqlalchemy.dialects.postgresql import insert as pg_insert
from backend.core.db import SessionLocal, metadata, engine
from backend.core.settings import (
MAX_IN_USE, FAIL_COOLDOWN_SEC, MIN_SCORE, PROXY_LIST_FILE,
_WINDOW_MINUTES, PROXY_USERNAME, PROXY_PASSWORD,
_MAX_LOGIN_FAILS, _MAX_INVALID_URLS
)
log = structlog.get_logger()
# True when the configured engine talks to PostgreSQL (vs the SQLite fallback).
_IS_PG = engine.url.get_backend_name().startswith("postgres")
def _insert_ignore(tbl: Table, **vals):
    """Dialect-portable INSERT that silently skips primary-key conflicts."""
    if not _IS_PG:
        # SQLite spelling of "insert if absent".
        return insert(tbl).prefix_with("OR IGNORE").values(**vals)
    return pg_insert(tbl).values(**vals).on_conflict_do_nothing()
def _clamp_zero(expr):
    """SQLportable max(expr, 0).

    PostgreSQL has GREATEST; on SQLite the two-argument max() is a scalar
    maximum, so func.max(expr, 0) expresses the same clamp.
    """
    return func.greatest(expr, 0) if _IS_PG else func.max(expr, 0)
# Proxy pool with health scoring: score decays on failure, in_use counts live
# leases, last_fail drives the cooldown window used by _candidate_stmt().
proxy_tbl = Table(
    "proxies", metadata,
    Column("url", Text, primary_key=True),
    Column("score", Float, nullable=False, server_default="1.0"),
    Column("fails", Integer, nullable=False, server_default="0"),
    Column("banned", Boolean, nullable=False, server_default="false"),
    Column("in_use", Integer, nullable=False, server_default="0"),
    Column("last_fail", DateTime),
    Column("updated_at", DateTime, server_default=func.now(), index=True),
)
# Failed-login counter per IP (reset on success or window expiry).
login_tbl = Table(
    "login_attempts", metadata,
    Column("ip", Text, primary_key=True),
    Column("count", Integer, nullable=False, server_default="0"),
    Column("updated_at", DateTime, nullable=False, server_default=func.now()),
)
# Invalid-URL submission counter per IP.
invalid_tbl = Table(
    "invalid_urls", metadata,
    Column("ip", Text, primary_key=True),
    Column("count", Integer, nullable=False, server_default="0"),
    Column("updated_at", DateTime, nullable=False, server_default=func.now()),
)
# Rolling download success/failure log (pruned by add_dl_stat).
dl_stats = Table(
    "dl_stats", metadata,
    Column("id", Integer, primary_key=True, autoincrement=True),
    Column("ok", Boolean, nullable=False),
    Column("ts", DateTime, nullable=False, server_default=func.now(), index=True),
)
def _ensure_proxy_columns() -> None:
    """In-place micro-migration: add in_use/last_fail to an old proxies table."""
    inspector = inspect(engine)
    if "proxies" not in inspector.get_table_names():
        return
    present = {col["name"] for col in inspector.get_columns("proxies")}
    missing = [
        (name, ddl)
        for name, ddl in (("in_use", "INTEGER DEFAULT 0"), ("last_fail", "TIMESTAMP"))
        if name not in present
    ]
    if not missing:
        return
    with engine.begin() as conn:
        for name, ddl in missing:
            if _IS_PG:
                conn.execute(text(f"ALTER TABLE proxies ADD COLUMN IF NOT EXISTS {name} {ddl};"))
            else:
                conn.execute(text(f"ALTER TABLE proxies ADD COLUMN {name} {ddl};"))
    log.info("proxy.schema.auto_migrated", added=[name for name, _ in missing])
#metadata.create_all(engine)
_ensure_proxy_columns()
def _seed() -> None:
    """Load PROXY_LIST_FILE (one "ip:port" per line) into the proxies table.

    Robustness fix: blank lines were already skipped, but a line without a
    ":" separator crashed the whole seed with ValueError during unpacking —
    such lines (and "#" comment lines) are now skipped instead.
    """
    if not PROXY_LIST_FILE.exists():
        return
    with SessionLocal.begin() as s:
        for ln in PROXY_LIST_FILE.read_text().splitlines():
            ln = ln.strip()
            if not ln or ln.startswith("#") or ":" not in ln:
                continue
            ip, port = ln.split(":", 1)
            px = (
                f"http://{PROXY_USERNAME}:{PROXY_PASSWORD}@{ip}:{port}"
                if PROXY_USERNAME else f"http://{ip}:{port}"
            )
            s.execute(_insert_ignore(proxy_tbl, url=px))
def _candidate_stmt(now: dt.datetime):
    """Build the SELECT that picks one leasable proxy.

    Eligible rows: not banned, score above MIN_SCORE, lease count under
    MAX_IN_USE, and either never failed or past the FAIL_COOLDOWN_SEC window.
    A tiny random jitter is added to the ordering so equal-score proxies
    rotate instead of one always winning. The row is locked FOR UPDATE so
    concurrent acquirers don't double-lease (effect depends on the backend).
    """
    cool_ts = now - dt.timedelta(seconds=FAIL_COOLDOWN_SEC)
    jitter = func.random() * 0.01
    return (
        select(proxy_tbl.c.url)
        .where(
            proxy_tbl.c.banned.is_(False),
            proxy_tbl.c.score > MIN_SCORE,
            proxy_tbl.c.in_use < MAX_IN_USE,
            or_(proxy_tbl.c.last_fail.is_(None), proxy_tbl.c.last_fail < cool_ts),
        )
        .order_by((proxy_tbl.c.score + jitter).desc())
        .limit(1)
        .with_for_update(nowait=False)
    )
def acquire_proxy() -> str | None:
    """Lease the best available proxy, bumping its in_use counter; None when dry."""
    now = dt.datetime.now(timezone.utc)
    with SessionLocal.begin() as s:
        candidate = s.execute(_candidate_stmt(now)).first()
        if candidate is None:
            return None
        leased_url = candidate[0]
        s.execute(
            update(proxy_tbl)
            .where(proxy_tbl.c.url == leased_url)
            .values(in_use=proxy_tbl.c.in_use + 1, updated_at=now)
        )
    return leased_url
def release_proxy(px: str, ok: bool) -> None:
    """Return a leased proxy: decrement in_use (floored at 0); stamp last_fail on failure."""
    if not px or px == "DIRECT":
        return
    now = dt.datetime.now(timezone.utc)
    with SessionLocal.begin() as s:
        s.execute(
            update(proxy_tbl)
            .where(proxy_tbl.c.url == px)
            .values(
                in_use=_clamp_zero(proxy_tbl.c.in_use - 1),
                updated_at=now,
                last_fail=None if ok else now,
            )
        )
_buffer: asyncio.Queue[tuple[str, bool]] = asyncio.Queue(maxsize=2048)
def queue_proxy_result(px: str, ok: bool) -> None:
try:
_buffer.put_nowait((px, ok))
except asyncio.QueueFull:
try:
_buffer.get_nowait()
_buffer.put_nowait((px, ok))
except Exception:
pass
async def _flusher() -> None:
    """Background loop: every 0.4 s, drain queued proxy outcomes and apply them.

    Outcomes are aggregated per proxy into (successes, failures), then folded
    into the table in one transaction: score moves +0.1 per success / -0.2 per
    failure (floored at 0), the fail counter tracks net failures, and a proxy
    is flagged banned once its accumulated fails exceed 5.
    """
    while True:
        await asyncio.sleep(0.4)
        if _buffer.empty():
            continue
        # proxy url -> (success count, failure count)
        batch: dict[str, tuple[int, int]] = {}
        while not _buffer.empty():
            px, ok = _buffer.get_nowait()
            succ, fail = batch.get(px, (0, 0))
            if ok:
                succ += 1
            else:
                fail += 1
            batch[px] = (succ, fail)
        now = dt.datetime.now(timezone.utc)
        with SessionLocal.begin() as s:
            for px, (succ, fail) in batch.items():
                delta = 0.1 * succ - 0.2 * fail
                stmt = (
                    update(proxy_tbl)
                    .where(proxy_tbl.c.url == px)
                    .values(
                        score=_clamp_zero(proxy_tbl.c.score + delta),
                        fails=_clamp_zero(proxy_tbl.c.fails + fail - succ),
                        banned=(proxy_tbl.c.fails + fail) > 5,
                        updated_at=now,
                    )
                )
                s.execute(stmt)
def start_background_tasks(loop: asyncio.AbstractEventLoop) -> None:
    """Schedule the stat flusher and the thread-offloaded proxy seeding on *loop*."""
    for background_coro in (_flusher(), asyncio.to_thread(_seed)):
        loop.create_task(background_coro)
_WINDOW_N = 50
def add_dl_stat(ok: bool) -> None:
    """Append one download outcome and prune dl_stats to the 500 newest rows."""
    now = dt.datetime.now(timezone.utc)
    with SessionLocal.begin() as s:
        s.execute(insert(dl_stats).values(ok=ok, ts=now))
        # Rows whose id is among the 500 largest are the newest; delete the rest.
        newest_ids = (
            select(dl_stats.c.id)
            .order_by(dl_stats.c.id.desc())
            .limit(500)
        )
        s.execute(delete(dl_stats).where(~dl_stats.c.id.in_(newest_ids)))
def recent_success_rate(n: int = _WINDOW_N) -> float:
    """Fraction of the last *n* downloads that succeeded (0.5 with no history)."""
    with SessionLocal() as s:
        outcomes = s.execute(
            select(dl_stats.c.ok).order_by(dl_stats.c.id.desc()).limit(n)
        ).scalars().all()
        if not outcomes:
            # Neutral prior when there is nothing to average.
            return 0.5
        return sum(outcomes) / len(outcomes)
def _inc(table: Table, ip: str) -> None:
    """Create-or-increment the per-IP counter row in *table*.

    The increment is expressed server-side (count = count + 1) so two
    concurrent transactions cannot lose an update through the previous
    read-modify-write (`row.count + 1`) race.
    """
    now = dt.datetime.now(timezone.utc)
    with SessionLocal.begin() as s:
        row = s.execute(select(table).where(table.c.ip == ip)).first()
        if row is None:
            # NOTE(review): two simultaneous first hits from the same IP can
            # still race this INSERT; a dialect upsert would close that window.
            s.execute(insert(table).values(ip=ip, count=1, updated_at=now))
        else:
            s.execute(
                update(table)
                .where(table.c.ip == ip)
                .values(count=table.c.count + 1, updated_at=now)
            )
def record_login(ip: str, success: bool) -> None:
    """Reset the failed-login counter for *ip* on success; bump it on failure."""
    if not success:
        _inc(login_tbl, ip)
        return
    with SessionLocal.begin() as s:
        s.execute(update(login_tbl).where(login_tbl.c.ip == ip).values(count=0))
def inc_invalid(ip: str) -> None:
    """Increment the invalid-URL counter for *ip*."""
    _inc(invalid_tbl, ip)
def _over_limit(table: Table, ip: str, cap: int) -> bool:
    """Return True when *ip*'s counter in *table* has reached *cap*.

    Counters older than _WINDOW_MINUTES are treated as expired: the row is
    reset to zero and the IP is allowed through.
    """
    with SessionLocal() as s:
        row = s.execute(
            select(table.c.count, table.c.updated_at).where(table.c.ip == ip)
        ).first()
        if not row:
            # No record for this IP yet — under the limit by definition.
            return False
        count, ts = row
        now = dt.datetime.now(timezone.utc)
        # The DB may hand back a naive datetime; normalise to aware UTC
        # before subtracting (mixing naive/aware raises TypeError).
        if ts.tzinfo is None:
            ts = ts.replace(tzinfo=timezone.utc)
        if (now - ts).total_seconds() > _WINDOW_MINUTES * 60:
            # Window expired: reset the counter in its own write transaction.
            # NOTE(review): this opens a second session while the read session
            # is still open — confirm the DB backend tolerates the nesting.
            with SessionLocal.begin() as sx:
                sx.execute(update(table).where(table.c.ip == ip).values(count=0))
            return False
        return count >= cap
def too_many_attempts(ip: str) -> bool:
    """True when *ip* has reached the failed-login cap (_MAX_LOGIN_FAILS)."""
    return _over_limit(login_tbl, ip, _MAX_LOGIN_FAILS)
def invalid_over_limit(ip: str) -> bool:
    """True when *ip* has reached the invalid-URL cap (_MAX_INVALID_URLS)."""
    return _over_limit(invalid_tbl, ip, _MAX_INVALID_URLS)
def pick_proxy() -> str | None:
    """Alias for acquire_proxy(), kept for callers using the older name."""
    return acquire_proxy()
def ensure_proxy(px: str) -> None:
    """Insert *px* into the proxy table if not already present (insert-or-ignore)."""
    with SessionLocal.begin() as s:
        s.execute(_insert_ignore(proxy_tbl, url=px))
def update_proxy(px: str, ok: bool) -> None:
    """Buffer a proxy outcome for the batched flusher (alias of queue_proxy_result)."""
    queue_proxy_result(px, ok)
async def init_proxy_seed() -> None:
    """Run the blocking _seed() routine in a worker thread."""
    await asyncio.to_thread(_seed)

26
docker-entrypoint.sh Normal file
View File

@@ -0,0 +1,26 @@
#!/bin/bash
# Container entrypoint: run Alembic migrations once on an empty database,
# then exec the real server command passed as arguments.
set -e

# Fixed: the previous message claimed migrations were already running
# before the check below had decided whether to run them at all.
echo "📦 Checking database state..."

# Tell the app not to create tables itself; Alembic owns the schema here.
export SKIP_SCHEMA_BOOTSTRAP=1

# Probe for the 'users' table; prints 1 when missing, 0 otherwise.
MISSING=$(python3 <<EOF
from sqlalchemy import create_engine, inspect
from backend.core.settings import SQLALCHEMY_DATABASE_URI
engine = create_engine(SQLALCHEMY_DATABASE_URI)
inspector = inspect(engine)
tables = inspector.get_table_names()
print("1" if "users" not in tables else "0")
EOF
)

if [ "$MISSING" -eq "1" ]; then
    echo "'users' table missing. Running Alembic migrations..."
    alembic -c alembic.ini upgrade head
else
    echo "DB already initialized. Skipping migrations."
fi

# Hand off PID 1 to the CMD (hypercorn) so signals reach it directly.
exec "$@"

41
dockerfile Normal file
View File

@@ -0,0 +1,41 @@
# s1ne runtime image: Python 3.11 slim + ffmpeg, served by Hypercorn.
FROM python:3.11-slim

# No .pyc files; unbuffered stdout/stderr so container logs stream live.
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
# Make the backend package importable from the workdir.
ENV PYTHONPATH=/app

# System deps: ffmpeg for media work, build toolchain + headers for
# source-built wheels (lxml, pillow, cryptography, ...).
RUN apt-get update && \
apt-get install -y --no-install-recommends \
ffmpeg \
curl \
build-essential \
libffi-dev \
libssl-dev \
libxml2-dev \
libxslt1-dev \
libjpeg-dev \
zlib1g-dev \
git && \
apt-get clean && \
rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Install Python deps first so this layer caches across source-only changes.
COPY requirements.txt .
RUN pip install --upgrade pip && \
pip install --no-cache-dir -r requirements.txt

# Migration config and entrypoint copied explicitly for cache friendliness;
# `COPY . .` below brings in the rest of the source tree.
COPY alembic.ini ./alembic.ini
COPY backend/alembic ./backend/alembic
COPY docker-entrypoint.sh ./docker-entrypoint.sh
COPY . .

ENV QUART_ENV=production
EXPOSE 5000

# Entrypoint runs migrations when needed, then execs the CMD.
ENTRYPOINT ["./docker-entrypoint.sh"]
# CMD ["hypercorn", "app:app", "--bind", "0.0.0.0:5000", "--reload"]
CMD ["hypercorn", "backend.web.app:app", "--bind", "0.0.0.0:5000", "--worker-class", "asyncio"]

BIN
frontend/static/bloom.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 24 KiB

View File

@@ -0,0 +1,652 @@
/* casper color.css */
/* discord */
:root {
--discord-primary: #5865F2;
--discord-light: #E0E3FF;
--discord-dark-1: #2C2F33;
--discord-dark-2: #23272A;
--discord-text: #FFFFFF;
}
/* bandcamp */
:root {
--bandcamp-primary: #629AA9;
--bandcamp-light: #A0C3CC;
--bandcamp-dark-1: #1C1C1C;
--bandcamp-dark-2: #0F0F0F;
--bandcamp-text: #FFFFFF;
}
/* youtube */
:root {
--youtube-primary: #FF0000;
--youtube-light: #FF6666;
--youtube-dark-1: #282828;
--youtube-dark-2: #1F1F1F;
--youtube-text: #FFFFFF;
}
/* soundcloud */
:root {
--soundcloud-primary: #FF7700;
--soundcloud-light: #FFA64D;
--soundcloud-dark-1: #282828;
--soundcloud-dark-2: #1F1F1F;
--soundcloud-text: #FFFFFF;
}
/* twitter/x */
:root {
--twitter-primary: #1DA1F2;
--twitter-light: #A4D9F9;
--twitter-dark-1: #14171A;
--twitter-dark-2: #0D0F12;
--twitter-text: #FFFFFF;
}
:root {
--gray-1: #fcfcfd;
--gray-2: #f8f9fa;
--gray-3: #f1f3f5;
--gray-4: #e9ecef;
--gray-5: #dee2e6;
--gray-6: #ced4da;
--gray-7: #adb5bd;
--gray-8: #868e96;
--gray-9: #495057;
--gray-10: #343a40;
--gray-11: #212529;
--gray-12: #121619;
}
:root {
--mauve-1: #fcfcfd;
--mauve-2: #f8f8fa;
--mauve-3: #f3f3f6;
--mauve-4: #edeff1;
--mauve-5: #e8e8ea;
--mauve-6: #e2e2e5;
--mauve-7: #dcdde0;
--mauve-8: #d4d4d8;
--mauve-9: #babac0;
--mauve-10: #a1a1a8;
--mauve-11: #85858e;
--mauve-12: #3c3c44;
}
:root {
--slate-1: #f8f9fa;
--slate-2: #f1f3f5;
--slate-3: #e9ecef;
--slate-4: #e2e6ea;
--slate-5: #d9dde1;
--slate-6: #ced2d7;
--slate-7: #bec4ca;
--slate-8: #adb5bd;
--slate-9: #8b9aa7;
--slate-10: #728198;
--slate-11: #5c697f;
--slate-12: #3d5060;
}
:root {
--sage-1: #f8f9f4;
--sage-2: #f1f3e9;
--sage-3: #e9ece0;
--sage-4: #e2e6d6;
--sage-5: #dae0cc;
--sage-6: #ced4c1;
--sage-7: #bfc6b3;
--sage-8: #acb3a1;
--sage-9: #909982;
--sage-10: #767e69;
--sage-11: #5d6551;
--sage-12: #3c3f37;
}
:root {
--olive-1: #fbfefd;
--olive-2: #f7fbfa;
--olive-3: #f2f7f3;
--olive-4: #edf3ed;
--olive-5: #e8efe7;
--olive-6: #e1e9e0;
--olive-7: #dbe3da;
--olive-8: #d4ddd3;
--olive-9: #b8c0b9;
--olive-10: #9aa39c;
--olive-11: #7c867f;
--olive-12: #343d35;
}
:root {
--sand-1: #fcfaf7;
--sand-2: #faf6f1;
--sand-3: #f6f1ec;
--sand-4: #f2ece6;
--sand-5: #efebe1;
--sand-6: #ebe8dd;
--sand-7: #e7e4d8;
--sand-8: #e3dfd4;
--sand-9: #d0c9bf;
--sand-10: #b9b2a9;
--sand-11: #a29b93;
--sand-12: #79736d;
}
:root {
--gold-1: #fdfdfc;
--gold-2: #fbf9f2;
--gold-3: #f5f2e9;
--gold-4: #eeeadd;
--gold-5: #e5dfd0;
--gold-6: #dad1bd;
--gold-7: #cbbda4;
--gold-8: #b8a383;
--gold-9: #978365;
--gold-10: #8c795d;
--gold-11: #776750;
--gold-12: #3b352b;
}
:root {
--bronze-1: #fdfcfc;
--bronze-2: #fdf8f6;
--bronze-3: #f8f1ee;
--bronze-4: #f2e8e4;
--bronze-5: #eaddd7;
--bronze-6: #e0cec7;
--bronze-7: #d1b9b0;
--bronze-8: #bfa094;
--bronze-9: #a18072;
--bronze-10: #957468;
--bronze-11: #846358;
--bronze-12: #43302b;
}
:root {
--brown-1: #fefdfc;
--brown-2: #fcf9f6;
--brown-3: #f8f1eb;
--brown-4: #f2e8de;
--brown-5: #eaddd0;
--brown-6: #e0cebf;
--brown-7: #d1b9a4;
--brown-8: #bfa083;
--brown-9: #a18065;
--brown-10: #977459;
--brown-11: #84634a;
--brown-12: #433026;
}
:root {
--yellow-1: #fdfdfc;
--yellow-2: #fefce9;
--yellow-3: #fffab8;
--yellow-4: #fff394;
--yellow-5: #ffe770;
--yellow-6: #f3d768;
--yellow-7: #e4c767;
--yellow-8: #d5b862;
--yellow-9: #ffe629;
--yellow-10: #ffdc00;
--yellow-11: #946800;
--yellow-12: #35290f;
}
:root {
--amber-1: #fefdfb;
--amber-2: #fff9ed;
--amber-3: #fff4d5;
--amber-4: #ffecbc;
--amber-5: #ffe3a2;
--amber-6: #ffd386;
--amber-7: #f3ba63;
--amber-8: #ee9d2b;
--amber-9: #ffb224;
--amber-10: #ffa01c;
--amber-11: #ad5700;
--amber-12: #4e2009;
}
:root {
--orange-1: #fefcfb;
--orange-2: #fff7ed;
--orange-3: #ffefd6;
--orange-4: #ffe4b5;
--orange-5: #ffd599;
--orange-6: #ffc182;
--orange-7: #f5a65b;
--orange-8: #ec8a31;
--orange-9: #f76808;
--orange-10: #ed5f00;
--orange-11: #bd4b00;
--orange-12: #451e11;
}
:root {
--tomato-1: #fffcfc;
--tomato-2: #fff8f7;
--tomato-3: #ffefed;
--tomato-4: #ffe6e2;
--tomato-5: #fdd8d3;
--tomato-6: #fac7be;
--tomato-7: #f3b0a2;
--tomato-8: #ea9280;
--tomato-9: #e54d2e;
--tomato-10: #db4324;
--tomato-11: #ca3214;
--tomato-12: #341711;
}
:root {
--red-1: #fffcfc;
--red-2: #fff7f7;
--red-3: #ffefef;
--red-4: #ffe5e5;
--red-5: #fdd8d8;
--red-6: #f9c6c6;
--red-7: #f3aeaf;
--red-8: #eb9091;
--red-9: #e5484d;
--red-10: #dc3d43;
--red-11: #cd2b31;
--red-12: #381316;
}
:root {
--ruby-1: #fffcfd;
--ruby-2: #fff7f9;
--ruby-3: #feeff3;
--ruby-4: #ffe5eb;
--ruby-5: #fdd8e2;
--ruby-6: #f9c6d6;
--ruby-7: #f3aec4;
--ruby-8: #eb91af;
--ruby-9: #e54666;
--ruby-10: #db3b5b;
--ruby-11: #ca244d;
--ruby-12: #3a0c1c;
}
:root {
--crimson-1: #fffcfd;
--crimson-2: #fff7fb;
--crimson-3: #feeff6;
--crimson-4: #fce5f0;
--crimson-5: #f9d8e7;
--crimson-6: #f4c6db;
--crimson-7: #edadc8;
--crimson-8: #e58fb1;
--crimson-9: #e93d82;
--crimson-10: #df3476;
--crimson-11: #cb1d63;
--crimson-12: #35111f;
}
:root {
--pink-1: #fefcfd;
--pink-2: #fff7fb;
--pink-3: #feeff6;
--pink-4: #fce5f0;
--pink-5: #f9d8e7;
--pink-6: #f3c6db;
--pink-7: #ecadc8;
--pink-8: #e38fb1;
--pink-9: #d6409f;
--pink-10: #d23197;
--pink-11: #cd1d8d;
--pink-12: #3b0a2a;
}
:root {
--plum-1: #fefcff;
--plum-2: #fdf7fd;
--plum-3: #f8eff9;
--plum-4: #f3e7f4;
--plum-5: #eddcee;
--plum-6: #e3cce5;
--plum-7: #d3b3d8;
--plum-8: #be8cca;
--plum-9: #ab4aba;
--plum-10: #a43cb3;
--plum-11: #9c2bad;
--plum-12: #340c3b;
}
:root {
  --purple-1: #fefcfe;
  --purple-2: #fbf7fc;
  --purple-3: #f7eff9;
  --purple-4: #f2e7f4;
  --purple-5: #eadcf0;
  --purple-6: #e0cdec;
  --purple-7: #d1b5e7;
  --purple-8: #bc8dec;
  --purple-9: #8e4ec6;
  --purple-10: #8445b9;
  --purple-11: #793aab;
  /* --purple-12 was missing — every other scale in this file defines 12
     steps; value taken from the Radix Colors purple.12 swatch. */
  --purple-12: #402060;
}
:root {
--violet-1: #fdfcfe;
--violet-2: #fbfaff;
--violet-3: #f5f2ff;
--violet-4: #ede9fe;
--violet-5: #e4defc;
--violet-6: #d7cff9;
--violet-7: #c4b8f3;
--violet-8: #aa99ec;
--violet-9: #6e56cf;
--violet-10: #644fc1;
--violet-11: #5746af;
--violet-12: #20134b;
}
:root {
--iris-1: #fdfdff;
--iris-2: #f8f8ff;
--iris-3: #f0f1fe;
--iris-4: #e6e7ff;
--iris-5: #dadcff;
--iris-6: #cbceff;
--iris-7: #b8bcff;
--iris-8: #9b9eff;
--iris-9: #5b5bd6;
--iris-10: #5151c4;
--iris-11: #3e3eb0;
--iris-12: #1b1b66;
}
:root {
--indigo-1: #fdfdfe;
--indigo-2: #f8faff;
--indigo-3: #f0f4ff;
--indigo-4: #e6edfe;
--indigo-5: #d9e2fc;
--indigo-6: #c6d4f9;
--indigo-7: #aec0f5;
--indigo-8: #8da4ef;
--indigo-9: #3e63dd;
--indigo-10: #3a5ccc;
--indigo-11: #3451b2;
--indigo-12: #101d46;
}
:root {
--blue-1: #fbfdff;
--blue-2: #f4faff;
--blue-3: #e6f4fe;
--blue-4: #d5efff;
--blue-5: #c2e5ff;
--blue-6: #acd8fc;
--blue-7: #8ec8f6;
--blue-8: #5eb1ef;
--blue-9: #0091ff;
--blue-10: #0588f0;
--blue-11: #0d74ce;
--blue-12: #113264;
}
:root {
--sky-1: #f9feff;
--sky-2: #f1fafd;
--sky-3: #e1f6fb;
--sky-4: #d1f0f7;
--sky-5: #bee7f2;
--sky-6: #a9dce9;
--sky-7: #8dcedc;
--sky-8: #60bcd0;
--sky-9: #7ce2fe;
--sky-10: #74daf8;
--sky-11: #00749e;
--sky-12: #1d3e46;
}
:root {
--cyan-1: #fafdfe;
--cyan-2: #f2fbfd;
--cyan-3: #e7f9fb;
--cyan-4: #d8f3f6;
--cyan-5: #c4eaef;
--cyan-6: #aadee6;
--cyan-7: #84cdda;
--cyan-8: #3db9cf;
--cyan-9: #00a2c7;
--cyan-10: #0797b9;
--cyan-11: #107d98;
--cyan-12: #0d3c48;
}
:root {
--teal-1: #fafefd;
--teal-2: #f3fbf9;
--teal-3: #e6f7f4;
--teal-4: #d8f1ec;
--teal-5: #c7e8e2;
--teal-6: #b3dcd5;
--teal-7: #94ccc5;
--teal-8: #5bb9b1;
--teal-9: #12a594;
--teal-10: #0d9b8a;
--teal-11: #008573;
--teal-12: #0d3d38;
}
:root {
--jade-1: #f9fefd;
--jade-2: #effdf9;
--jade-3: #e1f9f2;
--jade-4: #d3f3ea;
--jade-5: #c2ebe0;
--jade-6: #ade0d3;
--jade-7: #8ecfbf;
--jade-8: #4cbba5;
--jade-9: #00a383;
--jade-10: #00997b;
--jade-11: #00826c;
--jade-12: #0d3c37;
}
:root {
--mint-1: #f9fefd;
--mint-2: #f2fcfa;
--mint-3: #dff9f2;
--mint-4: #ccf3ea;
--mint-5: #b8eae0;
--mint-6: #a1ded2;
--mint-7: #83cdc0;
--mint-8: #4fb9ab;
--mint-9: #86ead4;
--mint-10: #7de0cb;
--mint-11: #027d6c;
--mint-12: #16433c;
}
:root {
--green-1: #fbfefc;
--green-2: #f4fbf6;
--green-3: #e6f6eb;
--green-4: #d6f1df;
--green-5: #c4e8d1;
--green-6: #adddc0;
--green-7: #8eceaa;
--green-8: #5bb98b;
--green-9: #30a46c;
--green-10: #2b9a66;
--green-11: #218358;
--green-12: #193b2d;
}
:root {
--grass-1: #fbfefb;
--grass-2: #f3fbf3;
--grass-3: #e6f6e6;
--grass-4: #d6f1d6;
--grass-5: #c4e8c4;
--grass-6: #addcad;
--grass-7: #8ecf8e;
--grass-8: #5bb95b;
--grass-9: #30a930;
--grass-10: #2b9e2b;
--grass-11: #218821;
--grass-12: #193b19;
}
:root {
--background-color: #0d0d0d;
--text-color: #ffffff;
--light-purple: #c7a4f5;
--light-purple-hover: #dbb5fc;
--error-color: #FF5C5C;
--success-color: #7FD1AE;
--border-color: #2a2a2a;
--card-bg: #151515;
--input-bg: #1c1c1c;
--placeholder-color: #555;
--button-bg: #2d2d2d;
--button-border: #444;
--button-bg-hover: #3f3f3f;
--button-border-hover: #666;
--button-focus: #444;
--button-loading-bg: #555;
--button-loading-border: #666;
--button-loading-text: #ccc;
--login-card-border: #777;
--checkbox-bg: #333;
--checkbox-accent: #999;
--fetching-glow: #8aff8a;
--fetching-mid: #797979;
--progress-track-bg: #444;
--progress-indicator-bg: #77c;
--format-bg: #222;
--close-btn-color: #444;
--close-btn-hover: #e33;
--spinner-border: #777;
--spinner-top: #bbb;
}
.error-badge {
background-color: var(--error-color) !important;
color: #fff !important;
}
.success-badge {
background-color: var(--success-color) !important;
color: #000 !important;
}
:root,
[data-radix-themes="dark"] {
--badge-fill-error: rgba(239, 68, 68, 0.1);
--badge-border-error: #ef4444;
--badge-text-error: #f87171;
--error-bg: #a33636;
}
[data-radix-themes="light"] {
--badge-fill-error: rgba(255, 0, 0, 0.05);
--badge-border-error: #ff6b6b;
--badge-text-error: #e11d48;
}
.radix-badge {
display: inline-flex;
align-items: center;
padding: 2px 8px;
font-size: 0.77rem;
font-weight: 500;
font-family: 'Cascadia Code', monospace;
border-radius: 5px;
border: 1px solid;
text-transform: none;
letter-spacing: 0.02em;
margin: 0.25rem;
white-space: nowrap;
user-select: none;
transition: all 0.2s ease-in-out;
background-color: #2a2a2a;
}
.badge-row {
display: flex;
flex-wrap: nowrap;
justify-content: center;
align-items: center;
gap: 0.77rem;
text-align: center;
margin: 0.5rem auto;
}
/*
#a47d3f Golden brown (bottom bars)
#113264 Deep blue (top right)
#4e2009 Dark brown (top center)
#3a0c1c Deep burgundy (top left)
*/
.radix-badge.videos-badge {
background: var(--ruby-12);
border-color: var(--ruby-6);
/*color: var(--ruby-4); */
color:white;
}
.radix-badge.usage-badge {
background: var(--blue-12);
border-color: var(--blue-5);
/*color: var(--mauve-1); */
color:white;
}
.radix-badge.time-badge {
background: var(--violet-11);
border-color: var(--amber-6);
/*color: var(--amber-3); */
color:white;
}
.radix-badge.xp-level-badge,
.radix-badge.xp-total-badge {
background: var(--teal-12);
border-color: var(--teal-6);
/*color: var(--teal-1); */
color:white;
}
/* Tiers */
.radix-badge.tier-badge.rank-1 {
background: var(--amber-12);
border-color: var(--amber-5);
color: var(--mauve-1);
}
.radix-badge.tier-badge.rank-2 {
  /* Fixed `var(--gray12)` (missing hyphen): the variable is undefined, so
     the background never applied. Text switched to --gray-1 for contrast,
     matching the rank-1 / rank-3 badges (dark fill, light text). */
  background: var(--gray-12);
  border-color: var(--gray-5);
  color: var(--gray-1);
}
.radix-badge.tier-badge.rank-3 {
background: var(--brown-12);
border-color: var(--orange-5);
color: var(--gray-1);
}
.radix-badge.tier-badge.stamped {
background: var(--cyan-12);
border-color: var(--blue-5);
color: var(--gray-1);
}
.radix-badge.tier-badge.valid {
background: var(--green-12);
border-color: var(--green-5);
color: var(--gray-1);
}
.radix-badge.tier-badge.online {
background: var(--gray-11);
border-color: var(--gray-5);
color: var(--gray-1);
}
.radix-badge.tier-badge.limited {
background: var(--amber-12);
border-color: var(--amber-6);
color: var(--gray-1);
}
.radix-badge.tier-badge.offline {
background: var(--red-12);
border-color: var(--red-6);
color: var(--gray-1);
}
#platformBadge {
display: none;
margin-top: 0.55rem;
opacity: 0;
}
.badge-youtube {
background-color: var(--red-12)!important;
border-color: var(--red-8)!important;
color: white;
}
.badge-soundcloud {
background-color: var(--orange-9);
border-color: var(--orange-5);
color: black;
}
.badge-twitterx {
background-color: var(--gray-9);
border-color: var(--gray-5);
color: white;
}
.badge-bandcamp {
background-color: var(--indigo-9);
border-color: var(--indigo-5);
color: white;
}

View File

@@ -0,0 +1,240 @@
/*STYLE.CSS 25 Apr 2025*/
.dark-theme {
min-height: 100vh;
margin: 0;
overflow-y: hidden;/* default desktop */
font-family: sans-serif;
background-color: #0a0a0a;
color: var(--text-color);
}
@media (max-width: 600px) {
.dark-theme { overflow-y: auto; }
}
a { text-decoration: none; color: var(--light-purple); }
a:hover { color: var(--light-purple-hover); }
.user-card,
.download-section {
background-color: var(--card-bg);
padding: 1.5rem;
margin: 1rem 0;
border-radius: 8px;
border: 1px solid var(--border-color);
}
form { display: flex; flex-direction: column; gap: 1rem; }
.form-row { display: flex; flex-direction: column; gap: .3rem; }
input[type="text"],
select {
padding: .5rem;
border-radius: 4px;
border: 1px solid var(--border-color);
background-color: var(--input-bg);
color: var(--text-color);
}
input[type="text"]::placeholder{
color: var(--placeholder-color);
}
input[type="password"]::placeholder {
color: #e1e1e1;
}
input[type="password"],
select {
padding: .5rem;
border-radius: 4px;
border: 1px solid #151515;
background-color: transparent;
color: #e1e1e1;
}
input[type="password"]:focus,
select:focus {
outline: none;
border: 1px solid #151515;
box-shadow: none;
}
.radix-button {
display: inline-flex;
align-items: center;
justify-content: center;
gap: .5rem;
padding: .8rem 1.2rem;
font: 500 1rem/1 Sans-Serif;
background-color: var(--button-bg);
border: 1px solid var(--button-border);
border-radius: 5px;
color: var(--text-color);
cursor: pointer;
transition: background-color .2s, border-color .2s, opacity .2s;
}
.radix-button:hover { background-color: var(--button-bg-hover); border-color: var(--button-border-hover); }
.radix-button:focus { outline: none; box-shadow: 0 0 0 2px var(--button-focus); }
.radix-button:disabled { opacity: .6; cursor: not-allowed; }
.radix-button.loading { background-color: var(--button-loading-bg); border-color: var(--button-loading-border); color: var(--button-loading-text); cursor: progress; }
.centered-wrapper { width: 50%; max-width: 870px; margin: 0 auto; position: absolute; }
@media (max-width: 768px) { .centered-wrapper { width: 90%; } }
.login-card {
margin-top: 5px;
border: 1px solid var(--login-card-border);
width: 300px;
text-align: center;
position: absolute; top: 50%; left: 50%;
transform: translate(-50%, -50%);
background-color: transparent;
}
.main-container .bloom-logo {
position: absolute; top: 6px; left: 50%; transform: translate(-50%, -50%);
}
.center-container { position: relative; display: flex; flex-direction: column; align-items: center; }
.center-container .bloom-logo { position: relative; margin-bottom: 1rem; }
.main-container { position: relative; padding-top: 2.5rem; }
@media (max-width: 600px) { .main-container .bloom-logo { transform: translate(-30%, -30%); } }
@keyframes floatPulse { 0%{transform:translateY(0)}50%{transform:translateY(-4px)}100%{transform:translateY(0)} }
.bloom-logo {
height: 40px; width: 40px; border-radius: 6px; font-size: 1rem;
display: flex; align-items: center; justify-content: center; z-index: 10;
animation: floatPulse 2.4s ease-in-out infinite;
}
.checkbox-container { display: flex; align-items: center; gap: .5rem; background-color: var(--checkbox-bg); display:none;}
.remember-me { background-color: var(--checkbox-bg); accent-color: var(--checkbox-accent); width:16px; height:16px; cursor:pointer; display:none;}
#downloadURL:focus { outline: none; }
@keyframes breathe { 0%{border-color:var(--fetching-glow)}50%{border-color:var(--fetching-mid)}100%{border-color:var(--fetching-glow)} }
.fetching { animation: breathe 2.5s ease-in-out infinite; }
.xp-level-info { margin-top:5px; text-align:center; font-family:Arial,sans-serif; }
.radix-progress { width:80%; margin:8px auto; }
.radix-progress-track { background-color:var(--progress-track-bg); border-radius:5px; height:20px; overflow:hidden; }
.radix-progress-indicator { height:100%; background-color:var(--progress-indicator-bg); transition:width .5s ease; }
.format-selection-container {
display:none;
margin-top:1rem;
padding:1rem;
border:1px solid var(--border-color);
border-radius:8px;
background:var(--format-bg);
opacity:0;
position:relative;
}
.format-options { display:flex; flex-direction:column; gap:.3rem; margin:.8rem 0; }
.format-option input[type=radio]{ margin-right:8px; accent-color:#333; }
#closeFormatCard {
position:absolute; top:8px; right:12px; z-index:2;
font-size:20px; font-weight:bold; background:transparent; border:none;
color:var(--close-btn-color); cursor:pointer; transition:color .2s ease;
}
#closeFormatCard:hover { color: var(--close-btn-hover); }
.loading { opacity:.7; cursor:wait; }
@keyframes breathingPulse{
0% { transform:scale(1); opacity:.7; }
50% { transform:scale(1.02);opacity:1; }
100%{ transform:scale(1); opacity:.7; }
}
.breathing { animation: breathingPulse 1.4s ease-in-out infinite; display:flex; align-items:center; gap:.5rem; }
.radix-spinner {
width:20px; height:20px;
border:2px solid var(--spinner-border);
border-top:2px solid var(--spinner-top);
border-radius:50%;
animation:spin 1s linear infinite; margin-right:8px;
}
@keyframes spin { to{transform:rotate(360deg);} }
.radix-button.danger {
background-color: var(--error-bg);
border-color: var(--error-bg);
}
.radix-button.danger:hover { filter: brightness(0.9); }
.radix-button.hidden { display: none; }
.radix-button.small { padding: .4rem .6rem; font-size: .875rem; }
@keyframes breathe-green{0%{border-color:var(--fetching-glow)}50%{border-color:var(--fetching-mid)}100%{border-color:var(--fetching-glow)}}
@keyframes breathe-red{0%{border-color:var(--error-glow)}50%{border-color:var(--error-mid)}100%{border-color:var(--error-glow)}}
.fetching{animation:breathe-green 2.5s ease-in-out infinite;}
.erroring{animation:breathe-red 1.5s ease-in-out 1;}
:root[data-radix-theme="dark"]{
--error-glow:#ff5c5c;
--error-mid:#b83e3e;
}
@keyframes breathe-green{0%{border-color:var(--green-9)}50%{border-color:var(--green-7)}100%{border-color:var(--green-9)}}
@keyframes breathe-red {0%{border-color:var(--red-9)}50%{border-color:var(--red-7)}100%{border-color:var(--red-9)}}
.fetching{animation:breathe-green 2.5s ease-in-out infinite;}
.erroring{animation:breathe-red 1.6s ease-in-out 1;}
.radix-button.primary{background-color:var(--gray-10);border-color:var(--gray-10);color:var(--gray-1);}
.radix-button.primary:hover{filter:brightness(1.05);}
.radix-button.danger{background-color:var(--red-11);border-color:var(--red-10);color:var(--gray-1);}
.radix-button.danger:hover{filter:brightness(.95);}
@keyframes breatheText{0%{opacity:.8}50%{opacity:1}100%{opacity:.8}}
.breathing-btn{animation:breatheText 1.6s ease-in-out infinite;}
@keyframes breathe-green{0%{border-color:var(--green-9)}50%{border-color:var(--green-7)}100%{border-color:var(--green-9)}}
@keyframes breathe-red {0%{border-color:var(--red-9)}50%{border-color:var(--red-7)}100%{border-color:var(--red-9)}}
.fetching{animation:breathe-green 2.5s ease-in-out infinite;}
.erroring{animation:breathe-red 1.6s ease-in-out 1;}
#downloadURL:not(.fetching):not(.erroring):hover{
border-color:var(--gray-7);transition:border-color .15s ease;
}
.radix-badge.size-badge{
background-color:var(--gray-8);
border:1px solid var(--gray-7);
color:var(--gray-1);
padding:.15rem .45rem;
font-size:.75rem;
border-radius:4px;
white-space:nowrap;
display:inline-flex;
align-items:center;
gap:.2rem;
}
.radix-badge.size-badge{
padding:.15rem .45rem;font-size:.75rem;border-radius:4px;
display:inline-flex;align-items:center;gap:.15rem;
transition:background-color .25s ease,border-color .25s ease,color .25s ease;
}
.size-small {background-color:var(--green-9);border:1px solid var(--green-7);color:var(--gray-1);}
.size-medium{background-color:var(--amber-9);border:1px solid var(--amber-7);color:var(--gray-1);}
.size-large {background-color:var(--red-10);border:1px solid var(--red-8); color:var(--gray-1);}
.skeleton {
background:transparent;border:1px dashed var(--gray-7);color:transparent;
animation:skeletonPulse 1s ease-in-out infinite;
}
@keyframes skeletonPulse {0%,100%{opacity:.4}50%{opacity:1}}

1
frontend/static/js/anime.min.js vendored Normal file

File diff suppressed because one or more lines are too long

259
frontend/static/js/app.js Normal file
View File

@@ -0,0 +1,259 @@
/* frontend/static/js/app.js synced with backend changes */
/* jul 15 2025 */
const $ = (id) => document.getElementById(id);
const qs = (s) => encodeURIComponent(s);
const anime = window?.anime;
/* DOM references */
const urlForm = $("urlForm");
const urlInput = $("downloadURL");
const fmtCard = $("formatCard");
const platBadge = $("platformBadge");
const fmtOpt = $("formatOptions");
const hidUrl = $("hiddenUrl");
const hidSid = $("hiddenSid");
const dlBtn = $("downloadBtn");
const cancelBtn = $("cancelBtn");
const fmtTitle = $("formatTitle");
const fmtForm = $("formatForm");
const closeBtn = $("closeFormatCard");
/* Mutable state */
let fetching = false;
let downloading = false;
let esStream = null;
const sizeCache = Object.create(null);
let baseSid = sessionStorage.getItem("sid");
if (!baseSid) {
baseSid = crypto.randomUUID();
sessionStorage.setItem("sid", baseSid);
}
let sidSeq = 0;
const newSid = () => `${baseSid}-${(sidSeq++).toString(36)}`;
const hide = (el) => { el.classList.add("hidden"); el.style.display = "none"; };
const show = (el, display="block") => { el.classList.remove("hidden"); el.style.display = display; };
const errBlink = (el) => {
el.classList.add("erroring");
el.addEventListener("animationend", () => el.classList.remove("erroring"), { once:true });
};
/* Restore the download button to its idle (disabled) state. */
function resetBtn() {
  dlBtn.disabled = true;
  dlBtn.classList.remove("loading");
  dlBtn.textContent = "Download";
}
/* Fadeout animation for format card */
function fadeOutCard() {
  if (fmtCard.classList.contains("hidden")) return; // nothing to animate
  anime({
    targets: fmtCard,
    opacity: [1,0],
    translateY: [0,-10],
    easing: "easeInQuad",
    duration: 300,
    complete: () => {
      hide(fmtCard);
      fmtOpt.innerHTML = ""; // drop stale format radios
    }
  });
  platBadge.style.display = "none"; // badge hides immediately; card fades out
}
/* Return the whole form to its initial state: no download in flight,
   card dismissed, URL input cleared, enabled and refocused. */
function resetUI() {
  downloading = false;
  fadeOutCard();
  resetBtn();
  urlInput.value = "";
  urlInput.disabled = false;
  urlInput.classList.remove("fetching");
  urlInput.focus();
}
/* SSE helpers */
function closeStream() { esStream?.close(); esStream = null; }
/* Open the SSE progress stream for `sid`; on a terminal status, tear the UI
   down via finish() and invoke the optional `onFinish` callback. */
function startSSE(sid, onFinish = () => {}) {
  closeStream();
  esStream = new EventSource(`/api/progress/${sid}`);
  esStream.onmessage = ({ data }) => {
    let j;
    try {
      j = JSON.parse(data);
    } catch {
      // Fixed: a malformed SSE payload previously threw out of the handler;
      // ignore it and wait for the next event instead.
      return;
    }
    if (["finished","error","cancelled","cached"].includes(j.status)) {
      finish();
      onFinish();
    }
  };
  esStream.onerror = closeStream;
}
/* Finalise flow (download finished / errored / cancelled) */
/* Terminal cleanup shared by finished / errored / cancelled downloads. */
function finish() {
  closeStream();
  resetUI();
}
/* Cancel current download or dismiss card */
/* Cancel: abort the server-side download when one is running (cleaning up
   regardless of the response), otherwise just dismiss the format card. */
function cancelFlow() {
  if (downloading) {
    fetch(`/cancel_download?sid=${hidSid.value}`, { method:"POST" })
      .finally(finish);
  } else {
    fadeOutCard();
  }
}
/* Submit on Enter, but only when no fetch or download is in flight. */
urlInput.addEventListener("keydown", (e) => {
  if (e.key !== "Enter") return;
  e.preventDefault();
  if (!fetching && !downloading) urlForm.requestSubmit();
});
/* Main flow: submit a URL → ask the backend for available formats (with
   retry), then render the format picker card. */
urlForm.addEventListener("submit", async (e) => {
  e.preventDefault();
  if (fetching || downloading) return; // ignore re-entrant submits
  const raw = urlInput.value.trim();
  if (!raw) { errBlink(urlInput); return; }
  fetching = true;
  urlInput.classList.add("fetching");
  urlInput.disabled = true;
  // Ask /choose_format up to 3 times, backing off 750 ms * attempt.
  const maxRetries = 3;
  let lastError = null;
  let result = null;
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      const r = await fetch("/choose_format", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ url: raw })
      });
      result = await r.json().catch(() => ({ error: "invalid json" }));
      if (r.ok && result && !result.error && Array.isArray(result.formats)) {
        break;
      }
      lastError = result?.error || "Unknown error";
    } catch (err) {
      lastError = "Network error";
    }
    if (attempt < maxRetries) {
      await new Promise(res => setTimeout(res, 750 * attempt));
    }
  }
  // All retries exhausted or response unusable: blink and re-enable input.
  if (!result || result.error || !Array.isArray(result.formats)) {
    errBlink(urlInput);
    console.warn("choose_format failed:", lastError);
    fetching = false;
    urlInput.classList.remove("fetching");
    urlInput.disabled = false;
    return;
  }
  if (!result.sid) {
    errBlink(urlInput);
    console.error("Missing sid from server response");
    fetching = false;
    urlInput.classList.remove("fetching");
    urlInput.disabled = false;
    return;
  }
  const sid = result.sid;
  hidSid.value = sid; // ✅ Use server-issued SID only
  hidUrl.value = result.url;
  fmtTitle.textContent = result.title || "Select format";
  // Platform badge: pretty label + per-platform color class, animated in.
  const platform = (result.platform || "other").toLowerCase();
  platBadge.textContent = platform === "twitterx" ? "X / Twitter" :
  platform[0].toUpperCase() + platform.slice(1);
  platBadge.className = `radix-badge badge-${platform}`;
  show(platBadge, "inline-flex");
  platBadge.style.opacity = 0;
  anime({ targets: platBadge, translateY: [-10, 0], opacity: [0, 1], duration: 300 });
  // Build one radio row per offered format.
  fmtOpt.innerHTML = "";
  const sourceUrl = result.url;
  result.formats.forEach((f) => {
    const row = document.createElement("div");
    row.className = "format-option";
    const radio = document.createElement("input");
    radio.type = "radio";
    radio.id = `f_${f.format_id}`;
    radio.name = "format_id";
    radio.value = f.format_id;
    const label = document.createElement("label");
    label.htmlFor = radio.id;
    label.textContent = f.label;
    row.append(radio, label);
    // Size prefetch
    let hoverTimer = null;
    const fetchSize = async () => {
      if (sizeCache[f.format_id] || downloading) return;
      try {
        const szResp = await fetch(`/format_size?url=${qs(sourceUrl)}&fmt_id=${f.format_id}`);
        const js = await szResp.json();
        if (szResp.ok && js.size) {
          const mb = js.size / 1_048_576;
          sizeCache[f.format_id] = `${(mb > 99 ? Math.round(mb) : mb.toFixed(1))}\u00A0MB`;
          label.dataset.size = sizeCache[f.format_id];
        }
      } catch {}
    };
    // Only fetch the size after the cursor lingers 1.2 s on the row.
    row.addEventListener("mouseenter", () => hoverTimer = setTimeout(fetchSize, 1200));
    row.addEventListener("mouseleave", () => clearTimeout(hoverTimer));
    radio.addEventListener("change", () => { dlBtn.disabled = false; });
    fmtOpt.append(row);
  });
  show(fmtCard);
  fmtCard.style.opacity = 0;
  anime({ targets: fmtCard, opacity: [0, 1], translateY: [-10, 0], duration: 300 });
  fetching = false;
  urlInput.classList.remove("fetching");
  // NOTE(review): urlInput is left disabled here until resetUI() runs —
  // confirm that is intentional (blocks a second URL while the card is open).
});
/* Start the actual download: open the SSE progress stream, then point a
   hidden iframe at /download_file so the browser saves the response
   without navigating away. */
fmtForm.addEventListener("submit", (e) => {
  e.preventDefault();
  if (downloading) return;
  const fmtId = new FormData(fmtForm).get("format_id");
  if (!fmtId) { errBlink(dlBtn); return; } // no format selected
  downloading = true;
  const sid = hidSid.value;
  dlBtn.disabled = true;
  dlBtn.classList.add("loading");
  dlBtn.textContent = "Downloading";
  startSSE(sid);
  const ifr = document.createElement("iframe");
  ifr.style.display = "none";
  ifr.src = `/download_file?sid=${sid}&url=${qs(hidUrl.value)}&format_id=${fmtId}`;
  document.body.appendChild(ifr);
  /* fadeoutcard not needed */
  fadeOutCard();
});
/* Both the explicit Cancel button and the card's × share one handler. */
cancelBtn.addEventListener("click", cancelFlow);
closeBtn .addEventListener("click", cancelFlow);

View File

@@ -0,0 +1,38 @@
<!DOCTYPE html>
<html lang="en" data-radix-theme="dark">
<head>
<meta charset="utf-8"/>
<meta name="viewport" content="width=device-width,initial-scale=1.0"/>
<title>loader</title>
<link rel="stylesheet" href="{{ url_for('static', filename='css/color.css') }}">
<link rel="stylesheet" href="{{ url_for('static', filename='css/style.css') }}">
<script defer src="{{ url_for('static', filename='js/anime.min.js') }}"></script>
</head>
<body class="dark-theme">
<div class="main-container centered-wrapper">
<div class="download-section">
<form id="urlForm" action="/choose_format" method="POST">
<div class="form-row">
<input id="downloadURL" name="url" type="text" class="monospace" required>
</div>
</form>
<div id="platformBadge" class="radix-badge hidden">Platform</div>
<div id="formatCard" class="format-selection-container hidden">
<h3 id="formatTitle"></h3>
<button id="closeFormatCard" aria-label="Close formats">×</button>
<form id="formatForm" action="/download_file" method="GET">
<div id="formatOptions" class="format-options"></div>
<input id="hiddenUrl" name="url" type="hidden">
<input id="hiddenSid" name="sid" type="hidden">
<button id="downloadBtn" class="radix-button primary" type="submit" disabled>Download</button>
<button id="cancelBtn" type="button" class="radix-button danger hidden" style="display:none;"></button>
</form>
</div>
{% if soft_banned %}
<div id="soft-ban-badge" class="radix-badge error-badge">Soft Ban</div>
{% endif %}
</div>
</div>
<script type="module" defer src="{{ url_for('static', filename='js/app.js') }}"></script>
</body>
</html>

View File

@@ -0,0 +1,43 @@
<!DOCTYPE html>
<html lang="en" data-radix-themes="dark">
<head>
<meta charset="UTF-8">
<title>s1ne</title>
<meta http-equiv="Content-Security-Policy"
content="default-src 'self'; img-src 'self' data:; script-src 'self'; style-src 'self'">
<link rel="stylesheet" href="{{ url_for('static', filename='css/style.css') }}">
<script defer src="{{ url_for('static', filename='js/anime.min.js') }}"></script>
</head>
<body class="dark-theme" style="background-color:#0a0a0a;">
<div class="login-card" style="border:none;background-color:transparent;">
<div class="center-container">
<div class="bloom-logo">
<img width="33" height="33" src="{{ url_for('static', filename='bloom.png') }}" alt="Bloom logo">
</div>
</div>
{% if error_badge %}
<div class="radix-badge error-badge">{{ error_badge }}</div>
{% endif %}
{% if success_badge %}
<div class="radix-badge success-badge">{{ success_badge }}</div>
{% endif %}
{% if banned %}
<div class="radix-badge error-badge">Too many attempts.</div>
{% endif %}
<form action="{{ url_for('login') }}" method="POST">
<div>
<input type="password" name="password" id="password" required
autofocus placeholder="Key here..." >
</div>
<div class="checkbox-container" style="display:none;opacity:0;">
<input type="checkbox" name="remember_me" id="remember_me" class="remember-me" checked style="width:0%;opacity:0;display:none;">
<label for="remember_me" style="opacity:0;display:none;"></label>
</div>
<button type="submit" class="radix-button" style="margin-top:1rem;display:none;"></button>
</form>
</div>
<script>
anime({ targets: '.radix-badge', translateY:[-10,0], opacity:[0,1], duration:600, delay:anime.stagger(80) });
</script>
</body>
</html>

60
init_db_local.py Normal file
View File

@@ -0,0 +1,60 @@
"""Bootstrap the local SQLite development database.

Defines and creates the five tables used by the dev environment
(users, runs, repos, packages, output). Safe to re-run: SQLAlchemy's
``create_all`` only creates tables that do not already exist.
"""
from sqlalchemy import (
    Boolean,
    Column,
    Integer,
    MetaData,
    String,
    Table,
    Text,
    create_engine,
)

# Engine pointed at the on-disk dev database in the project root.
engine = create_engine("sqlite:///./dev.db")
metadata = MetaData()


def _pk() -> Column:
    """Integer auto-increment primary-key column shared by every table."""
    return Column("id", Integer, primary_key=True)


# Per-visitor record, keyed by IP address.
users = Table(
    "users",
    metadata,
    _pk(),
    Column("ip", String, unique=True, nullable=False),
    Column("xp", Integer, default=0),
    Column("soft_banned", Boolean, default=False),
    Column("first_visit", String),               # stored as text; could be DateTime
    Column("ban_status", String, default="ok"),  # or Integer if enum-based
)

# One row per processing run started by a user.
runs = Table(
    "runs",
    metadata,
    _pk(),
    Column("user_id", Integer, nullable=False),
    Column("created_at", String),
    Column("completed_at", String),
)

# Metadata scraped for a source repository.
repos = Table(
    "repos",
    metadata,
    _pk(),
    Column("url", Text, nullable=False),
    Column("owner", String),
    Column("name", String),
    Column("license", String),
    Column("language", String),
    Column("stars", Integer),
    Column("forks", Integer),
    Column("description", Text),
)

# Packages discovered inside a repository.
packages = Table(
    "packages",
    metadata,
    _pk(),
    Column("repo_id", Integer, nullable=False),
    Column("name", String, nullable=False),
    Column("version", String),
)

# Result payload produced by a run.
output = Table(
    "output",
    metadata,
    _pk(),
    Column("run_id", Integer, nullable=False),
    Column("summary", Text),
    Column("score", Integer),
    Column("raw", Text),
)

# Create any tables that are missing.
metadata.create_all(engine)
print("Database initialized with tables: users, runs, repos, packages, output")

18
playwright_cookies.txt Normal file
View File

@@ -0,0 +1,18 @@
# Netscape HTTP Cookie File
# This file is generated by yt-dlp. Do not edit.
.youtube.com TRUE / TRUE 1763394868 VISITOR_INFO1_LIVE EE6pKO7uPlI
.youtube.com TRUE / TRUE 1763394868 VISITOR_PRIVACY_METADATA CgJVUxIEGgAgCw%3D%3D
.youtube.com TRUE / FALSE 0 PREF tz=UTC&hl=en
.youtube.com TRUE / TRUE 1763385156 __Secure-ROLLOUT_TOKEN COCpnpCB2-2z5wEQ_cix1Mv4iwMY3pGs89G0jQM%3D
.youtube.com TRUE / TRUE 0 SOCS CAI
.youtube.com TRUE / TRUE 0 YSC -Il5msLzpMI
.youtube.com TRUE / TRUE 1763394868 YT_DEVICE_MEASUREMENT_ID oOjswVk=
.youtube.com TRUE / TRUE 1810914868 __Secure-YT_TVFAS t=483714&s=2
.youtube.com TRUE / TRUE 1763394868 DEVICE_INFO ChxOelE0T1RrME5EVTNORGN4T1RjNU1ERXlNQT09ELTut8EGGOjBxr8G
.youtube.com TRUE / TRUE 1747844668 GPS 1
.google.com TRUE / TRUE 1757183053 NID 522=K70f4UBzaoP4WpatQE2RJGs62FPpCvznNkoCUcXDDTLAg3DgABXZMJUfMsPTWdHIPtDmIi9iIBk7FLcbGKpEPPH994tXdUZiyfP46eH6QucL3ipEi5YP9kmAOej1Rjs3J0OU63xtxAbtWTD0I8CphCZ3HB1ZC_gzMZmc-P1HP-ShkNeXIsXcQ4zhrOPr6QIsJ2KhZsDkmA
.x.com TRUE / TRUE 1807754309 guest_id_marketing v1%3A174468230938246657
.x.com TRUE / TRUE 1807754309 guest_id_ads v1%3A174468230938246657
.x.com TRUE / TRUE 1807754309 personalization_id "v1_S6GfFeyWSxvzl018QY4kng=="
.x.com TRUE / TRUE 1807754309 guest_id v1%3A174468230938246657

65
pyproject.toml Normal file
View File

@@ -0,0 +1,65 @@
# pyproject.toml ────────────────────────────────────────────
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
# ─── Project metadata (PEP 621) ────────────────────────────
[project]
name = "s1ne"
version = "3.1.500"
description = "Async media downloader with proxy rotation and scoring, XP system, Quart web UI"
readme = "README.md"
requires-python = ">=3.11"
license = { text = "MIT" }
authors = [{ name = "c", email = "cx1t@pm.me" }]
# All runtime dependencies; mirrors requirements.txt
dependencies = [
"quart",
"hypercorn",
"nest_asyncio",
"aiofiles",
"yt-dlp",
"fake-useragent",
"python-dotenv",
"colorama",
"ffmpeg-python",
"fasteners",
"tls-client",
"structlog",
"psycopg2-binary",
"SQLAlchemy",
"cachetools"
]
[project.optional-dependencies]
dev = [
"black",
"ruff",
"pytest",
"httpx",
"watchfiles",
]
[project.urls]
Source = "https://github.com/casperedits/s1ne"
# ─── Setuptools knobs ──────────────────────────────────────
[tool.setuptools]
include-package-data = true
[tool.setuptools.packages.find]
include = ["backend", "backend.*"] # ship only backend package tree
exclude = ["tests*", "examples*"] # optional
# NEW ───── install everything in “frontend/” as share/s1ne/frontend/…
[tool.setuptools.data-files]
"share/s1ne/frontend" = [
"frontend/**", # recursively copy templates + static
]
# If you prefer explicit package-data instead of MANIFEST.in, you could add:
# [tool.setuptools.package-data]
# "backend.web" = ["../../frontend/templates/**/*.html", "../../frontend/static/**/*"]
#
# …but MANIFEST.in + include-package-data generally feels nicer.

16
requirements.txt Normal file
View File

@@ -0,0 +1,16 @@
quart
hypercorn
nest_asyncio
aiofiles
yt-dlp
python-dotenv
colorama
fake-useragent
ffmpeg-python
fasteners
tls-client
structlog
psycopg2-binary
sqlalchemy
cachetools
alembic

28
structure.txt Normal file
View File

@@ -0,0 +1,28 @@
backend/
|
├─ core/
│ ├─ settings.py ← Public Paths + Variables
│ ├─ logging.py ← Console Log
│ ├─ db.py ← SQLAlchemy Engine + Bootstrap Schema
│ ├─ db_xp.py ← XP + Users Table
│ ├─ db_cache.py ← Compat Layer
│ ├─ db_utils.py ← DB Upsert
│ ├─ progress_bus.py ← Progress
│ ├─ network.py ← Stealth Session
│ ├─ formats.py ← Format Selection + Caching
│ └─ download.py ← Downloader + Converter
|
|─ web/
| ├─ app.py ← Quart App
| └─ db_extra.py ← Proxy Pool + Download Stats + Abuse Windows
|
|─ data/
| ├─ users/
| └─ local.db ← Local Database for Dev Env
|
├─ .50.txt ← Proxy List
├─ alembic.ini             ← Alembic Migration Config
├─ dockerfile ← Docker File
├─ structure.txt ← Structure
├─ requirements.txt ← Dependencies
├─ playwright_cookies.txt ← YouTube Cookies