commit df3aa7fdcd2a0f9a180179e6f7e8626248a2d8b4
Author: Burathar
Date:   Wed Dec 16 13:10:34 2020 +0100

    Initial commit

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..29bb024
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,144 @@
+# Source: https://github.com/github/gitignore/blob/master/Python.gitignore
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+app.db
+.vscode/
+
diff --git a/app/__init__.py b/app/__init__.py
new file mode 100644
index 0000000..e686d47
--- /dev/null
+++ b/app/__init__.py
@@ -0,0 +1,47 @@
+from flask import Flask
+from config import Config
+from flask_sqlalchemy import SQLAlchemy
+from flask_migrate import Migrate
+from flask_bootstrap import Bootstrap
+import logging
+from logging.handlers import SMTPHandler, RotatingFileHandler
+import os
+
+app = Flask(__name__)
+app.config.from_object(Config)
+db = SQLAlchemy(app)
+migrate = Migrate(app, db)
+bootstrap = Bootstrap(app)
+
+
+from app import routes, models, errors, database_cleaner
+
+if not app.debug:
+    # Mail errors
+    if app.config['MAIL_SERVER']:
+        auth = None
+        if app.config['MAIL_USERNAME'] or app.config['MAIL_PASSWORD']:
+            auth = (app.config['MAIL_USERNAME'], app.config['MAIL_PASSWORD'])
+        secure = None
+        if app.config['MAIL_USE_TLS']:
+            secure = ()  # The secure argument is an empty tuple to use TLS without providing a specific certificate and key.
+        mail_handler = SMTPHandler(
+            mailhost=(app.config['MAIL_SERVER'], app.config['MAIL_PORT']),
+            fromaddr='no-reply@' + app.config['MAIL_SERVER'],
+            toaddrs=app.config['ADMINS'], subject='Link Shortener Failure',
+            credentials=auth, secure=secure)
+        mail_handler.setLevel(logging.ERROR)
+        app.logger.addHandler(mail_handler)
+    # Log to File
+    if not os.path.exists('logs'):
+        os.mkdir('logs')
+    file_handler = RotatingFileHandler('logs/linkshortener.log', maxBytes=10240, backupCount=10)
+    file_handler.setFormatter(logging.Formatter(
+        '%(asctime)s %(levelname)s: %(message)s'))
+    file_handler.setLevel(logging.INFO)
+    app.logger.addHandler(file_handler)
+
+    app.logger.setLevel(logging.INFO)
+    app.logger.info('Link Shortener startup')
+
+database_cleaner.start_scheduler()
diff --git a/app/database_cleaner.py b/app/database_cleaner.py
new file mode 100644
index 0000000..0faef0f
--- /dev/null
+++ b/app/database_cleaner.py
@@ -0,0 +1,46 @@
+import time
+from apscheduler.schedulers.background import BackgroundScheduler
+import atexit
+from datetime import datetime
+
+from app import app, db
+from app.models import Url
+
+def remove_dead_urls():
+    for url in Url.query.all():
+        check_url(url)
+
+def check_url(url):
+    if url.death is not None:
+        if datetime.now() > url.death:
+            app.logger.info(f"Removed hash '{url.hash}' because its retention time has passed.")
+            db.session.delete(url)
+            db.session.commit()
+            return "Timeout"
+    if url.view_counter is not None:
+        if url.view_counter <= 0:
+            app.logger.info(f"Removed hash '{url.hash}' because its view counter has run out.")
+            db.session.delete(url)
+            db.session.commit()
+            return "Counter"
+    return None
+
+scheduler = None
+
+def start_scheduler():
+    global scheduler
+    if scheduler is not None:
+        app.logger.info("Database_cleaner scheduler was already started, not launching a second instance.")
+        return
+    scheduler = BackgroundScheduler()
+    scheduler.add_job(func=remove_dead_urls, trigger="interval", hours=1)
+    scheduler.start()
+    app.logger.info("Database_cleaner scheduler was started.")
+
+    # Shut down the scheduler when exiting the app
+    atexit.register(shutdown_scheduler)
+
+def shutdown_scheduler():
+    scheduler.shutdown()
+    app.logger.info("Database_cleaner scheduler was shut down.")
+
\ No newline at end of file
diff --git a/app/errors.py b/app/errors.py
new file mode 100644
index 0000000..989fc38
--- /dev/null
+++ b/app/errors.py
@@ -0,0 +1,12 @@
+from flask import render_template
+from app import app, db
+
+@app.errorhandler(404)
+def not_found_error(error):
+    return render_template('404.html'), 404
+
+@app.errorhandler(500)
+def internal_error(error):
+    db.session.rollback()
+    return render_template('500.html'), 500
+
diff --git a/app/forms.py b/app/forms.py
new file mode 100644
index 0000000..96a0f4b
--- /dev/null
+++ b/app/forms.py
@@ -0,0 +1,9 @@
+from flask_wtf import FlaskForm
+from wtforms import StringField, IntegerField, SelectField, SubmitField
+from wtforms.validators import DataRequired, NumberRange
+
+class UrlForm(FlaskForm):
+    url = StringField('Url', validators=[DataRequired()])
+    retention = IntegerField('Retention', default=5, validators=[NumberRange(min=0, max=100)])
+    retention_type = SelectField('Retention', choices=['Minute', 'Hour', 'Day', 'Time'], default='Day', validators=[DataRequired()])
+    submit = SubmitField('Shorten Url')
diff --git a/app/models.py b/app/models.py
new file mode 100644
index 0000000..35476d2
--- /dev/null
+++ b/app/models.py
@@ -0,0 +1,16 @@
+from datetime import datetime
+from app import app, db
+
+class Url(db.Model):
+    id = db.Column(db.Integer, primary_key=True)
+    url = db.Column(db.String(2000), nullable=False)
+    hash = db.Column(db.String(20), index=True, unique=True, nullable=False)
+    birth = db.Column(db.DateTime, default=datetime.utcnow)
+    death = db.Column(db.DateTime)
+    view_counter = db.Column(db.Integer)
+
+    def __init__(self, **kwargs):
+        super(Url, self).__init__(**kwargs)
+
+        if not self.url.lower().startswith(('http://', 'https://')):
+            self.url = f'https://{self.url}'
diff --git a/app/routes.py b/app/routes.py
new file mode 100644
index 0000000..7b3f1b2
--- /dev/null
+++ b/app/routes.py
@@ -0,0 +1,77 @@
+from datetime import datetime, timedelta
+from secrets import token_urlsafe
+
+from flask import render_template, flash, redirect, url_for, abort, request, Markup
+from app import app, db
+from app.forms import UrlForm
+from app.models import Url
+from app.database_cleaner import check_url
+
+domain = 'http://127.0.0.1:5000'
+
+@app.before_request
+def before_request():
+    pass
+
+@app.route('/index', methods=['GET', 'POST'])
+def index():
+    form = UrlForm()
+    if form.validate_on_submit():
+        hash = getHash()
+        if hash is None:
+            return redirect(url_for('index'))
+
+        death = calcDeath(form.retention.data, form.retention_type.data)
+        view_counter = None if form.retention_type.data != "Time" else form.retention.data
+        if death is None and view_counter is None:
+            app.logger.warning("Neither death nor view_counter was received for url")
+            flash('Please specify a retention time')
+            return redirect(url_for('index'))
+
+        url = Url(url=form.url.data.strip(), hash=hash, death=death, view_counter=view_counter)
+        db.session.add(url)
+        db.session.commit()
+        death = None if url.death is None else url.death.strftime('%Y-%m-%d %H:%M:%S')
+        app.logger.info(f"{request.environ['REMOTE_ADDR']} created hash '{url.hash}' for '{url.url}'. Death: {death}, View Counter: {url.view_counter}")
+        link = url_for("resolve_hash", hash=hash, _external=True)
+        flash(Markup(f'Your url is shortened to <a href="{link}">{link}</a>'))
+        return redirect(url_for('index'))
+    return render_template("index.html", form=form)
+
+def getHash():
+    for i in range(200):
+        hash = token_urlsafe(3)
+        url = Url.query.filter_by(hash=hash).first()
+        if url is None and hash != 'index':
+            return hash
+
+    flash('Failed generating a unique hash. Please try again.')
+    return None
+
+def calcDeath(retention, retention_type):
+    if retention_type == 'Minute':
+        return datetime.now() + timedelta(minutes=retention)
+    if retention_type == 'Hour':
+        return datetime.now() + timedelta(hours=retention)
+    if retention_type == 'Day':
+        return datetime.now() + timedelta(days=retention)
+    if retention_type == 'Time':
+        return None
+    app.logger.error(f"Retention_type out of range: '{retention_type}'")
+
+@app.route('/', defaults={'hash': 'index'})
+@app.route('/<hash>')
+def resolve_hash(hash):
+    if hash == 'index':
+        return redirect(url_for('index'))
+
+    url = Url.query.filter_by(hash=hash).first_or_404()
+    if check_url(url) is not None:
+        abort(404)
+    if url.view_counter is not None:
+        url.view_counter -= 1
+        db.session.commit()
+
+    countermessage = "" if url.view_counter is None else f". View counter was lowered to {url.view_counter}"
+    app.logger.info(f"{request.environ['REMOTE_ADDR']} requested hash '{url.hash}' which resolved to '{url.url}'{countermessage}")
+    return redirect(url.url, 302)
diff --git a/app/templates/404.html b/app/templates/404.html
new file mode 100644
index 0000000..4e77079
--- /dev/null
+++ b/app/templates/404.html
@@ -0,0 +1,7 @@
+{% extends "base.html" %}
+
+{% block app_content %}
+    <h1>File Not Found</h1>
+    <p><a href="{{ url_for('index') }}">Back</a></p>
+{% endblock %}
+
diff --git a/app/templates/500.html b/app/templates/500.html
new file mode 100644
index 0000000..9fb3d07
--- /dev/null
+++ b/app/templates/500.html
@@ -0,0 +1,8 @@
+{% extends "base.html" %}
+
+{% block app_content %}
+    <h1>An unexpected error has occurred</h1>
+    <p>The administrator has been notified. Sorry for the inconvenience!</p>
+    <p><a href="{{ url_for('index') }}">Back</a></p>
+{% endblock %}
+
diff --git a/app/templates/base.html b/app/templates/base.html
new file mode 100644
index 0000000..698a24b
--- /dev/null
+++ b/app/templates/base.html
@@ -0,0 +1,30 @@
+{% extends 'bootstrap/base.html' %}
+
+{% block title %}
+Link Shortener
+{% endblock %}
+
+{% block content %}
+<div class="container">
+    <div class="row">
+        <div class="col-md-12">
+            {% with messages = get_flashed_messages() %}
+            {% if messages %}
+                {% for message in messages %}
+                <div class="alert alert-info" role="alert">{{ message }}</div>
+                {% endfor %}
+            {% endif %}
+            {% endwith %}
+
+            {# application content needs to be provided in the app_content block #}
+            {% block app_content %}{% endblock %}
+
+        </div>
+    </div>
+</div>
+{% endblock %}
+
+{% block scripts %}
+{{ super() }}
+{% endblock %}
+
diff --git a/app/templates/index.html b/app/templates/index.html
new file mode 100644
index 0000000..db528e3
--- /dev/null
+++ b/app/templates/index.html
@@ -0,0 +1,11 @@
+{% extends "base.html" %}
+{% import 'bootstrap/wtf.html' as wtf %}
+
+{% block app_content %}
+
+{% if form %}
+{{ wtf.quick_form(form, button_map={'submit': 'primary'}) }}
+{% endif %}
+
+{% endblock %}
+
diff --git a/config.py b/config.py
new file mode 100644
index 0000000..733f13e
--- /dev/null
+++ b/config.py
@@ -0,0 +1,16 @@
+import os
+basedir = os.path.abspath(os.path.dirname(__file__))
+
+class Config(object):
+    SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'
+    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
+        'sqlite:///' + os.path.join(basedir, 'app.db')
+    SQLALCHEMY_TRACK_MODIFICATIONS = False
+
+    MAIL_SERVER = os.environ.get('MAIL_SERVER')
+    MAIL_PORT = int(os.environ.get('MAIL_PORT') or 25)
+    MAIL_USE_TLS = os.environ.get('MAIL_USE_TLS') is not None
+    MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
+    MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
+    ADMINS = ['your-email@example.com']
+
diff --git a/linkshortener.py b/linkshortener.py
new file mode 100644
index 0000000..8d73323
--- /dev/null
+++ b/linkshortener.py
@@ -0,0 +1,6 @@
+from app import app, db
+from app.models import Url
+
+@app.shell_context_processor
+def make_shell_context():
+    return {'db': db, 'Url': Url}
diff --git a/migrations/README b/migrations/README
new file mode 100644
index 0000000..98e4f9c
--- /dev/null
+++ b/migrations/README
@@ -0,0 +1 @@
+Generic single-database configuration.
\ No newline at end of file
diff --git a/migrations/alembic.ini b/migrations/alembic.ini
new file mode 100644
index 0000000..f8ed480
--- /dev/null
+++ b/migrations/alembic.ini
@@ -0,0 +1,45 @@
+# A generic, single database configuration.
+
+[alembic]
+# template used to generate migration files
+# file_template = %%(rev)s_%%(slug)s
+
+# set to 'true' to run the environment during
+# the 'revision' command, regardless of autogenerate
+# revision_environment = false
+
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/migrations/env.py b/migrations/env.py
new file mode 100644
index 0000000..9452179
--- /dev/null
+++ b/migrations/env.py
@@ -0,0 +1,96 @@
+from __future__ import with_statement
+
+import logging
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+fileConfig(config.config_file_name)
+logger = logging.getLogger('alembic.env')
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+from flask import current_app
+config.set_main_option(
+    'sqlalchemy.url',
+    str(current_app.extensions['migrate'].db.engine.url).replace('%', '%%'))
+target_metadata = current_app.extensions['migrate'].db.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline():
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url, target_metadata=target_metadata, literal_binds=True
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online():
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+
+    # this callback is used to prevent an auto-migration from being generated
+    # when there are no changes to the schema
+    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
+    def process_revision_directives(context, revision, directives):
+        if getattr(config.cmd_opts, 'autogenerate', False):
+            script = directives[0]
+            if script.upgrade_ops.is_empty():
+                directives[:] = []
+                logger.info('No changes in schema detected.')
+
+    connectable = engine_from_config(
+        config.get_section(config.config_ini_section),
+        prefix='sqlalchemy.',
+        poolclass=pool.NullPool,
+    )
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection,
+            target_metadata=target_metadata,
+            process_revision_directives=process_revision_directives,
+            **current_app.extensions['migrate'].configure_args
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
diff --git a/migrations/script.py.mako b/migrations/script.py.mako
new file mode 100644
index 0000000..2c01563
--- /dev/null
+++ b/migrations/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade():
+    ${downgrades if downgrades else "pass"}
diff --git a/migrations/versions/3c65fc2aac0a_initial_migration.py b/migrations/versions/3c65fc2aac0a_initial_migration.py
new file mode 100644
index 0000000..a200608
--- /dev/null
+++ b/migrations/versions/3c65fc2aac0a_initial_migration.py
@@ -0,0 +1,38 @@
+"""initial migration
+
+Revision ID: 3c65fc2aac0a
+Revises: 
+Create Date: 2020-12-15 23:06:09.507454
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '3c65fc2aac0a'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('url',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('url', sa.String(length=2000), nullable=False),
+    sa.Column('hash', sa.String(length=20), nullable=False),
+    sa.Column('birth', sa.DateTime(), nullable=True),
+    sa.Column('death', sa.DateTime(), nullable=True),
+    sa.Column('view_counter', sa.Integer(), nullable=True),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index(op.f('ix_url_hash'), 'url', ['hash'], unique=True)
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index(op.f('ix_url_hash'), table_name='url')
+    op.drop_table('url')
+    # ### end Alembic commands ###
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..2e6a167
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,32 @@
+alembic==1.4.3
+APScheduler==3.6.3
+astroid==2.4.2
+click==7.1.2
+dominate==2.6.0
+Flask==1.1.2
+Flask-Bootstrap==3.3.7.1
+Flask-Migrate==2.5.3
+Flask-SQLAlchemy==2.4.4
+Flask-WTF==0.14.3
+isort==5.6.4
+itsdangerous==1.1.0
+Jinja2==2.11.2
+lazy-object-proxy==1.4.3
+Mako==1.1.3
+MarkupSafe==1.1.1
+mccabe==0.6.1
+pylint==2.6.0
+pylint-flask-sqlalchemy==0.2.0
+python-dateutil==2.8.1
+python-dotenv==0.15.0
+python-editor==1.0.4
+pytz==2020.4
+six==1.15.0
+SQLAlchemy==1.3.20
+toml==0.10.2
+typed-ast==1.4.1
+tzlocal==2.1
+visitor==0.1.3
+Werkzeug==1.0.1
+wrapt==1.12.1
+WTForms==2.3.3
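
The snippet below is not part of the commit; it is a minimal sketch of how the retention logic introduced above can be exercised from "flask shell" (linkshortener.py already exposes db and Url in the shell context). The hashes and URLs in it are made-up example values, and it assumes the default SQLite database from config.py.

# Sketch: create two links by hand and watch check_url() clean them up.
from datetime import datetime, timedelta

from app import db
from app.models import Url
from app.database_cleaner import check_url

# A link that expires one minute from now (in the app the hash comes from token_urlsafe(3)).
expiring = Url(url='example.com', hash='abc123',
               death=datetime.now() + timedelta(minutes=1))
# A link that dies after three visits.
counted = Url(url='example.com/docs', hash='def456', view_counter=3)
db.session.add_all([expiring, counted])
db.session.commit()

print(expiring.url)         # 'https://example.com' - Url.__init__ prepends a scheme
print(check_url(expiring))  # None: its retention time has not passed yet
print(check_url(counted))   # None: its view counter is still positive

counted.view_counter = 0
db.session.commit()
print(check_url(counted))   # 'Counter': the row is deleted and the deletion committed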