|
| 1 | +import os |
| 2 | +import sys |
| 3 | +import multiprocessing |
| 4 | +import json |
| 5 | +import gzip |
| 6 | +import mimetypes |
| 7 | + |
| 8 | +import psycopg_pool |
| 9 | +import psycopg.rows |
| 10 | + |
| 11 | +from flask import Flask, request, make_response, Response |
| 12 | +from flask import send_from_directory, jsonify |
| 13 | + |
| 14 | + |
# Flask app with the built-in static route disabled; /static is handled by a
# custom endpoint further down in this file.
app = Flask(__name__, static_folder = None)
# Emit compact JSON (no pretty-printing) to keep payloads small.
app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
| 17 | + |
| 18 | + |
# -- Dataset and constants --------------------------------------------------------

# Total logical CPUs on the machine (cpu_count already returns an int).
CPU_COUNT = multiprocessing.cpu_count()
# Worker count: CPUs actually available to this process, clamped to [4, 128].
WRK_COUNT = min(len(os.sched_getaffinity(0)), 128)
WRK_COUNT = max(WRK_COUNT, 4)

DATASET_LARGE_PATH = "/data/dataset-large.json"
DATASET_PATH = os.environ.get("DATASET_PATH", "/data/dataset.json")
# Parsed dataset (expected: list of item dicts), or None when the file is
# missing or unparsable; endpoints check for None and report it themselves.
DATASET_ITEMS = None
try:
    with open(DATASET_PATH, encoding="utf-8") as file:
        DATASET_ITEMS = json.load(file)
except (OSError, ValueError):
    # Best effort: OSError covers missing/unreadable files, ValueError covers
    # json.JSONDecodeError and bad encodings. Anything else should surface.
    pass
| 33 | + |
| 34 | + |
# -- Postgres DB ------------------------------------------------------------

DATABASE_URL = os.environ.get("DATABASE_URL", '')
DATABASE_POOL = None
# Single parameterized statement: price-range scan with a row cap.
DATABASE_QUERY = (
    "SELECT id, name, category, price, quantity, active, tags, rating_score, rating_count"
    " FROM items"
    " WHERE price BETWEEN %s AND %s LIMIT %s"
)
# psycopg only accepts the "postgresql://" scheme; normalize the legacy form.
if DATABASE_URL.startswith("postgres://"):
    DATABASE_URL = DATABASE_URL.replace("postgres://", "postgresql://", 1)

PG_POOL_MIN_SIZE = 1
PG_POOL_MAX_SIZE = 2
| 49 | + |
def db_close():
    """Close and discard the global connection pool, swallowing close errors."""
    global DATABASE_POOL
    pool = DATABASE_POOL
    if pool:
        try:
            pool.close()
        except Exception:
            # Even a failed close means the pool is unusable; drop it anyway.
            pass
        DATABASE_POOL = None
| 58 | + |
def db_setup():
    """(Re)build the global Postgres connection pool from environment config.

    Reads DATABASE_URL and (optionally) DATABASE_MAX_CONN. The per-worker pool
    is sized so that all WRK_COUNT workers together stay under the server's
    connection cap, with ~8% headroom. Leaves DATABASE_POOL as None when no
    URL is configured or the pool cannot be created.
    """
    global DATABASE_POOL, DATABASE_URL, PG_POOL_MIN_SIZE, PG_POOL_MAX_SIZE, WRK_COUNT
    db_close()
    if not DATABASE_URL:
        return
    max_conn = os.environ.get("DATABASE_MAX_CONN", None)
    if max_conn:
        try:
            # Split the (slightly discounted) server cap across the workers.
            avr_pool_size = int(max_conn) * 0.92 / WRK_COUNT
            PG_POOL_MAX_SIZE = int(avr_pool_size + 0.95)
        except ValueError:
            # Malformed DATABASE_MAX_CONN previously crashed import (the int()
            # call was outside any try); fall back to the default pool sizing.
            pass
    try:
        DATABASE_POOL = psycopg_pool.ConnectionPool(
            conninfo = DATABASE_URL,
            min_size = max(PG_POOL_MIN_SIZE, 1),
            max_size = max(PG_POOL_MAX_SIZE, 2),
            kwargs = { 'row_factory': psycopg.rows.dict_row },
        )
    except Exception:
        # No DB available: endpoints degrade gracefully when the pool is None.
        DATABASE_POOL = None
| 79 | + |
# Build the pool once at import time so workers start ready to serve.
db_setup()
| 81 | + |
| 82 | + |
| 83 | +# -- flask features ---------------------------------------------------------- |
| 84 | + |
@app.after_request
def compress_response(response):
    """Gzip the response body when the client advertises gzip support.

    Skipped for 1xx/204/206/304 responses, already-encoded responses, and
    empty bodies; if the body cannot be buffered the response passes through
    unmodified.
    """
    status = response.status_code
    if status < 200 or status in (204, 304, 206):
        return response

    if 'gzip' not in request.headers.get('Accept-Encoding', ''):
        return response

    if response.headers.get('Content-Encoding'):
        return response

    if response.content_length == 0:
        return response

    try:
        payload = response.get_data()
    except Exception:
        # e.g. direct-passthrough responses cannot be buffered; send as-is.
        return response

    if isinstance(payload, str):
        payload = payload.encode('utf-8')

    gz = make_response(gzip.compress(payload, compresslevel = 5))
    gz.headers.update(response.headers)
    gz.headers['Content-Encoding'] = 'gzip'
    # Drop the stale uncompressed length; Flask recomputes it on send.
    gz.headers.pop('Content-Length', None)
    return gz
| 118 | + |
| 119 | + |
| 120 | +# -- Routes ------------------------------------------------------------------ |
| 121 | + |
@app.route('/pipeline')
def pipeline():
    """Minimal liveness endpoint: constant bytes body, no processing."""
    return b'ok'
| 125 | + |
| 126 | + |
@app.route('/baseline11', methods=['GET', 'POST'])
def baseline11():
    """Sum integer query-string values, plus an integer POST body if present.

    Non-integer values contribute nothing (int() on str/bytes raises
    ValueError, which is treated as zero).
    """
    def _to_int(raw):
        # int() accepts both str and bytes; invalid input counts as 0.
        try:
            return int(raw)
        except ValueError:
            return 0

    acc = sum(_to_int(value) for value in request.args.values())
    if request.method == 'POST' and request.data:
        acc += _to_int(request.data.strip())
    return str(acc)
| 141 | + |
| 142 | + |
@app.route('/json/<int:count>')
@app.route('/json-comp/<int:count>')
def json_endpoint(count: int):
    """Return up to `count` dataset items, each with a computed `total` field.

    Query param `m` (float, default 1) scales the total. Responds 500 when
    the dataset failed to load at startup.
    """
    if not DATASET_ITEMS:
        return Response("No dataset", status=500)
    multiplier = request.args.get('m', 1, type=float)
    # Shallow-copy each source item so the shared dataset is never mutated.
    selected = [
        { **source, "total": source["price"] * source["quantity"] * multiplier }
        for source in DATASET_ITEMS[:count]
    ]
    return { 'items': selected, 'count': len(selected) }
| 158 | + |
| 159 | + |
@app.route('/async-db')
def async_db_endpoint():
    """Fetch items in a price range from Postgres and return them as JSON.

    Query params: `min` / `max` (float price bounds) and `limit` (row cap).
    Returns an empty result when no pool is configured or any step fails.
    """
    global DATABASE_POOL
    if not DATABASE_POOL:
        return { "items": [ ], "count": 0 }

    def _shape(row):
        # Re-nest the flat DB row; `tags` may arrive JSON-encoded as text.
        raw_tags = row['tags']
        return {
            'id'      : row['id'],
            'name'    : row['name'],
            'category': row['category'],
            'price'   : row['price'],
            'quantity': row['quantity'],
            'active'  : row['active'],
            'tags'    : json.loads(raw_tags) if isinstance(raw_tags, str) else raw_tags,
            'rating'  : {
                'score': row['rating_score'],
                'count': row['rating_count'],
            },
        }

    try:
        price_lo  = request.args.get('min', type=float)
        price_hi  = request.args.get('max', type=float)
        row_limit = request.args.get('limit', type=int)
        with DATABASE_POOL.connection() as conn:
            rows = conn.execute(DATABASE_QUERY, (price_lo, price_hi, row_limit)).fetchall()
            items = [_shape(row) for row in rows]
        return { "items": items, "count": len(items) }
    except Exception:
        # Best-effort endpoint: any DB error degrades to an empty result.
        return { "items": [ ], "count": 0 }
| 190 | + |
| 191 | + |
@app.route('/upload', methods=['POST'])
def upload_endpoint():
    """Drain the request body in 256 KiB chunks and return the byte count."""
    received = 0
    chunk = request.stream.read(256*1024)
    while chunk:
        received += len(chunk)
        chunk = request.stream.read(256*1024)
    return str(received)
| 201 | + |
| 202 | + |
# Register extra MIME types for static serving. Note mimetypes.add_type(type,
# ext) takes the MIME type FIRST and the extension second — the original call
# had the arguments reversed, so these registrations had no effect.
mimetypes.add_type('font/woff2', '.woff2')
mimetypes.add_type('image/webp', '.webp')
| 205 | + |
@app.route('/static/<path:filepath>')
def static_endpoint(filepath):
    """Serve a file from /data/static (send_from_directory rejects paths
    that escape the directory)."""
    return send_from_directory('/data/static', filepath)
0 commit comments