Skip to content

Commit 363d973

Browse files
committed
Merge commit 'refs/pull/325/head' of github.com:ail-project/ail-framework
2 parents 2b5cfe3 + 2a0ed60 commit 363d973

1 file changed

Lines changed: 61 additions & 0 deletions

File tree

var/www/blueprints/api_rest.py

Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -164,6 +164,67 @@ def lacus_cookiejar_import():
164164
return Response(json.dumps(res[0]), mimetype='application/json'), res[1]
165165

166166

167+
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
168+
# # # # # # # # # # # # CRAWLER SCHEDULER / STATS # # # # # # # # # # # # # #
169+
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
170+
171+
@api_rest.route("api/v1/crawler/scheduler", methods=['GET'])
@token_required('user')
def get_crawler_scheduler():
    """Return metadata for all configured crawler schedulers as JSON (HTTP 200)."""
    metas = crawlers.get_schedulers_metas()
    # json can't serialize sets; normalize any 'tags' set into a list in place.
    for entry in metas:
        tags = entry.get('tags')
        if isinstance(tags, set):
            entry['tags'] = list(tags)
    return create_json_response(metas, 200)
180+
181+
@api_rest.route("api/v1/crawler/schedule/<path:schedule_uuid>", methods=['DELETE'])
@token_required('admin')
def delete_crawler_schedule(schedule_uuid):
    """Delete the crawler schedule identified by *schedule_uuid* (admin only).

    Returns the API payload and status code from crawlers.api_delete_schedule,
    with non-JSON-serializable values stringified first.
    """
    res = crawlers.api_delete_schedule({'uuid': schedule_uuid})
    payload = res[0]
    # Error payloads may embed objects json can't serialize (e.g. the 'uuid'
    # entry may be a CrawlerSchedule instance); stringify anything that is not
    # a native JSON type.
    if isinstance(payload, dict):
        json_native = (str, int, float, bool, list, dict, type(None))
        payload = {key: (value if isinstance(value, json_native) else str(value))
                   for key, value in payload.items()}
    return create_json_response(payload, res[1])
193+
194+
@api_rest.route("api/v1/crawler/captures/status", methods=['GET'])
@token_required('user')
def get_crawler_captures_status():
    """Return the current status of crawler captures as JSON (HTTP 200)."""
    return create_json_response(crawlers.get_captures_status(), 200)
199+
200+
@api_rest.route("api/v1/crawler/stats", methods=['GET'])
@token_required('user')
def get_crawler_stats():
    """Return crawler statistics as JSON, optionally filtered by the
    ``domain_type`` query parameter (None when absent)."""
    dom_type = request.args.get('domain_type')
    stats = crawlers.get_crawlers_stats(domain_type=dom_type)
    return create_json_response(stats, 200)
206+
207+
@api_rest.route("api/v1/crawler/blocklist", methods=['GET'])
@token_required('admin')
def get_crawler_blocklist():
    """Return the crawler domain blocklist as a JSON list (admin only)."""
    blocklist = crawlers.get_blacklist()
    # Wrapped in list() because the result is presumably a set or other
    # non-list iterable that json can't serialize directly — TODO confirm.
    return create_json_response(list(blocklist), 200)
212+
213+
@api_rest.route("api/v1/crawler/blocklist", methods=['POST'])
@token_required('admin')
def add_crawler_blocklist():
    """Add a domain to the crawler blocklist from the JSON request body
    (admin only); returns the API payload and status code."""
    result = crawlers.api_blacklist_domain(request.get_json())
    return create_json_response(result[0], result[1])
219+
220+
@api_rest.route("api/v1/crawler/blocklist/<path:domain>", methods=['DELETE'])
@token_required('admin')
def delete_crawler_blocklist(domain):
    """Remove *domain* from the crawler blocklist (admin only); returns the
    API payload and status code."""
    result = crawlers.api_unblacklist_domain({'domain': domain})
    return create_json_response(result[0], result[1])
226+
227+
167228
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
168229
# # # # # # # # # # # # # # IMPORTERS # # # # # # # # # # # # # # # # #
169230
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

0 commit comments

Comments
 (0)