# fachschaften/compiler/views.py
from flask import Blueprint, jsonify, render_template, abort, redirect, url_for, request, Response
import flask
import json

import mworker
from compiler import do_compile
from fetching import fetch_page
from src import clogger
from src.articles import Article
from src.database import db_session, db_session2, init_db, init_db2, read_json
from src.sections import Section
from .controller import urls_test, start_workers, urls_que, url_add, urls_que_upd
from .models import CrawlUrl, CrawlUrlSchema, CrawlCache, CrawlCacheSchema

compiler_pages = Blueprint('compiler', __name__,
                           template_folder='.')

# A global JSON encoder hook for the models was sketched here but left disabled:
# flask.json.JSONEncoder.default = lambda self, obj: ((obj.__json__()) if isinstance(obj, (Article, CrawlUrl)) else None)

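# A registration sketch for this blueprint; the application factory and the
# url_prefix are assumptions, not taken from this file:
#   from flask import Flask
#   app = Flask(__name__)
#   app.register_blueprint(compiler_pages, url_prefix='/compiler')
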
@compiler_pages.route("/")
@compiler_pages.route("")
@compiler_pages.route(".json")
def index():
status="For documentation goto /doc"
return jsonify(status=status)
@compiler_pages.route("/doc")
@compiler_pages.route("/doc.json")
def doc():
return render_template("README")
# return jsonify(status=render_template("README"))
#
@compiler_pages.route("/initdb")
@compiler_pages.route("/initdb.json")
def initdb_json():
init_db() # initialisiere Datenbank
status="Datenbank Neu initialisiert"
return jsonify(status=status)
@compiler_pages.route("/initdb2")
@compiler_pages.route("/initdb2.json")
def initdb_json2():
init_db2() # initialisiere Datenbank
status="Datenbank Neu initialisiert"
return jsonify(status=status)
@compiler_pages.route("/start")
@compiler_pages.route("/start.json")
def start_json():
start_workers() # initialisiere Datenbank
status="Worker gestartet"
return jsonify(status=status)
@compiler_pages.route("/urls")
@compiler_pages.route("/urls.json")
def urls_index_json():
# Lade Alle Urls
status=CrawlUrl.query.all()
return jsonify(urls=status)
@compiler_pages.route("/urls.lst")
def urls_lst():
cus=CrawlUrl.query.all()
urls=map((lambda cu: ("id %d %s " % (cu.id, cu.url))),cus)
urls=map((lambda u: u+"\n"),urls)
return Response(urls,mimetype='text/plain')
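# Example output of /urls.lst (illustrative values only; note the trailing
# space produced by the format string above):
#   id 1 http://example.org/page
#   id 2 http://example.org/other
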
# show an existing CrawlUrl together with its cache entry
@compiler_pages.route("/urls/<int:id>")
@compiler_pages.route("/urls/<int:id>.json")
def urls_json(id):
    cu = CrawlUrl.query.get(id)
    cc = CrawlCache.query.filter(CrawlCache.url == cu.url).first()
    return jsonify(urls=cu, cache=cc.__json__() if cc is not None else None)

# que an existing CrawlUrl for fetching
@compiler_pages.route("/urls/<int:id>/que")
@compiler_pages.route("/urls/<int:id>/que.json")
def urls_que_json(id):
    cu = urls_que_upd(id)
    cc = CrawlCache.query.filter(CrawlCache.url == cu.url).first()
    return jsonify(urls=cu, cache=cc)

# que an existing CrawlUrl for fetching (que_all variant)
@compiler_pages.route("/urls/<int:id>/que_all")
@compiler_pages.route("/urls/<int:id>/que_all.json")
def urls_queall_json(id):
    cu = urls_que(id)
    cc = CrawlCache.query.filter(CrawlCache.url == cu.url).first()
    return jsonify(urls=cu, cache=cc)

@compiler_pages.route("/urls/que.lst")
def urls_que_lst():
# Lade Alle Urls
# cu=urls_que(id)
#cc=CrawlCache.query.filter(CrawlCache.url==cu.url)
cus=CrawlUrl.query.all()
urls=map((lambda cu: url_for('.urls_que_json',id=cu.id)),cus)
if request.values.has_key('url'):
urls=map((lambda u: request.values["url"]+ u),urls)
urls=map((lambda u: u+"\n"),urls)
return Response(urls,mimetype='text/plain')
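# Example: GET /urls/que.lst?url=http://localhost:5000 would yield one
# absolute /que link per stored URL (host and port are assumptions):
#   http://localhost:5000/urls/1/que
#   http://localhost:5000/urls/2/que
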
# test an existing CrawlUrl
@compiler_pages.route("/urls/<int:id>/test")
@compiler_pages.route("/urls/<int:id>/test.json")
def urls_test_json(id):
    return jsonify(result=urls_test(id))

@compiler_pages.route("/debug",methods=['GET','PUT'])
def debug():
status="did nothing"
js=read_json(request)
clogger.info(request.get_json())
if js["cmd"] == "runfetch":
mworker.run_fetch()
status="fetched something"
if js["cmd"] == "que":
cu = CrawlUrl.query.get(js["id"])
mworker.queue_url(cu.tpe, cu.url)
status= mworker.run_fetch()
if js["cmd"] == "comp":
status=mworker.run_compile()
if js["cmd"]=="process":
status=mworker.run_process()
return jsonify(status=status)
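# A possible invocation, assuming the app listens on localhost:5000
# (host, port and the stored id are assumptions):
#   curl -X PUT -H 'Content-Type: application/json' \
#        -d '{"cmd": "que", "id": 1}' http://localhost:5000/debug
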
@compiler_pages.route("/debugurl")
def debugurl():
s=CrawlUrlSchema()
status=CrawlUrl.query.all()
return jsonify(status=status)
@compiler_pages.route("/urls",methods=['POST'])
def add_urls():
# Lese Daten
js =read_json(request)
# clogger.info(js)
# Finde oder Erzeuge Url in der Datenbank
url=CrawlUrlSchema().load(js["url"])
clogger.info(url)
url=url_add(url.data["url"],url.data["tpe"])
return jsonify(url=url, kk=js)
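# A possible request; the 'url' and 'tpe' fields are inferred from the
# url_add() call above, host and values are assumptions:
#   curl -X POST -H 'Content-Type: application/json' \
#        -d '{"url": {"url": "http://example.org/", "tpe": "html"}}' \
#        http://localhost:5000/urls
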
@compiler_pages.route("/urls/<int:id>",methods=['DELETE'])
@compiler_pages.route("/urls<int:id>.json",methods=['DELETE'])
def delete(id):
cu=CrawlUrl.query.get(id)
if cu != None:
db_session2.delete(cu)
db_session2.commit()
return jsonify(url={})
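# A possible request (host, port and id are assumptions):
#   curl -X DELETE http://localhost:5000/urls/1
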
@compiler_pages.route("/section/<int:id>/reset",methods=['GET'])
@compiler_pages.route("/section/<int:id>/reset.json",methods=['GET'])
def reset(id):
section=Section.query.get(id)
clogger.info(section)
for a in section.articles:
db_session.delete(a)
db_session.commit()
section=Section.query.get(id)
return jsonify(section=section,articles=section.articles)