bot upgrade 1

commit 96cc97a13f (parent df6bff033a)
Author: Andreas Stephanides
Date: 2017-01-14 14:22:17 +01:00
6 changed files with 91 additions and 19 deletions

articles/controller.py (new file, +13 lines)

@@ -0,0 +1,13 @@
from .model import Article
from .model import ArticleSchema
#import flask
from datetime import datetime
import json
from src.database import db_session, read_json

def get_all():
    return Article.query.all()

def search(s):
    return Article.query.filter(Article.title.like("%"+s+"%")).all()
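This new controller is what the bot's /articles command (below) calls via src.articles.controller; search() is a plain substring match built with SQL LIKE. A minimal sketch of the two entry points (hypothetical data, assuming an Article model with a title column):

from src.articles.controller import get_all, search

all_articles = get_all()   # every Article row
hits = search("Mensa")     # WHERE title LIKE '%Mensa%', i.e. substring match on the title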


@@ -11,6 +11,8 @@ from telepot.delegate import (
 per_chat_id, pave_event_space, include_callback_query_chat_id, create_open, per_inline_from_id )
 from src.compiler import CrawlUrl
 from gevent import spawn, monkey, Greenlet
+import src.compiler.controller as compiler_controller
+import src.articles.controller as articles_controller
 def IKB(h):
     return InlineKeyboardButton(text=h["text"], callback_data=h["callback_data"])
@@ -23,35 +25,72 @@ def IKM(h):
 def IKM2(h):
     return InlineKeyboardMarkup(inline_keyboard= map(IKB2,h))
+def is_admin(id):
+    lg.debug("check admin?"+str(id))
+    if str(id) in cfg.is_admin:
+        lg.debug("is admin")
+        return True
+    else:
+        return False
 def query_que_url(url):
     print(json.dumps(url))
     return {"text": url.url, "callback_data":"/urls/"+str(url.id)+"/que"}
+def query_test_url(url):
+    print(json.dumps(url))
+    return {"text": url.url, "callback_data":"/urls/"+str(url.id)+"/test"}
 def handle_urls(handler, cmd):
     curls=CrawlUrl.query.all()
     #sent=handler.sender.sendMessage(json.dumps(curls))
-    kb= IKM2(map(query_que_url,curls))
+    kb_que= IKM2(map(query_que_url,curls))
+    kb_test= IKM2(map(query_test_url,curls))
+    kb_url=IKM2([{"text": "Que an url", "callback_data":"/urls/que"},
+        {"text": "Test an url", "callback_data":"/urls/test"}
+        ])
     lg.debug(json.dumps(cmd))
     print json.dumps(cmd)
     if len(cmd) >= 4 and cmd[3]=="que":
-        sent=handler.sender.sendMessage("I qued url "+str(cmd[2]), reply_markup=None)
+        sent=handler.sender.sendMessage("I qued url "+ json.dumps(compiler_controller.urls_test(int(cmd[2]))), reply_markup=None)
+    elif len(cmd) >= 4 and cmd[3]=="test":
+        sent=handler.sender.sendMessage("I tested url: "+ json.dumps(compiler_controller.urls_test(int(cmd[2]))["comp"])[0:399], reply_markup=None)
+    elif len(cmd) >= 3 and cmd[2] == "que":
+        sent=handler.sender.sendMessage("Which url shoud I que?", reply_markup=kb_que)
+        handler._edit_msg_ident = telepot.message_identifier(sent)
+        handler._editor = telepot.helper.Editor(handler.bot, sent)
+    elif len(cmd) >= 3 and cmd[2] == "test":
+        sent=handler.sender.sendMessage("Which url shoud I test?", reply_markup=kb_test)
+        handler._edit_msg_ident = telepot.message_identifier(sent)
+        handler._editor = telepot.helper.Editor(handler.bot, sent)
     else:
-        sent=handler.sender.sendMessage("que?", reply_markup=kb)
+        sent=handler.sender.sendMessage("What do you want to do?", reply_markup=kb_url)
     handler._edit_msg_ident = telepot.message_identifier(sent)
     handler._editor = telepot.helper.Editor(handler.bot, sent)
-def execute_command(handler,cmd,msg=None):
+def execute_command(handler,cmd,msg=None, args=[]):
     if cmd[1]=='urls':
+        if is_admin(msg["from"]["id"]):
             handle_urls(handler,cmd)
+        else:
+            handler.sender.sendMessage("Not allowed for "+json.dumps(msg["from"]))
+    elif cmd[1] =='articles':
+        handler.sender.sendMessage(json.dumps({"args": args, "cmd": cmd}))
+        handler.sender.sendMessage(json.dumps(articles_controller.search(args[0])))
+    elif cmd[1] =='startworkers':
+        if is_admin(msg["from"]["id"]):
+            handler.sender.sendMessage(compiler_controller.start_workers())
+    else:
+        handler.sender.sendMessage("Sorry, I didn't understand the command!")
 def handle(handler,msg):
     content_type,chat_type,chat_id = telepot.glance(msg)
     if msg.has_key('text'):
         if msg['text'][0]=='/':
             cmd = msg['text'].split("/")
-            execute_command(handler, cmd, msg)
+            args=cmd[-1].split(" ")[1:]
+            cmd[-1]=cmd[-1].split(" ")[0]
+            execute_command(handler, cmd, msg,args)
     if msg.has_key('data'):
         lg.debug(msg['data'])
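Two details in this hunk are easy to trip over. First, the new parsing in handle(): the text is split on "/", and trailing space-separated words on the last segment become args. Second, is_admin() compares str(id) against cfg.is_admin, so the whitelist must hold Telegram user ids as strings. A minimal sketch of both (hypothetical values throughout):

# how "/articles Mensa" reaches articles_controller.search
text = "/articles Mensa"           # hypothetical message text
cmd = text.split("/")              # ['', 'articles Mensa']
args = cmd[-1].split(" ")[1:]      # ['Mensa']
cmd[-1] = cmd[-1].split(" ")[0]    # cmd is now ['', 'articles']
# execute_command(handler, cmd, msg, args) then dispatches on cmd[1]

# why cfg.is_admin must contain strings (hypothetical whitelist)
is_admin_whitelist = ["12345678"]
assert str(12345678) in is_admin_whitelist   # an int id passes via str()
assert 12345678 not in is_admin_whitelist    # the raw int would not match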
@@ -99,6 +138,7 @@ class FetBot(telepot.helper.ChatHandler):
         handle(self,msg)
         content_type,chat_type,chat_id = telepot.glance(msg)
         lg.debug(content_type)
+        lg.debug(msg)
         if content_type=="photo" or content_type=="sticker":
             lg.debug("try to download %s" % msg[content_type][-1]["file_id"])
             f=self.bot.getFile(msg[content_type][-1]['file_id'])
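getFile() returns only metadata (a dict with a file_path on Telegram's servers); the bytes come from a second request. telepot bundles both steps in Bot.download_file; a minimal sketch (hypothetical token, file_id, and destination):

import telepot

bot = telepot.Bot("TOKEN")            # hypothetical token
file_id = "AgAD..."                   # hypothetical, as read from msg[content_type][-1]["file_id"]
meta = bot.getFile(file_id)           # metadata only, e.g. {'file_path': 'photos/file_0.jpg', ...}
bot.download_file(file_id, "/tmp/photo.jpg")   # fetches file_path and writes the bytes to disk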


@@ -13,3 +13,4 @@ from models import add_url, CrawlUrl
 #start_workers(1,1,1)
 from fetching import announce_articleid
+import controller


@@ -9,6 +9,8 @@ from src import clogger, cfg
 from src.fb import graph
 from fixing import fix_link
 from facebook import GraphAPIError
+import feedparser
 #from fetching import downloadfile
 import json
 def do_compile(tpe, cont):
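The new feedparser import suggests do_compile gains an RSS/Atom source type. For reference, feedparser's core API is a single call that accepts a URL, a file, or a raw XML string; a minimal sketch (hypothetical feed URL):

import feedparser

d = feedparser.parse("https://example.org/feed.xml")   # hypothetical feed
for entry in d.entries:
    print entry.title, entry.link   # fields are normalized across RSS/Atom flavours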

compiler/controller.py (new file, +22 lines)

@@ -0,0 +1,22 @@
from .models import CrawlUrl
from .models import CrawlCache, CrawlCacheSchema
from src.database import db_session2,init_db,read_json,init_db2
from compiler import do_compile
from fetching import fetch_page
import mworker

def urls_test(id):
    cu=CrawlUrl.query.get(id)
    rw=fetch_page(cu.url)
    h= {"url": cu.url, "sourcetype": cu.tpe, "raw": rw}
    h2=do_compile(cu.tpe, h)
    return {"rw": rw, "url": h, "comp": h2}

def urls_que(id):
    cu=CrawlUrl.query.get(id)
    mworker.queue_url(cu.tpe, cu.url)
    return cu

def start_workers():
    mworker.start_workers(1,1,1)
    return "started workers"


@@ -15,7 +15,7 @@ import mworker
 from compiler import do_compile
 from fetching import fetch_page
+from .controller import urls_test, start_workers, urls_que
 #flask.json.JSONEncoder.default = lambda self,obj: ((obj.__json__()) if isinstance(obj, (Article,CrawlUrl)) else None)
 @compiler_pages.route("/")
@@ -50,7 +50,7 @@ def initdb_json2():
@compiler_pages.route("/start") @compiler_pages.route("/start")
@compiler_pages.route("/start.json") @compiler_pages.route("/start.json")
def start_json(): def start_json():
mworker.start_workers(1,1,1) # initialisiere Datenbank start_workers() # initialisiere Datenbank
status="Worker gestartet" status="Worker gestartet"
return jsonify(status=status) return jsonify(status=status)
@@ -76,24 +76,18 @@ def urls_json(id):
@compiler_pages.route("/urls/<int:id>/que.json") @compiler_pages.route("/urls/<int:id>/que.json")
def urls_que_json(id): def urls_que_json(id):
# Lade Alle Urls # Lade Alle Urls
cu=CrawlUrl.query.get(id) cu=urls_que(id)
mworker.queue_url(cu.tpe, cu.url)
cc=CrawlCache.query.filter(CrawlCache.url==cu.url) cc=CrawlCache.query.filter(CrawlCache.url==cu.url)
mworker.start_workers(1,1,1) # initialisiere Datenbank
status="Worker gestartet"
return jsonify(urls=cu, cache=cc) return jsonify(urls=cu, cache=cc)
# que an existing CrawlUrl for fetching # que an existing CrawlUrl for fetching
@compiler_pages.route("/urls/<int:id>/test") @compiler_pages.route("/urls/<int:id>/test")
@compiler_pages.route("/urls/<int:id>/test.json") @compiler_pages.route("/urls/<int:id>/test.json")
def urls_test_json(id): def urls_test_json(id):
# Lade Alle Urls # Lade Alle Urls
cu=CrawlUrl.query.get(id) return jsonify(result=urls_test(id))
rw=fetch_page(cu.url)
h= {"url": cu.url, "sourcetype": cu.tpe, "raw": rw}
h2=do_compile(cu.tpe, h)
return jsonify(urls=cu,hs=h2,rw=rw)
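After this refactor each route is a thin wrapper around the controller, which makes the endpoints easy to smoke-test. A minimal sketch with Flask's test client (hypothetical app import and id; the blueprint mount point is assumed to be the root):

import json
from myapp import app   # hypothetical: wherever the Flask app registering compiler_pages lives

resp = app.test_client().get("/urls/3/test.json")
print json.loads(resp.data)["result"]["comp"]   # the compiled articles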