pipelines.py

# -*- coding: utf-8 -*-
import MySQLdb

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html


class ShlibPipeline(object):
    # Persist each scraped item (here: insert into the database; this is also
    # where you could write to a file, send mail, etc.)
    def process_item(self, item, spider):
        DBKWARGS = spider.settings.get('DBKWARGS')
        con = MySQLdb.connect(**DBKWARGS)
        cur = con.cursor()
        sql = ("insert into proxy(IP,PORT,TYPE,POSITION,SPEED,LAST_CHECK_TIME) "
               "values(%s,%s,%s,%s,%s,%s)")
        lis = (item['IP'], item['PORT'], item['TYPE'], item['POSITION'], item['SPEED'],
               item['LAST_CHECK_TIME'])
        try:
            cur.execute(sql, lis)
        except Exception as e:
            print "Insert error:", e
            con.rollback()
        else:
            con.commit()
        cur.close()
        con.close()
        return item
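
For reference, a minimal sketch of the settings.py entries this pipeline relies on: the ITEM_PIPELINES registration mentioned in the comment above, and the DBKWARGS dict that process_item passes straight to MySQLdb.connect(). The dotted module path (here assumed to be a project named shlib) and the exact connection keys/values are illustrative assumptions, not taken from the original file.

settings.py (excerpt)

# Register the pipeline; the dotted path assumes the Scrapy project module is "shlib".
ITEM_PIPELINES = {
    'shlib.pipelines.ShlibPipeline': 300,
}

# Keyword arguments handed to MySQLdb.connect(**DBKWARGS) in the pipeline.
# The values below are placeholders -- adjust them for your MySQL server.
DBKWARGS = {
    'host': 'localhost',
    'user': 'root',
    'passwd': 'your_password',
    'db': 'your_database',
    'charset': 'utf8',
}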