# -*- coding: utf-8 -*-
import MySQLdb

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
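#
# A minimal sketch of the settings this pipeline relies on (the project path
# "shlib" and every credential value below are assumptions, not taken from
# this project's settings.py):
#
#     ITEM_PIPELINES = {
#         'shlib.pipelines.ShlibPipeline': 300,
#     }
#
#     DBKWARGS = {
#         'host': 'localhost',        # MySQL server address
#         'user': 'root',             # database user
#         'passwd': 'your_password',  # database password
#         'db': 'shlib',              # database holding the bookinfo table
#         'charset': 'utf8',          # keep non-ASCII book data intact
#         'use_unicode': True,
#     }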


class ShlibPipeline(object):
    # Persist the scraped item (here: write it to the database; other
    # pipelines might write to a file, send mail, etc.).
    def process_item(self, item, spider):
        # Connection parameters come from the DBKWARGS dict in settings.py
        DBKWARGS = spider.settings.get('DBKWARGS')
        con = MySQLdb.connect(**DBKWARGS)
        cur = con.cursor()
        sql = ("insert into bookinfo(`bookid`, `bookname`, `url`, `desc`, `address`, `booknum`, `status`, `type`, `barcode`) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)")
        lis = (item['book_id'],item['bookname'],item['url'],item['desc'],item['address'],
               item['booknum'],item['status'],item['type'],item['barcode'])
        try:
            cur.execute(sql, lis)
        except Exception as e:
            # Undo the failed insert so the connection is left in a clean state
            print("Insert error: %s" % e)
            con.rollback()
        else:
            con.commit()
        cur.close()
        con.close()
        return item
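
# The INSERT above assumes a bookinfo table roughly like the sketch below.
# Column types and lengths are assumptions; only the column names come from
# the pipeline's SQL (note that `desc` is a MySQL reserved word, which is why
# the statement quotes column names with backticks):
#
#     CREATE TABLE bookinfo (
#         `bookid`   VARCHAR(32),
#         `bookname` VARCHAR(255),
#         `url`      VARCHAR(255),
#         `desc`     TEXT,
#         `address`  VARCHAR(255),
#         `booknum`  VARCHAR(32),
#         `status`   VARCHAR(32),
#         `type`     VARCHAR(64),
#         `barcode`  VARCHAR(64)
#     );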