Python twisted.enterprise.adbapi.ConnectionPool() Examples
The following are 30 code examples of twisted.enterprise.adbapi.ConnectionPool(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module twisted.enterprise.adbapi, or try the search function.
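Before the project examples, here is a minimal, self-contained sketch of the pattern they all share: the pool is created once with the name of a DB-API 2.0 driver module, and queries are issued through runQuery/runInteraction, which run in a thread pool and return Deferreds. The sqlite3 driver, database file name, and table below are illustrative assumptions only, not taken from any example on this page.

    from twisted.enterprise import adbapi
    from twisted.internet import defer, reactor

    # Throwaway sqlite3 database; file name and table are illustrative only.
    dbpool = adbapi.ConnectionPool('sqlite3', 'example.db', check_same_thread=False)

    def create_table(txn):
        # runInteraction hands the callable a cursor in a worker thread.
        txn.execute("CREATE TABLE IF NOT EXISTS notes (body TEXT)")
        txn.execute("INSERT INTO notes VALUES (?)", ("hello",))

    @defer.inlineCallbacks
    def main():
        yield dbpool.runInteraction(create_table)
        rows = yield dbpool.runQuery("SELECT body FROM notes")
        print(rows)      # list of row tuples, e.g. [('hello',)]
        dbpool.close()   # shut down the pool's worker threads
        reactor.stop()

    reactor.callWhenRunning(main)
    reactor.run()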
Example #1
Source File: pipelines.py From PythonCrawler-Scrapy-Mysql-File-Template with MIT License | 6 votes |
def from_settings(cls, settings):
    '''1. @classmethod declares a class method, as opposed to the ordinary instance methods we usually see.
    2. The first parameter of a class method is cls (short for class, i.e. the class itself), while the first
       parameter of an instance method is self, an instance of the class.
    3. It can be called on the class itself, e.g. C.f(), similar to a static method in Java.'''
    dbparams = dict(
        host=settings['MYSQL_HOST'],  # read the configuration from settings
        db=settings['MYSQL_DBNAME'],
        user=settings['MYSQL_USER'],
        passwd=settings['MYSQL_PASSWD'],
        charset='utf8',  # the charset must be set, otherwise Chinese text may come back garbled
        cursorclass=MySQLdb.cursors.DictCursor,
        use_unicode=False,
    )
    dbpool = adbapi.ConnectionPool('MySQLdb', **dbparams)  # ** expands the dict into keyword arguments, i.e. host=xxx, db=yyy, ...
    return cls(dbpool)  # effectively assigns dbpool to this class; it can then be accessed via self

# called automatically by the pipeline framework
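The from_settings classmethod above only builds the pool; the matching write path in such a Scrapy pipeline usually lives in process_item. The following is a minimal sketch of that half under assumed names: the class name, the item fields ('title', 'url'), and the table name 'crawl_items' are hypothetical and not taken from the project above.

    # Illustrative sketch only; names and schema are assumptions.
    class MysqlTwistedPipeline(object):
        def __init__(self, dbpool):
            self.dbpool = dbpool

        def process_item(self, item, spider):
            # runInteraction runs _do_insert with a cursor in a pool thread and
            # returns a Deferred, so the reactor is never blocked by the insert.
            d = self.dbpool.runInteraction(self._do_insert, item)
            d.addErrback(self._handle_error, item, spider)
            return d

        def _do_insert(self, cursor, item):
            cursor.execute(
                "insert into crawl_items (title, url) values (%s, %s)",
                (item.get('title'), item.get('url')),
            )

        def _handle_error(self, failure, item, spider):
            spider.logger.error(failure)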
Example #2
Source File: pipelines.py From FunpySpiderSearchEngine with Apache License 2.0 | 6 votes |
def from_settings(cls, settings):
    """A very useful hook for custom components and extensions:
    the method name is fixed, and it is called by scrapy.
    The cls passed in here refers to the current class.
    """
    db_parms = dict(
        host=settings["MYSQL_HOST"],
        db=settings["MYSQL_DBNAME"],
        user=settings["MYSQL_USER"],
        passwd=settings["MYSQL_PASSWORD"],
        charset='utf8mb4',
        cursorclass=MySQLdb.cursors.DictCursor,
        use_unicode=True,
    )
    # connection pool (ConnectionPool)
    dbpool = adbapi.ConnectionPool("MySQLdb", **db_parms)
    # This effectively instantiates the pipeline; receive dbpool in __init__.
    return cls(dbpool)
Example #3
Source File: pipelines.py From vrequest with MIT License | 6 votes |
def process_item(self, item, spider):
    mysql_config = item.pop('__mysql__', None)  # the config is removed automatically when the item is stored
    if mysql_config and item:
        if type(mysql_config) is dict:
            table = mysql_config.pop('table', None)
            db = mysql_config.get('db', None) or 'vrequest'
            mysql_config.setdefault('charset', 'utf8mb4')
            mysql_config.setdefault('db', db)
            dbk = hmac.new(b'', json.dumps(mysql_config, sort_keys=True).encode(), 'md5').hexdigest()
            if dbk not in self.dbn:
                self.dbn[dbk] = adbapi.ConnectionPool('pymysql', **mysql_config)
                self.init_database(self.dbn[dbk], mysql_config, db, table, item)
            self.dbn[dbk].runInteraction(self.insert_item, db, table, item)
            return item
        else:
            raise TypeError('Unable Parse mysql_config type:{}'.format(type(mysql_config)))
    else:
        return item
Example #4
Source File: fileHashUtils.py From iWant with MIT License | 5 votes |
def get_file(file_hash, dbpool):
    """Returns the corresponding file object

    :param file_hash: hash of the file
    :param dbpool: twisted.enterprise.adbapi.ConnectionPool object
    """
    file_query_response = yield dbpool.runQuery('select filename from indexer where hash=?', (file_hash,))
    # print file_query_response[0][0]
    defer.returnValue(open(file_query_response[0][0], 'rb'))
Example #5
Source File: database.py From floranet with MIT License | 5 votes |
def start(self):
    """Create the ADBAPI connection pool.
    """
    Registry.DBPOOL = adbapi.ConnectionPool('psycopg2',
                                            host=self.host,
                                            user=self.user,
                                            password=self.password,
                                            database=self.database)
Example #6
Source File: mysql.py From openslack-crawler with Apache License 2.0 | 5 votes |
def from_settings(cls, settings):
    dbargs = dict(
        host=settings['MYSQL_HOST'],
        db=settings['MYSQL_DBNAME'],
        user=settings['MYSQL_USER'],
        passwd=settings['MYSQL_PASSWD'],
        charset='utf8',
        cursorclass=MySQLdb.cursors.DictCursor,
        use_unicode=True,
    )
    dbpool = adbapi.ConnectionPool('MySQLdb', **dbargs)
    return cls(dbpool)

# called automatically by the pipeline framework
Example #7
Source File: pipelines.py From openslack-crawler with Apache License 2.0 | 5 votes |
def from_settings(cls, settings):
    dbargs = dict(
        host=settings['MYSQL_HOST'],
        db=settings['MYSQL_DBNAME'],
        user=settings['MYSQL_USER'],
        passwd=settings['MYSQL_PASSWD'],
        charset='utf8',
        cursorclass=MySQLdb.cursors.DictCursor,
        use_unicode=True,
    )
    print dbargs
    dbpool = adbapi.ConnectionPool('MySQLdb', **dbargs)
    return cls(dbpool)
Example #8
Source File: pipelines.py From PythonCrawler-Scrapy-Mysql-File-Template with MIT License | 5 votes |
def __init__(self, dbpool):
    self.dbpool = dbpool
    '''
    The commented-out code below connects the pool with values hard-coded in the source;
    reading them from the settings file instead is more flexible:
    self.dbpool = adbapi.ConnectionPool('MySQLdb',
                                        host='127.0.0.1',
                                        db='crawlpicturesdb',
                                        user='root',
                                        passwd='123456',
                                        cursorclass=MySQLdb.cursors.DictCursor,
                                        charset='utf8',
                                        use_unicode=False)'''
Example #9
Source File: sql_client.py From calvin-base with Apache License 2.0 | 5 votes |
def start(self, iface='', bootstrap=[], cb=None, name=None, nodeid=None):
    kwargs = copy.copy(config_kwargs)
    _log.debug("SQL start %s" % str(kwargs))
    kwargs.pop('db', None)
    dbmodule = kwargs.pop('dbmodule', "MySQLdb")
    # FIXME does this take too long?
    self.dbpool = adbapi.ConnectionPool(dbmodule, **kwargs)
    if not self.dbpool:
        _log.debug("Failed SQL connection pool")
        if cb is not None:
            async.DelayedCall(0, cb, False)
        return
    d = self.dbpool.runQuery(QUERY_SETUP)
    d.addCallbacks(CalvinCB(self._setup_cb, cb=cb), CalvinCB(self._setup_fail_cb, cb=cb))
    _log.debug("Sent SQL table setup query")
Example #10
Source File: PersistentBuffer.py From calvin-base with Apache License 2.0 | 5 votes |
def init(self, buffer_id, reporting=None, *args, **kwargs):
    self.db_name = buffer_id
    self.db_path = os.path.join(os.path.abspath(os.path.curdir), self.db_name + ".sq3")
    self.db = adbapi.ConnectionPool('sqlite3', self.db_path, check_same_thread=False)
    self._pushed_values = 0
    self._popped_values = 0
    self._latest_timestamp = 0
    self._value = None
    self._changed = None
    self._statlogging = None

    def ready(length):
        def log_stats():
            _log.info("{} : pushed {}, popped {} (latest timestamp: {}) ".format(
                self.db_name, self._pushed_values, self._popped_values, self._latest_timestamp))
            self._statlogging.reset()
        self._changed = True  # Something has changed, need to check if readable
        # install timer to report on pushing/popping
        if reporting:
            self._statlogging = async.DelayedCall(reporting, log_stats)
        self.scheduler_wakeup()

    def create(db):
        # Create simple queue table. Using TEXT unless there is a reason not to.
        db.execute("CREATE TABLE IF NOT EXISTS queue (value BLOB)")

    def error(e):
        _log.error("Error initializing queue {}: {}".format(self.db_name, e))

    q = self.db.runInteraction(create)
    q.addCallback(ready)
    q.addErrback(error)
Example #11
Source File: pipelines.py From JobSpiders with Apache License 2.0 | 5 votes |
def from_settings(cls, settings):
    dbparms = dict(
        host=settings["MYSQL_HOST"],
        db=settings["MYSQL_DBNAME"],
        user=settings["MYSQL_USER"],
        passwd=settings["MYSQL_PASSWORD"],
        charset='utf8',
        cursorclass=MySQLdb.cursors.DictCursor,
        use_unicode=True,
    )
    dbpool = adbapi.ConnectionPool("MySQLdb", **dbparms)
    return cls(dbpool)
Example #12
Source File: pipelines.py From China_stock_announcement with MIT License | 5 votes |
def from_settings(cls, settings):
    dbparams = dict(
        host=settings['HOST_ADDRESS'],
        db=settings['DB_NAME'],
        user=settings['USER'],
        passwd=settings['PASSWORD'],
        charset='utf8',  # the charset must be set, otherwise Chinese text may come back garbled
        cursorclass=MySQLdb.cursors.DictCursor,
        use_unicode=False,
    )
    dbpool = adbapi.ConnectionPool('MySQLdb', **dbparams)  # ** expands the dict into keyword arguments, i.e. host=xxx, db=yyy, ...
    return cls(dbpool)  # effectively assigns dbpool to this class; it can then be accessed via self

# called automatically by the pipeline framework
Example #13
Source File: database.py From BitTorrent with GNU General Public License v3.0 | 5 votes |
def __setstate__(self, state):
    self.__dict__ = state
    self.info['password'] = getpass.getpass('Database password for %s: ' % (self.info['user'],))
    self.dbpool = adbapi.ConnectionPool(**self.info)
    del self.info['password']
Example #14
Source File: database.py From BitTorrent with GNU General Public License v3.0 | 5 votes |
def __init__(self, info):
    self.info = info
    self.dbpool = adbapi.ConnectionPool(**self.info)
Example #15
Source File: database.py From python-for-android with Apache License 2.0 | 5 votes |
def __setstate__(self, state):
    self.__dict__ = state
    self.info['password'] = getpass.getpass('Database password for %s: ' % (self.info['user'],))
    self.dbpool = adbapi.ConnectionPool(**self.info)
    del self.info['password']
Example #16
Source File: database.py From python-for-android with Apache License 2.0 | 5 votes |
def __init__(self, info):
    self.info = info
    self.dbpool = adbapi.ConnectionPool(**self.info)
Example #17
Source File: fileHashUtils.py From iWant with MIT License | 5 votes |
def get_piecehashes_of(file_hash, dbpool):
    """Returns concatenated hash of all chunks belonging to a particular file

    :param file_hash: hash of the file
    :param dbpool: twisted.enterprise.adbapi.ConnectionPool object
    """
    file_pieces_response = yield dbpool.runQuery('select piecehashes from indexer where hash=?', (file_hash,))
    defer.returnValue(file_pieces_response[0][0])
Example #18
Source File: fileHashUtils.py From iWant with MIT License | 5 votes |
def add_new_file_entry_resume(file_entry, dbpool):
    """Add file entry to the resume table

    :param filename: absolute filepath
    :param dbpool: twisted.enterprise.adbapi.ConnectionPool object
    """
    # print 'new entry added to resume table'
    # filename, checksum = file_entry[0], file_entry[3]
    # checksum = file_entry[3]
    filename = file_entry[0]
    yield dbpool.runQuery('insert into resume values (?)', (filename,))
    yield dbpool.runQuery('insert into indexer values (?,?,?,?,?,?,?)', (file_entry))
Example #19
Source File: fileHashUtils.py From iWant with MIT License | 5 votes |
def remove_resume_entry(filename, dbpool):
    """Removes file entry from the resume table

    :param filename: absolute filepath
    :param dbpool: twisted.enterprise.adbapi.ConnectionPool object
    """
    filename_response = yield dbpool.runQuery('select filename from indexer where filename=?', (filename,))
    filename = filename_response[0][0]
    # print 'everything deleted from indexer and resume'
    yield dbpool.runQuery('delete from resume where filename=?', (filename,))
    yield dbpool.runQuery('delete from indexer where filename=?', (filename,))
Example #20
Source File: fileHashUtils.py From iWant with MIT License | 5 votes |
def check_hash_present_in_resume(filename, dbpool):
    """Checks if hash value is present in the resume table

    :param filename: absolute filepath
    :param dbpool: twisted.enterprise.adbapi.ConnectionPool object
    """
    response = yield dbpool.runQuery('select filename from resume where filename=?', (filename,))
    if len(response) == 0:
        defer.returnValue(False)
    else:
        filename = response[0][0]
        if not os.path.exists(filename):
            yield remove_resume_entry(filename, dbpool)
            defer.returnValue(False)
        defer.returnValue(True)
Example #21
Source File: fileHashUtils.py From iWant with MIT License | 5 votes |
def share(files, dbpool):
    """Sets the share value of the files to 1 in the database

    :param files: list of absolute file paths
    :param dbpool: twisted.enterprise.adbapi.ConnectionPool object
    """
    for f in files:
        yield dbpool.runQuery('update indexer set share=1 where filename=?', (f,))
    defer.returnValue('shared')
Example #22
Source File: fileHashUtils.py From iWant with MIT License | 5 votes |
def unshare(files, dbpool):
    """Sets the share value of the files to 0 in the database

    :param files: list of absolute file paths
    :param dbpool: twisted.enterprise.adbapi.ConnectionPool object
    """
    for f in files:
        yield dbpool.runQuery('update indexer set share=0 where filename=?', (f,))
    defer.returnValue('unshared')
Example #23
Source File: fileHashUtils.py From iWant with MIT License | 5 votes |
def remove_all_deleted_files(dbpool):
    """Removes file entries from database that are deleted from the file system

    :param dbpool: twisted.enterprise.adbapi.ConnectionPool object
    """
    filenames = yield dbpool.runQuery('select filename from indexer')
    for filename in filenames:
        if not os.path.exists(filename[0]):
            print '[Indexer][Removing]: {0}'.format(filename[0])
            yield dbpool.runQuery('delete from indexer where filename=?', (filename[0],))
    resume_table_filenames = yield dbpool.runQuery('select filename from resume')
    for filename in resume_table_filenames:
        if not os.path.exists(filename[0]):
            print '[Resume][Removing]: {0}'.format(filename[0])
            yield dbpool.runQuery('delete from resume where filename=?', (filename[0],))
Example #24
Source File: pipelines.py From www_job_com with Apache License 2.0 | 5 votes |
def from_settings(cls, settings):
    dbparams = dict(
        host=settings['MYSQL_HOST'],
        db=settings['MYSQL_DBNAME'],
        user=settings['MYSQL_USER'],
        passwd=settings['MYSQL_PASSWD'],
        charset='utf8',
        cursorclass=pymysql.cursors.DictCursor,
        use_unicode=False,
    )
    dbpool = adbapi.ConnectionPool('pymysql', **dbparams)
    return cls(dbpool)
Example #25
Source File: pipelines.py From tieba-crawler with MIT License | 5 votes |
def from_settings(cls, settings):
    dbargs = dict(
        host=settings['MYSQL_HOST'],
        db=settings['MYSQL_DBNAME'],
        user=settings['MYSQL_USER'],
        passwd=settings['MYSQL_PASSWD'],
        charset='utf8',
        use_unicode=True,
    )
    dbpool = adbapi.ConnectionPool('MySQLdb', **dbargs)
    return cls(dbpool)
Example #26
Source File: database.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 5 votes |
def __setstate__(self, state):
    self.__dict__ = state
    self.info['password'] = getpass.getpass('Database password for %s: ' % (self.info['user'],))
    self.dbpool = adbapi.ConnectionPool(**self.info)
    del self.info['password']
Example #27
Source File: database.py From Safejumper-for-Desktop with GNU General Public License v2.0 | 5 votes |
def __init__(self, info):
    self.info = info
    self.dbpool = adbapi.ConnectionPool(**self.info)
Example #28
Source File: fileHashUtils.py From iWant with MIT License | 4 votes |
def index_file(path, dbpool):
    """Builds meta information of a single file

    :param file: absolute pathname of the file to be indexed
    :param dbpool: twisted.enterprise.adbapi.ConnectionPool object

    returns a dict containing a list of (file_name, size, file_hash, root_hash)
    tuples for files to be deleted and shared, plus the shared_folder
    """
    response = {}
    response['DEL'] = []
    response['ADD'] = []
    response['shared_folder'] = None
    filesize = get_file_size(path)
    filesize_from_db = yield dbpool.runQuery('select size from indexer where filename=?', (path,))
    try:
        if filesize_from_db[0][0] != filesize:
            file_hash, piece_hashes, root_hash = get_file_hashes(path)
            file_index_entry = (
                filesize,
                file_hash,
                piece_hashes,
                root_hash,
                False,
                path,
            )
            print 'updating the hash'
            yield dbpool.runQuery('update indexer set size=?, hash=?, piecehashes=?, roothash=?, isdirectory=? where filename=?', (file_index_entry))
            file_property_list = [(path, filesize, file_hash, root_hash)]  # , False)]
            response['ADD'] = file_property_list
        defer.returnValue(response)
    except IndexError:
        # print '@index_file {0}'.format(filesize_from_db)
        if len(filesize_from_db) == 0:
            file_hash, piece_hashes, root_hash = get_file_hashes(path)
            print '[New File: Indexed] {0}'.format(path)
            file_index_entry = (
                path, 1, filesize, file_hash, piece_hashes, root_hash, False)
            yield dbpool.runQuery('insert into indexer values (?,?,?,?,?,?,?)', (file_index_entry))
            file_property_list = [(path, filesize, file_hash, root_hash)]  # , False)]
            response['ADD'] = file_property_list
            defer.returnValue(response)
        else:
            defer.returnValue(response)
Example #29
Source File: fileHashUtils.py From iWant with MIT License | 4 votes |
def bootstrap(folder, dbpool):
    """Returns all the files and folder meta information that needs to be shared

    :param folder: absolute path of the shared folder
    :param dbpool: twisted.enterprise.adbapi.ConnectionPool object
    """
    if not os.path.exists(folder):
        raise NotImplementedError
    else:
        # we need to remove all the entries for which the path does not exist
        yield remove_all_deleted_files(dbpool)
        all_filenames_response = yield dbpool.runQuery('select filename from indexer')
        all_filenames = set(map(lambda x: x[0], all_filenames_response))
        # print 'all the filenames are {0}'.format(all_filenames)
        files_to_be_unshared = set(
            filter(
                lambda x: not x.startswith(
                    os.path.abspath(folder)), all_filenames))
        files_to_be_shared = all_filenames - files_to_be_unshared
        # print 'files to be shared {0}'.format(files_to_be_shared)
        all_unshared_files_response = yield dbpool.runQuery('select filename from indexer where share=0')
        all_unshared_files = set(
            map(lambda x: x[0], all_unshared_files_response))
        all_shared_files = all_filenames - all_unshared_files
        share_remaining_files = files_to_be_shared - all_shared_files
        unshare_remaining_files = files_to_be_unshared - all_unshared_files
        yield share(share_remaining_files, dbpool)
        yield unshare(unshare_remaining_files, dbpool)
        yield index_folder(folder, dbpool)
        combined_response = {}
        combined_response['ADD'] = []
        combined_response['DEL'] = []
        combined_response['shared_folder'] = folder
        files_added_metainfo = []
        files_removed_metainfo = []
        for filepath in unshare_remaining_files:
            file_entry = yield dbpool.runQuery('select filename, size, hash, roothash from indexer where filename=?', (filepath,))
            files_removed_metainfo.append(file_entry[0])
        # files_removed_metainfo.extend(removed_files_temp)
        sharing_files = yield dbpool.runQuery('select filename, size, hash, roothash from indexer where share=1')
        files_added_metainfo.extend(sharing_files)
        combined_response['ADD'] = files_added_metainfo
        combined_response['DEL'] = files_removed_metainfo
        defer.returnValue(combined_response)
Example #30
Source File: postgresql.py From tensor with MIT License | 4 votes |
def get(self):
    try:
        p = adbapi.ConnectionPool('psycopg2',
                                  database='postgres',
                                  host=self.host,
                                  port=self.port,
                                  user=self.user,
                                  password=self.password)
        cols = (
            ('xact_commit', 'commits'),
            ('xact_rollback', 'rollbacks'),
            ('blks_read', 'disk.read'),
            ('blks_hit', 'disk.cache'),
            ('tup_returned', 'returned'),
            ('tup_fetched', 'selects'),
            ('tup_inserted', 'inserts'),
            ('tup_updated', 'updates'),
            ('tup_deleted', 'deletes'),
            ('deadlocks', 'deadlocks')
        )
        keys, names = zip(*cols)
        q = yield p.runQuery(
            'SELECT datname,numbackends,%s FROM pg_stat_database' % (
                ','.join(keys))
        )
        for row in q:
            db = row[0]
            threads = row[1]
            if db not in ('template0', 'template1'):
                self.queueBack(self.createEvent('ok',
                                                'threads: %s' % threads, threads,
                                                prefix='%s.threads' % db))
                for i, col in enumerate(row[2:]):
                    self.queueBack(self.createEvent('ok',
                                                    '%s: %s' % (names[i], col), col,
                                                    prefix='%s.%s' % (db, names[i]),
                                                    aggregation=Counter64))
        yield p.close()
        defer.returnValue(self.createEvent('ok', 'Connection ok', 1,
                                           prefix='state'))
    except exceptions.ImportError:
        log.msg('tensor.sources.database.postgresql.PostgreSQL'
                ' requires psycopg2')
        defer.returnValue(None)
    except Exception as e:
        defer.returnValue(self.createEvent('critical',
                                           'Connection error: %s' % str(e).replace('\n', ' '), 0,
                                           prefix='state'))