Python arctic.Arctic() Examples

The following are 28 code examples of arctic.Arctic(), drawn from open-source projects; the source file and project for each example are noted above it. You may also want to check out all available functions and classes of the arctic module.
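Most of these examples follow the same basic workflow: connect with Arctic(host), create or open a library, then write and read pandas DataFrames by symbol. The sketch below pieces that workflow together from the patterns used in the examples; it is a minimal sketch that assumes a MongoDB instance is reachable on localhost, and the library name 'example.demo' and the symbol 'SYMBOL' are placeholders chosen for illustration, not names taken from any of the projects below.

import pandas as pd
from arctic import Arctic

# Connect to the MongoDB instance that backs Arctic
store = Arctic('localhost')

# Create the library on first use; initialize_library defaults to a VersionStore.
# For a ChunkStore, pass lib_type=CHUNK_STORE as in Example #1.
# 'example.demo' is a placeholder library name.
if 'example.demo' not in store.list_libraries():
    store.initialize_library('example.demo')

# Access the library
library = store['example.demo']

# Write a DataFrame under a symbol, with optional metadata
df = pd.DataFrame({'close': [1.0, 2.0, 3.0]},
                  index=pd.date_range('2020-01-01', periods=3))
library.write('SYMBOL', df, metadata={'source': 'demo'})

# Read it back; a VersionStore returns a VersionedItem wrapping the data
item = library.read('SYMBOL')
print(item.data)
print(item.metadata)

On a VersionStore, each write to the same symbol creates a new version of the data; ChunkStore libraries (Examples #1, #7, and #15) instead store data in date-based chunks and support chunk_range reads, as in Examples #5 and #27.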
Example #1
Source File: timeseries_interface.py    From plastering with MIT License
def write_to_db(target_building, iterator):
    '''write the data from a building'''

    conn = Arctic('localhost')

    #create a lib for the tgt_bldg, a lib is akin to a collection
    if target_building not in conn.list_libraries():
        conn.initialize_library(target_building, lib_type=CHUNK_STORE)
        print ('library for %s created'%target_building)

    #connect to the lib for writing
    lib = conn[target_building]

    for sensor, timestamps, data in iterator:
        df = pd.DataFrame({'date': timestamps, 'data': data})
        df.set_index('date', inplace=True)
        lib.write(sensor, df)
        #print ('writing %s is done'%sensor) 
Example #2
Source File: arctic_connection.py    From pysystemtrade with GNU General Public License v3.0
def __init__(self, collection_name, mongo_db=None):
        if mongo_db is None:
            mongo_db = mongoDb()

        database_name = mongo_db.database_name
        host = mongo_db.host

        # Arctic doesn't accept a port

        store = Arctic(host)
        library_name = database_name+"."+collection_name
        store.initialize_library(library_name) # will this fail if already exists??
        library = store[library_name]

        self.database_name = database_name
        self.collection_name = collection_name
        self.host = host

        self.store = store
        self.library_name = library_name
        self.library = library 
Example #3
Source File: test_arctic_multithreading.py    From arctic with GNU Lesser General Public License v2.1
def test_multiprocessing_safety(mongo_host, library_name):
    # Create/initialize library at the parent process, then spawn children, and start them aligned in time
    total_processes = 64
    total_writes_per_child = 100

    register_get_auth_hook(my_auth_hook)

    global MY_ARCTIC
    MY_ARCTIC = Arctic(mongo_host=mongo_host)

    MY_ARCTIC.initialize_library(library_name, VERSION_STORE)
    assert isinstance(MY_ARCTIC.get_library(library_name), VersionStore)

    processes = [Process(target=f, args=(library_name, total_writes_per_child, True)) for _ in range(total_processes)]

    for p in processes:
        p.start()

    for p in processes:
        p.join()

    for p in processes:
        assert p.exitcode == 0

    assert isinstance(MY_ARCTIC.get_library(library_name), VersionStore) 
Example #4
Source File: FundCrawler.py    From factorset with MIT License
def __init__(self, TYPE):
        """
        :param TYPE: 'BS', 'IS', 'CF'
        """

        ############ SETTING #############
        self.config = GetConfig()
        self.TYPE = TYPE # 'BS', 'IS', 'CF'
        self.MONGO = self.config.MONGO
        self.CSV = self.config.CSV
        self.RAW = False
        self.outdir = self.config.fund_dir
        self.encode = self.config.encode
        self.proxypool = self.config.proxypool

        ############ CHANGE ABOVE SETTING #############

        if self.MONGO:
            from arctic import Arctic
            # mongod --dbpath D:/idwzx/project/arctic
            a = Arctic(self.config.ahost)
            a.initialize_library('ashare_{}'.format(self.TYPE))
            self.lib = a['ashare_{}'.format(self.TYPE)]

        self.result_dict = {} 
Example #5
Source File: arctic_loader.py    From dtale with GNU Lesser General Public License v2.1
def loader_func(**kwargs):
    try:
        from arctic import Arctic
        from arctic.store.versioned_item import VersionedItem
    except ImportError:
        raise ImportError("In order to use the arctic loader you must install arctic!")
    host = Arctic(kwargs.get("host"))
    lib = host.get_library(kwargs.get("library"))
    read_kwargs = {}
    start, end = (kwargs.get(p) for p in ["start", "end"])
    if start and end:
        read_kwargs["chunk_range"] = pd.date_range(start, end)
    data = lib.read(kwargs.get("node"), **read_kwargs)
    if isinstance(data, VersionedItem):
        data = data.data
    return data


# IMPORTANT!!! This function is required for building any customized CLI loader. 
Example #6
Source File: arctic_loader.py    From dtale with GNU Lesser General Public License v2.1
def find_loader(kwargs):
    """
    Arctic implementation of data loader which will return a function if any of the
    `click` options based on LOADER_KEY & LOADER_PROPS have been used, otherwise return None

    :param kwargs: Optional keyword arguments to be passed from `click`
    :return: data loader function for arctic implementation
    """
    arctic_opts = get_loader_options(LOADER_KEY, kwargs)
    if len([f for f in arctic_opts.values() if f]):

        def _arctic_loader():
            return loader_func(**arctic_opts)

        return _arctic_loader
    return None 
Example #7
Source File: arctic_store_database.py    From blockchain-predictor with MIT License
def open(self, store='chunkstore'):
		self.db = Arctic('localhost')
		try:
			self.store = self.db[store]
		except:
			self.db.initialize_library(store, lib_type=CHUNK_STORE)
			self.store = self.db[store]
			self.store._arctic_lib.set_quota(maxDBStorage * 1024 * 1024 * 1024) 
Example #8
Source File: backtest.py    From AlgoTrading with MIT License
def __init__(self, agent, cash, stock=0, logging=False, ticker=None):
        self.agent = agent
        self.cash = cash
        self.stock = stock
        self.logging = logging
        self.ticker = ticker
        if logging:
            if ticker is None:
                raise Exception("Ticker not defined. Unable to log records")
            from arctic import Arctic

            self.store = Arctic('localhost')
            self.store.initialize_library('TransactionLogs')
            self.logstore = self.store['TransactionLogs'] 
Example #9
Source File: backtest.py    From AlgoTrading with MIT License
def __init__(self, agent, cash, stock=0, logging=False, ticker=None):
        self.agent = agent
        self.cash = cash
        self.stock = stock
        self.logging = logging
        self.ticker = ticker
        if logging:
            if ticker is None:
                raise Exception("Ticker not defined. Unable to log records")
            from arctic import Arctic

            self.store = Arctic('localhost')
            self.store.initialize_library('TransactionLogs')
            self.logstore = self.store['TransactionLogs'] 
Example #10
Source File: arctic_connection.py    From pysystemtrade with GNU General Public License v3.0
def __repr__(self):
        return "Arctic connection: host %s, db name %s, collection %s" % \
               (self.host, self.database_name, self.collection_name) 
Example #11
Source File: bitmex_pandas_vol.py    From archon with MIT License
def sync():
    store = Arctic('localhost')
    store.initialize_library('Bitmex')
    library = store['Bitmex']
    df = get_candle_pandas()
    print (df)
    library.write('XBTUSD', df, metadata={'source': 'Bitmex'}) 
Example #12
Source File: bitmex_pandas_vol.py    From archon with MIT License
def show():        
    # Connect to Local MONGODB
    store = Arctic('localhost')
    # Create the library - defaults to VersionStore
    store.initialize_library('Bitmex')
    # Access the library
    library = store['Bitmex']
    #library.write('XBTUSD', df, metadata={'source': 'Bitmex'})

    # Reading the data
    item = library.read('XBTUSD')
    xbtusd = item.data
    metadata = item.metadata
    print (xbtusd)
    print (metadata)
    
    xbtusd['ret'] = -1+xbtusd['close']/xbtusd['close'].shift(1)

    from math import sqrt
    xbtusd['vol10'] = sqrt(260)*xbtusd['ret'].rolling(10).std(ddof=0)
    xbtusd['vol30'] = sqrt(260)*xbtusd['ret'].rolling(30).std(ddof=0)

    #print (volList)

    #plt.plot(df.index, df['close'], label='price')
    plt.plot(xbtusd.index, xbtusd['vol10'], label='vol10')
    plt.plot(xbtusd.index, xbtusd['vol30'], label='vol30')
    #plt.plot(xbtusd['ret'])
    plt.ylabel('vol')
    plt.xlabel('Date')
    plt.legend(loc=0)

    plt.show() 
Example #13
Source File: arctic_candle.py    From archon with MIT License
def read_candles():  
    # Connect to Local MONGODB
    store = Arctic('localhost')

    # Create the library - defaults to VersionStore
    store.initialize_library('crypto')

    # Access the library
    library = store['crypto']
    # Reading the data
    item = library.read('XBTUSD')
    xbtusd = item.data

    for x in xbtusd:
        print(x) 
Example #14
Source File: arctic_candle.py    From archon with MIT License
def write_candles():  
    client = broker.get_client(exc.BITMEX)
    candles = client.trades_candle("XBTUSD", mex.candle_1d)
    candles.reverse()

    # Connect to Local MONGODB
    store = Arctic('localhost')

    # Create the library - defaults to VersionStore
    store.initialize_library('crypto')

    # Access the library
    library = store['crypto']

    library.write('XBTUSD', candles, metadata={'source': 'Bitmex'}) 
Example #15
Source File: database_tools_old.py    From blockchain-predictor with MIT License
def getChunkstore():
	chunkStore = getLibrary(storeKey)
	if(chunkStore == None):
		initLibrary(storeKey, CHUNK_STORE)
		chunkStore = getLibrary(storeKey)
		#turn GB to bytes and set the max quota of storage. Arctic's default is 10GB
		chunkStore._arctic_lib.set_quota(maxDBStorage * 1024 * 1024 * 1024)
	return chunkStore 
Example #16
Source File: database_tools_old.py    From blockchain-predictor with MIT License
def init():
	global db
	db = Arctic('localhost')
	updateKeys(masterKey) 
Example #17
Source File: move_keys_mongodb.py    From blockchain-predictor with MIT License
def getChunkstore(db):
	chunkStore = getLibrary(storeKey, db)
	if(chunkStore == None):
		initLibrary(storeKey, db, CHUNK_STORE)
		chunkStore = getLibrary(storeKey, db)
		#turn GB to bytes and set the max quota of storage. Arctic's default is 10GB
		chunkStore._arctic_lib.set_quota(maxDBStorage * 1024 * 1024 * 1024)
	return chunkStore 
Example #18
Source File: models.py    From backtrader-cn with GNU General Public License v3.0
def get_store():
    """
    get Arctic store connection
    :return: arctic connection
    """

    mongo_host = conf.MONGO_HOST
    store = arctic.Arctic(mongo_host)
    return store 
Example #19
Source File: conftest.py    From dtale with GNU Lesser General Public License v2.1
def arctic(mongo_server_module):
    disable_arctic_cache(mongo_server_module.api)
    mongo_server_module.api.drop_database("arctic")
    mongo_server_module.api.drop_database("arctic_{}".format(getpass.getuser()))
    return Arctic(mongo_server_module.api) 
Example #20
Source File: test_bson_store.py    From arctic with GNU Lesser General Public License v2.1
def test_enable_sharding():
    arctic_lib = create_autospec(ArcticLibraryBinding)
    arctic_lib.arctic = create_autospec(Arctic)
    with patch('arctic.store.bson_store.enable_sharding', autospec=True) as enable_sharding:
        arctic_lib.get_top_level_collection.return_value.database.create_collection.__name__ = 'some_name'
        arctic_lib.get_top_level_collection.return_value.database.collection_names.__name__ = 'some_name'
        bsons = BSONStore(arctic_lib)
        bsons.enable_sharding()
        # Check we always set the sharding to be hashed.
        assert enable_sharding.call_args_list == [call(arctic_lib.arctic, arctic_lib.get_name(), hashed=True, key='_id')] 
Example #21
Source File: test_arctic_multithreading.py    From arctic with GNU Lesser General Public License v2.1
def test_multiprocessing_safety_parent_children_race(mongo_host, library_name):
    # Create Arctic and directly fork/start children (no wait)
    total_iterations = 12
    total_processes = 6
    total_writes_per_child = 20

    global MY_ARCTIC

    for i in range(total_iterations):
        processes = list()

        MY_ARCTIC = Arctic(mongo_host=mongo_host)
        for j in range(total_processes):
            p = Process(target=f, args=(library_name, total_writes_per_child, False))
            p.start()  # start directly, don't wait to create first all children procs
            processes.append(p)

        MY_ARCTIC.initialize_library(library_name, VERSION_STORE)  # this will unblock spinning children

        for p in processes:
            p.join()

        for p in processes:
            assert p.exitcode == 0

        MY_ARCTIC.reset()

    assert isinstance(MY_ARCTIC.get_library(library_name), VersionStore) 
Example #22
Source File: fwd_benchmarks.py    From arctic with GNU Lesser General Public License v2.1
def append_random_rows(config, args, n_rows):
    store = Arctic(args.mongodb, app_name="benchmark")
    lib_name = lib_name_from_args(config)

    lib = store[lib_name]

    for _ in range(args.appends):
        for sym in range(args.symbols):
            df = gen_oneminute_dataset(n_row=APPEND_NROWS, n_col=n_rows, dense=False)
            lib.append('sym' + str(sym), df) 
Example #23
Source File: fwd_benchmarks.py    From arctic with GNU Lesser General Public License v2.1
def insert_random_data(config, args, n_rows):
    store = Arctic(args.mongodb, app_name="benchmark")
    lib_name = lib_name_from_args(config)
    store.delete_library(lib_name)
    store.initialize_library(lib_name, segment='month')
    lib = store[lib_name]

    for sym in range(args.symbols):
        df = gen_oneminute_dataset(n_row=n_rows, n_col=n_rows, dense=args.dense)
        lib.write('sym' + str(sym), df) 
Example #24
Source File: benchmarks.py    From arctic with GNU Lesser General Public License v2.1
def __init__(self):
        self.store = Arctic("127.0.0.1") 
Example #25
Source File: benchmarks.py    From arctic with GNU Lesser General Public License v2.1
def __init__(self):
        self.store = Arctic("127.0.0.1") 
Example #26
Source File: benchmarks.py    From arctic with GNU Lesser General Public License v2.1
def setup(self, arg):
        self.store = Arctic("127.0.0.1")
        self.store.delete_library('test.lib')
        self.store.initialize_library('test.lib')
        self.lib = self.store['test.lib'] 
Example #27
Source File: timeseries_interface.py    From plastering with MIT License
def read_from_db(target_building, start_time=None, end_time=None):
    '''
    load the data from for tgt_bldg
    return:
    {
        point name: data
    }
    data is in pandas.DataFrame format with two columns ['date', 'data']
    '''
    if isinstance(start_time, arrow.Arrow):
        start_time = start_time.datetime
    elif isinstance(start_time, (dt, date)):
        pass
    elif start_time == None:
        pass
    else:
        raise ValueError('the type of time value is unknown: {0}'
                         .format(type(start_time)))
    if isinstance(end_time, arrow.Arrow):
        end_time = end_time.datetime
    elif isinstance(end_time, (dt, date)):
        pass
    elif end_time == None:
        pass
    else:
        raise ValueError('the type of time value is unknown: {0}'
                         .format(type(end_time)))
    if start_time and end_time:
        date_range = DateRange(start=start_time, end=end_time)
    else:
        date_range = None

    print ('loading timeseries data from db for %s...'%target_building)

    conn = Arctic('localhost')
    if target_building not in conn.list_libraries():
        raise ValueError('%s not found in the DB!'%target_building)
    else:
        lib = conn[target_building]
        srcids = lib.list_symbols()
        res = {}
        for srcid in srcids:
            data = lib.read(srcid, chunk_range=date_range)
            if len(data) == 0:
                print('WARNING: {0} has empty data.'.format(srcid))
                #pdb.set_trace()
                continue

            res[srcid] = data
        print('correctly done')

        return res 
Example #28
Source File: data_fetch.py    From factorset with MIT License
def data_fetch():
    """
    Read settings from the config, then fetch market data, fundamentals, and other data.
    """
    gc = GetConfig()
    if gc.target == 'all':
        target = pd.read_csv(data.__file__.strip(data.__file__.split('\\')[-1])+'allAShare.csv')
        target = target['0']
    elif gc.target == 'hs300':
        hs300 = ts.get_hs300s()
        hs300.code = hs300.code.apply(code_to_symbol)
        target = hs300.code.tolist()
    else:
        if isinstance(gc.target, str):
            target = gc.target.split(', ')
        assert isinstance(target, list)

    # Arctic Start
    if gc.MONGO:
        from arctic import Arctic
        a = Arctic(gc.ahost)
        a.initialize_library('Ashare')
        lib_stock = a['Ashare']
    else:
        lib_stock = None

    # Stock & index
    print("Start Fetching Stock & Index Data!")
    StockSaver.write_all_stock(target, lib_stock)
    try:
        StockSaver.save_index('000905')
        time.sleep(0.1)
        StockSaver.save_index('000300')
    except IOError as e:
        print(e)
    print("Finish Fetching Stock & Index Data!")

    # Other data
    print("Start Fetching Other Data!")
    OtherData.write_all_date(OtherData.tradecal())
    OtherData.write_new_stocks()
    print("Finish Fetching Other Data!")

    # Fundamental data
    while 1:
        print("Start Fetching Fundamental Data!")
        if len(get_all_proxy(gc.proxypool)) >= gc.proxymin:
            a = FundCrawler('BS')
            a.main(target, num=5)
            b = FundCrawler('IS')
            b.main(target, num=5)
            c = FundCrawler('CF')
            c.main(target, num=5)
            print("Finish Fetching Fundamental Data!")

            break
        else:
            print("Proxy pool is not ready! We only have {} proxies!".format(len(get_all_proxy(gc.proxypool))))
            time.sleep(5)