Python asgiref.sync.async_to_sync() Examples
The following are 23 code examples of asgiref.sync.async_to_sync(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module asgiref.sync, or try the search function.
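Before diving into the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of what async_to_sync() does: it wraps an awaitable callable so synchronous code can call it and get the result back.

import asyncio

from asgiref.sync import async_to_sync


async def fetch_greeting(name):
    # Stand-in for real asynchronous work.
    await asyncio.sleep(0.1)
    return f"Hello, {name}!"


# async_to_sync() returns a synchronous callable that runs the coroutine
# on an event loop and returns its result.
greet = async_to_sync(fetch_greeting)
print(greet("world"))  # Hello, world!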
Example #1
Source File: listener.py From resolwe with Apache License 2.0 | 6 votes |
def _abort_processing(self, obj):
    """Abort processing of the current data object.

    Also notify worker and frontend.
    """
    async_to_sync(self._send_reply)(
        obj, {ExecutorProtocol.RESULT: ExecutorProtocol.RESULT_ERROR}
    )
    async_to_sync(consumer.send_event)(
        {
            WorkerProtocol.COMMAND: WorkerProtocol.ABORT,
            WorkerProtocol.DATA_ID: obj[ExecutorProtocol.DATA_ID],
            WorkerProtocol.FINISH_COMMUNICATE_EXTRA: {
                "executor": getattr(settings, "FLOW_EXECUTOR", {}).get(
                    "NAME", "resolwe.flow.executors.local"
                ),
            },
        }
    )
Example #2
Source File: nabairqualityd_test.py From pynab with GNU General Public License v3.0 | 6 votes |
def test_perform(self):
    config = models.Config.load()
    config.index_airquality = "aqi"
    config.visual_airquality = "always"
    config.localisation = None
    config.save()
    service = NabAirqualityd()
    writer = MockWriter()
    service.writer = writer
    config_t = ("aqi", "always")
    expiration = datetime.datetime(2019, 4, 22, 0, 0, 0)
    async_to_sync(service.perform)(expiration, "today", config_t)
    self.assertEqual(len(writer.written), 2)
    packet = writer.written[0]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "info")
    self.assertEqual(packet_json["info_id"], "nabairqualityd")
    self.assertTrue("animation" in packet_json)
    packet = writer.written[1]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "message")
    self.assertTrue("signature" in packet_json)
    self.assertTrue("body" in packet_json)
Example #3
Source File: nabweatherd_test.py From pynab with GNU General Public License v3.0 | 6 votes |
def test_perform(self):
    service = NabWeatherd()
    writer = MockWriter()
    service.writer = writer
    config_t = ("75005", NabWeatherd.UNIT_CELSIUS, "weather")
    expiration = datetime.datetime(2019, 4, 22, 0, 0, 0)
    async_to_sync(service.perform)(expiration, "today", config_t)
    self.assertEqual(len(writer.written), 3)
    packet = writer.written[0]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "info")
    self.assertEqual(packet_json["info_id"], "nabweatherd_rain")
    packet = writer.written[1]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "info")
    self.assertEqual(packet_json["info_id"], "nabweatherd")
    self.assertTrue("animation" in packet_json)
    packet = writer.written[2]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "message")
    self.assertTrue("signature" in packet_json)
    self.assertTrue("body" in packet_json)
Example #4
Source File: nabweatherd_test.py From pynab with GNU General Public License v3.0 | 6 votes |
def test_perform_rain(self):
    service = NabWeatherd()
    writer = MockWriter()
    service.writer = writer
    config_t = ("75005", NabWeatherd.UNIT_CELSIUS, "rain")
    expiration = datetime.datetime(2019, 4, 22, 0, 0, 0)
    async_to_sync(service.perform)(expiration, "today", config_t)
    self.assertEqual(len(writer.written), 3)
    packet = writer.written[0]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "info")
    self.assertEqual(packet_json["info_id"], "nabweatherd_rain")
    packet = writer.written[1]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "info")
    self.assertEqual(packet_json["info_id"], "nabweatherd")
    self.assertFalse("animation" in packet_json)
    packet = writer.written[2]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "message")
    self.assertTrue("signature" in packet_json)
    self.assertTrue("body" in packet_json)
Example #5
Source File: nabweatherd_test.py From pynab with GNU General Public License v3.0 | 6 votes |
def test_perform_both(self):
    service = NabWeatherd()
    writer = MockWriter()
    service.writer = writer
    config_t = ("75005", NabWeatherd.UNIT_CELSIUS, "both")
    expiration = datetime.datetime(2019, 4, 22, 0, 0, 0)
    async_to_sync(service.perform)(expiration, "today", config_t)
    self.assertEqual(len(writer.written), 3)
    packet = writer.written[0]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "info")
    self.assertEqual(packet_json["info_id"], "nabweatherd_rain")
    packet = writer.written[1]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "info")
    self.assertEqual(packet_json["info_id"], "nabweatherd")
    self.assertTrue("animation" in packet_json)
    packet = writer.written[2]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "message")
    self.assertTrue("signature" in packet_json)
    self.assertTrue("body" in packet_json)
Example #6
Source File: tools.py From scout_apm_python with MIT License | 6 votes |
def async_test(func):
    """
    Wrap async_to_sync with another function because Pytest complains about
    collecting the resulting callable object as a test, since it's not a true
    function:

        PytestCollectionWarning: cannot collect 'test_foo' because it is
        not a function.
    """
    # Inner import because asgiref is needed for the Python 3.6+ tests only.
    from asgiref.sync import async_to_sync

    sync_func = async_to_sync(func)

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return sync_func(*args, **kwargs)

    return wrapper
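As a hedged usage sketch (the test name and body are illustrative, not part of the scout_apm_python suite), the decorator above would typically be applied to a coroutine test like this:

import functools

from asgiref.sync import async_to_sync


def async_test(func):
    # Same wrapping idea as Example #6 above.
    sync_func = async_to_sync(func)

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return sync_func(*args, **kwargs)

    return wrapper


@async_test
async def test_addition():
    # Pytest collects a plain function; async_to_sync() drives the coroutine.
    assert 1 + 1 == 2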
Example #7
Source File: nabairqualityd_test.py From pynab with GNU General Public License v3.0 | 5 votes |
def test_asr(self):
    config = models.Config.load()
    config.index_airquality = "aqi"
    config.visual_airquality = "always"
    config.localisation = None
    config.save()
    service = NabAirqualityd()
    writer = MockWriter()
    service.writer = writer
    config_t = "aqi"
    expiration = datetime.datetime(2019, 4, 22, 0, 0, 0)
    packet = {
        "type": "asr_event",
        "nlu": {"intent": "nabairqualityd/forecast"},
    }
    async_to_sync(service.process_nabd_packet)(packet)
    print(writer.written)
    self.assertEqual(len(writer.written), 2)
    packet = writer.written[0]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "info")
    self.assertEqual(packet_json["info_id"], "nabairqualityd")
    self.assertTrue("animation" in packet_json)
    packet = writer.written[1]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "message")
    self.assertTrue("signature" in packet_json)
    self.assertTrue("body" in packet_json)
Example #8
Source File: nabweatherd_test.py From pynab with GNU General Public License v3.0 | 5 votes |
def test_asr(self):
    config = models.Config.load()
    config.location = "75005"
    config.unit = NabWeatherd.UNIT_CELSIUS
    config.weather_animation_type = "weather"
    config.save()
    service = NabWeatherd()
    writer = MockWriter()
    service.writer = writer
    packet = {"type": "asr_event", "nlu": {"intent": "nabweatherd/forecast"}}
    async_to_sync(service.process_nabd_packet)(packet)
    self.assertEqual(len(writer.written), 3)
    packet = writer.written[0]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "info")
    self.assertEqual(packet_json["info_id"], "nabweatherd_rain")
    packet = writer.written[1]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "info")
    self.assertEqual(packet_json["info_id"], "nabweatherd")
    self.assertTrue("animation" in packet_json)
    packet = writer.written[2]
    packet_json = json.loads(packet.decode("utf8"))
    self.assertEqual(packet_json["type"], "message")
    self.assertTrue("signature" in packet_json)
    self.assertTrue("body" in packet_json)
Example #9
Source File: nabweatherd_test.py From pynab with GNU General Public License v3.0 | 5 votes |
def test_fetch_info_data(self):
    service = NabWeatherd()
    data = async_to_sync(service.fetch_info_data)(
        ("75005", NabWeatherd.UNIT_CELSIUS, "both")
    )
    self.assertTrue("current_weather_class" in data)
    self.assertTrue("today_forecast_weather_class" in data)
    self.assertTrue("today_forecast_max_temp" in data)
    self.assertTrue("tomorrow_forecast_weather_class" in data)
    self.assertTrue("tomorrow_forecast_max_temp" in data)
    self.assertTrue("next_rain" in data)
    self.assertTrue("weather_animation_type" in data)
Example #10
Source File: test_decorators.py From cjworkbench with GNU Affero General Public License v3.0 | 5 votes |
def handle(self, **kwargs):
    """handlers.handle(), synchronous."""
    request = self.build_request(**kwargs)
    return async_to_sync(handlers.handle)(request)
Example #11
Source File: test_twitter.py From cjworkbench with GNU Affero General Public License v3.0 | 5 votes |
def fetch(params, secret, stored_dataframe=None):
    async def get_stored_dataframe():
        return stored_dataframe

    secrets = {"twitter_credentials": secret} if secret else {}
    return async_to_sync(twitter.fetch)(
        params, secrets=secrets, get_stored_dataframe=get_stored_dataframe
    )
Example #12
Source File: test_urlscraper.py From cjworkbench with GNU Affero General Public License v3.0 | 5 votes |
def fetch(params, input_dataframe):
    async def get_input_dataframe():
        return input_dataframe

    return async_to_sync(urlscraper.fetch)(
        params, get_input_dataframe=get_input_dataframe
    )
Example #13
Source File: test_scrapetable.py From cjworkbench with GNU Affero General Public License v3.0 | 5 votes |
def fetch(**kwargs):
    params = P(**kwargs)
    return async_to_sync(scrapetable.fetch)(params)
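Examples #11-#13 all use the same pattern: the module under test exposes an async fetch() coroutine, and the test wraps it with async_to_sync() so ordinary synchronous test code can call it. A generic sketch of that pattern, with placeholder names rather than cjworkbench APIs:

from asgiref.sync import async_to_sync


# Placeholder for a module-level coroutine such as urlscraper.fetch().
async def fetch(params, get_input_dataframe=None):
    input_dataframe = None
    if get_input_dataframe is not None:
        input_dataframe = await get_input_dataframe()
    return {"params": params, "input": input_dataframe}


def fetch_sync(params, input_dataframe=None):
    # Test helper: provide the async callback and run fetch() synchronously.
    async def get_input_dataframe():
        return input_dataframe

    return async_to_sync(fetch)(params, get_input_dataframe=get_input_dataframe)


result = fetch_sync({"url": "https://example.com"}, input_dataframe=[1, 2, 3])
assert result["input"] == [1, 2, 3]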
Example #14
Source File: consumers.py From chain with Apache License 2.0 | 5 votes |
def disconnect(self, close_code):
    async_to_sync(self.channel_layer.group_discard)(self.scope['user'].username, self.channel_name)

# class StatsConsumer(WebsocketConsumer):
#
#     def connect(self):
#         async_to_sync(self.channel_layer.group_add)(self.scope['user'].username, self.channel_name)
#
#         self.accept()
#
#     def receive(self, text_data):
#         key = '-'.join(('django-mstats-processlist', str(self.scope['user'].uid)))
#         cache.set(key, 'start', timeout=None)
#         show_processlist.delay(host=text_data, user=self.scope['user'].username, key=key)
#
#         async_to_sync(self.channel_layer.group_send)(
#             self.scope['user'].username,
#             {
#                 "type": "user.message",
#                 "text": text_data,
#             },
#         )
#
#     def user_message(self, event):
#         self.send(text_data=event["text"])
#
#     def disconnect(self, close_code):
#         key = '-'.join(('django-mstats-processlist', str(self.scope['user'].uid)))
#         cache.set(key, 'end', timeout=None)
#         async_to_sync(self.channel_layer.group_discard)(self.scope['user'].username, self.channel_name)
Example #15
Source File: consumers.py From chain with Apache License 2.0 | 5 votes |
def receive(self, text_data):
    async_to_sync(self.channel_layer.group_send)(
        self.scope['user'].username,
        {
            "type": "user.message",
            "text": text_data,
        },
    )
Example #16
Source File: consumers.py From chain with Apache License 2.0 | 5 votes |
def connect(self):
    # Create a Channels group named after the user and register this
    # channel in it via the channel layer (backed by Redis).
    async_to_sync(self.channel_layer.group_add)(self.scope['user'].username, self.channel_name)
    # Accept the connection so incoming messages are handled by receive().
    self.accept()
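Examples #14-#16 are methods of the same synchronous WebsocketConsumer. For context, a minimal consumer combining them could look like the sketch below; the user_message handler is inferred from the "user.message" event type used in Example #15 and is not shown in the original listing.

from asgiref.sync import async_to_sync
from channels.generic.websocket import WebsocketConsumer


class EchoConsumer(WebsocketConsumer):
    def connect(self):
        # The channel layer API is async, so every call is wrapped with
        # async_to_sync() inside this synchronous consumer.
        self.group_name = self.scope["user"].username
        async_to_sync(self.channel_layer.group_add)(self.group_name, self.channel_name)
        self.accept()

    def receive(self, text_data):
        # Fan the incoming message out to every channel in the group.
        async_to_sync(self.channel_layer.group_send)(
            self.group_name,
            {"type": "user.message", "text": text_data},
        )

    def user_message(self, event):
        # Called for "user.message" events delivered by group_send().
        self.send(text_data=event["text"])

    def disconnect(self, close_code):
        async_to_sync(self.channel_layer.group_discard)(self.group_name, self.channel_name)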
Example #17
Source File: listener.py From resolwe with Apache License 2.0 | 5 votes |
def handle_abort(self, obj):
    """Handle an incoming ``Data`` abort processing request.

    .. IMPORTANT::

        This only makes manager's state consistent and doesn't affect
        Data object in any way. Any changes to the Data must be applied
        over ``handle_update`` method.

    :param obj: The Channels message object. Command object format:

        .. code-block:: none

            {
                'command': 'abort',
                'data_id': [id of the :class:`~resolwe.flow.models.Data`
                            object this command was triggered by],
            }
    """
    async_to_sync(consumer.send_event)(
        {
            WorkerProtocol.COMMAND: WorkerProtocol.ABORT,
            WorkerProtocol.DATA_ID: obj[ExecutorProtocol.DATA_ID],
            WorkerProtocol.FINISH_COMMUNICATE_EXTRA: {
                "executor": getattr(settings, "FLOW_EXECUTOR", {}).get(
                    "NAME", "resolwe.flow.executors.local"
                ),
            },
        }
    )
Example #18
Source File: listener.py From resolwe with Apache License 2.0 | 5 votes |
def handle_get_files_to_download(self, obj):
    """Get a list of files belonging to a given storage location object.

    :param obj: The Channels message object. Command object format:

        .. code-block:: none

            {
                'command': 'get_files_to_download',
                'data_id': [id of the :class:`~resolwe.flow.models.Data` object],
                'storage_location_id': id of the
                    :class:`~resolwe.storage.models.StorageLocation` object.
            }
    """
    try:
        storage_location_id = obj[ExecutorProtocol.STORAGE_LOCATION_ID]
        location = StorageLocation.objects.get(pk=storage_location_id)
    except StorageLocation.DoesNotExist:
        logger.error(
            "StorageLocation object does not exist (handle_get_files_to_download).",
            extra={"storage_location_id": storage_location_id},
        )
        self._abort_processing(obj)
        return

    async_to_sync(self._send_reply)(
        obj,
        {
            ExecutorProtocol.RESULT: ExecutorProtocol.RESULT_OK,
            ExecutorProtocol.REFERENCED_FILES: list(location.files.values()),
        },
    )
Example #19
Source File: listener.py From resolwe with Apache License 2.0 | 5 votes |
def handle_get_referenced_files(self, obj):
    """Get a list of files referenced by the data object.

    To get the entire output this request must be sent after processing
    is finished.

    :param obj: The Channels message object. Command object format:

        .. code-block:: none

            {
                'command': 'get_referenced_data',
                'data_id': [id of the :class:`~resolwe.flow.models.Data` object],
            }
    """
    try:
        data_id = obj[ExecutorProtocol.DATA_ID]
        data = Data.objects.get(pk=data_id)
    except Data.DoesNotExist:
        logger.error(
            "Data object does not exist (handle_get_referenced_files).",
            extra={"data_id": data_id},
        )
        self._abort_processing(obj)
        return

    async_to_sync(self._send_reply)(
        obj,
        {
            ExecutorProtocol.RESULT: ExecutorProtocol.RESULT_OK,
            ExecutorProtocol.REFERENCED_FILES: referenced_files(data),
        },
    )
Example #20
Source File: test_observer.py From djangochannelsrestframework with MIT License | 4 votes |
async def test_model_observer_wrapper_in_transaction(settings):
    settings.CHANNEL_LAYERS = {
        "default": {
            "BACKEND": "channels.layers.InMemoryChannelLayer",
            "TEST_CONFIG": {"expiry": 100500},
        },
    }
    layer = channel_layers.make_test_backend(DEFAULT_CHANNEL_LAYER)

    class TestConsumer(AsyncAPIConsumer):
        async def accept(self):
            await TestConsumer.user_change.subscribe(self)
            await super().accept()

        @model_observer(get_user_model())
        async def user_change(self, message, observer=None, **kwargs):
            await self.send_json(message)

    communicator = WebsocketCommunicator(TestConsumer, "/testws/")
    connected, _ = await communicator.connect()
    assert connected

    @database_sync_to_async
    def create_user_and_wait():
        with transaction.atomic():
            user = get_user_model().objects.create(
                username="test", email="test@example.com"
            )
            # receive_nothing() is a coroutine function, so wrap the callable
            # with async_to_sync() and then call it from this sync context.
            assert async_to_sync(communicator.receive_nothing)(timeout=0.1)
            user.username = "mike"
            user.save()
            assert async_to_sync(communicator.receive_nothing)(timeout=0.1)
            return user

    user = await create_user_and_wait()

    response = await communicator.receive_json_from()
    assert {"action": "create", "pk": user.pk, "type": "user.change"} == response

    await communicator.disconnect()
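The notable detail in Example #20 is that async_to_sync() is called from inside a function wrapped with database_sync_to_async(): the synchronous body runs in a worker thread with no event loop of its own, and async_to_sync() bridges the coroutine back to the outer loop. A stripped-down illustration of that round trip (all names here are illustrative):

import asyncio

from asgiref.sync import async_to_sync, sync_to_async


async def ping():
    await asyncio.sleep(0)
    return "pong"


def blocking_work():
    # Runs in the worker thread started by sync_to_async(); async_to_sync()
    # schedules ping() on the outer event loop and blocks until it finishes.
    return async_to_sync(ping)()


async def main():
    assert await sync_to_async(blocking_work)() == "pong"


asyncio.run(main())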
Example #21
Source File: listener.py From resolwe with Apache License 2.0 | 4 votes |
def handle_storage_location_lock(self, obj):
    """Handle an incoming request to lock StorageLocation object.

    Lock is implemented by creating AccessLog object with finish date
    set to None.

    :param obj: The Channels message object. Command object format:

        .. code-block:: none

            {
                'command': 'storage_location_lock',
                'data_id': [id of the :class:`~resolwe.flow.models.Data` object],
                'storage_location_id': id of storage location,
                'storage_location_lock_reason': reason for lock.
            }
    """
    storage_location_id = obj[ExecutorProtocol.STORAGE_LOCATION_ID]
    query = StorageLocation.all_objects.filter(pk=storage_location_id)
    if not query.exists():
        # Log error and continue
        logger.error(
            "StorageLocation does not exist",
            extra={"storage_location_id": storage_location_id},
        )
        async_to_sync(self._send_reply)(
            obj, {ExecutorProtocol.RESULT: ExecutorProtocol.RESULT_ERROR}
        )
        return

    storage_location = query.get()
    access_log = AccessLog.objects.create(
        storage_location=storage_location,
        reason=obj[ExecutorProtocol.STORAGE_LOCATION_LOCK_REASON],
    )
    async_to_sync(self._send_reply)(
        obj,
        {
            ExecutorProtocol.RESULT: ExecutorProtocol.RESULT_OK,
            ExecutorProtocol.STORAGE_ACCESS_LOG_ID: access_log.id,
        },
    )
Example #22
Source File: listener.py From resolwe with Apache License 2.0 | 4 votes |
def handle_download_started(self, obj):
    """Handle an incoming request to start downloading data.

    We have to check if the download for given StorageLocation object
    has already started.

    :param obj: The Channels message object. Command object format:

        .. code-block:: none

            {
                'command': 'download_started',
                'data_id': [id of the :class:`~resolwe.flow.models.Data` object],
                'storage_location_id': id of storage location,
                'download_started_lock': obtain lock, defaults to False
            }
    """
    storage_location_id = obj[ExecutorProtocol.STORAGE_LOCATION_ID]
    lock = obj.get(ExecutorProtocol.DOWNLOAD_STARTED_LOCK, False)
    query = StorageLocation.all_objects.select_for_update().filter(
        pk=storage_location_id
    )
    with transaction.atomic():
        if not query.exists():
            # Log error and abort
            logger.error(
                "StorageLocation for downloaded data does not exist",
                extra={"storage_location_id": storage_location_id},
            )
            self._abort_processing(obj)
            return
        storage_location = query.get()
        return_status = {
            StorageLocation.STATUS_PREPARING: ExecutorProtocol.DOWNLOAD_STARTED,
            StorageLocation.STATUS_UPLOADING: ExecutorProtocol.DOWNLOAD_IN_PROGRESS,
            StorageLocation.STATUS_DONE: ExecutorProtocol.DOWNLOAD_FINISHED,
        }[storage_location.status]
        if storage_location.status == StorageLocation.STATUS_PREPARING and lock:
            storage_location.status = StorageLocation.STATUS_UPLOADING
            storage_location.save()

    async_to_sync(self._send_reply)(
        obj,
        {
            ExecutorProtocol.RESULT: ExecutorProtocol.RESULT_OK,
            ExecutorProtocol.DOWNLOAD_RESULT: return_status,
        },
    )
Example #23
Source File: listener.py From resolwe with Apache License 2.0 | 4 votes |
def _queue_response_channel(self, obj):
    """Generate the feedback channel name from the object's id.

    :param obj: The Channels message object.
    """
    return "{}.{}".format(
        state.MANAGER_EXECUTOR_CHANNELS.queue_response,
        obj[ExecutorProtocol.DATA_ID],
    )

# The handle_* methods are all Django synchronized, meaning they're run
# in separate threads. Having this method be sync and calling async_to_sync
# on rpush itself would mean reading self._redis from the sync thread,
# which isn't very tidy. If it's async, it'll be called from the main
# thread by the async_to_sync calls in handle_*.
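The comment block above explains why the reply helper stays async. The _send_reply coroutine that the handle_* methods wrap with async_to_sync() is not reproduced in this listing; purely as an illustration of the shape such a helper could take, the sketch below substitutes the standard Channels channel-layer send() call for resolwe's direct Redis rpush.

from channels.layers import get_channel_layer


class ListenerSketch:
    """Illustrative only; not resolwe's actual listener implementation."""

    def _queue_response_channel(self, obj):
        # Mirrors Example #23: one response channel per data object id.
        return "{}.{}".format("manager.queue_response", obj["data_id"])

    async def _send_reply(self, obj, reply):
        # Hypothetical reply helper, invoked from the synchronous handle_*
        # methods as async_to_sync(self._send_reply)(obj, reply).
        channel_layer = get_channel_layer()
        await channel_layer.send(self._queue_response_channel(obj), reply)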