Python streamlit.write() Examples
The following are 7 code examples of streamlit.write().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module streamlit, or try the search function.
![](https://www.programcreek.com/common/static/images/search.png)
Example #1
Source File: app.py From demo-self-driving with Apache License 2.0 | 6 votes |
def run_the_app():
    """Run the main object-detection demo UI.

    Loads the frame metadata, lets the user pick a frame and detector
    parameters from the sidebar, then renders the frame twice: once with
    human-annotated ground-truth boxes and once with YOLO v3 detections.
    """
    # To make Streamlit fast, st.cache allows us to reuse computation across runs.
    # In this common pattern, we download data from an endpoint only once.
    @st.cache
    def load_metadata(url):
        return pd.read_csv(url)

    # This function uses some Pandas magic to summarize the metadata DataFrame:
    # one row per frame, one count column per object label.
    @st.cache
    def create_summary(metadata):
        one_hot_encoded = pd.get_dummies(metadata[["frame", "label"]], columns=["label"])
        summary = one_hot_encoded.groupby(["frame"]).sum().rename(columns={
            "label_biker": "biker",
            "label_car": "car",
            "label_pedestrian": "pedestrian",
            "label_trafficLight": "traffic light",
            "label_truck": "truck",
        })
        return summary

    # An amazing property of st.cached functions is that you can pipe them into
    # one another to form a computation DAG (directed acyclic graph). Streamlit
    # recomputes only whatever subset is required to get the right answer!
    metadata = load_metadata(os.path.join(DATA_URL_ROOT, "labels.csv.gz"))
    summary = create_summary(metadata)

    # Uncomment these lines to peek at these DataFrames.
    # st.write('## Metadata', metadata[:1000], '## Summary', summary[:1000])

    # Draw the UI elements to search for objects (pedestrians, cars, etc.)
    selected_frame_index, selected_frame = frame_selector_ui(summary)
    # Fix: compare against None with `is`, not `==` (PEP 8; `==` can be
    # hijacked by __eq__ and is the wrong identity test here).
    if selected_frame_index is None:
        st.error("No frames fit the criteria. Please select different label or number.")
        return

    # Draw the UI element to select parameters for the YOLO object detector.
    confidence_threshold, overlap_threshold = object_detector_ui()

    # Load the image from S3.
    image_url = os.path.join(DATA_URL_ROOT, selected_frame)
    image = load_image(image_url)

    # Add boxes for objects on the image. These are the boxes for the ground image.
    boxes = metadata[metadata.frame == selected_frame].drop(columns=["frame"])
    draw_image_with_boxes(image, boxes, "Ground Truth",
                          "**Human-annotated data** (frame `%i`)" % selected_frame_index)

    # Get the boxes for the objects detected by YOLO by running the YOLO model.
    yolo_boxes = yolo_v3(image, confidence_threshold, overlap_threshold)
    draw_image_with_boxes(image, yolo_boxes, "Real-time Computer Vision",
                          "**YOLO v3 Model** (overlap `%3.1f`) (confidence `%3.1f`)"
                          % (overlap_threshold, confidence_threshold))
Example #2
Source File: data_utils.py From nl2sql with MIT License | 6 votes |
def load_data(sql_path, table_path, use_small=False): sql_data = [] table_data = {} st.write("Loading data from %s" % sql_path) with open(sql_path) as lines: for idx, line in enumerate(lines): if use_small and idx >= 1000: break sql = json.loads(line.strip()) sql_data.append(sql) with open(table_path) as lines: for line in lines: tab = json.loads(line.strip()) table_data[tab[u'id']] = tab for sql in sql_data: assert sql[u'table_id'] in table_data return sql_data, table_data
Example #3
Source File: streamlit_demo.py From RecNN with Apache License 2.0 | 6 votes |
def get_mov_base(): links = load_links() movies_embeddings_tensor, key_to_id, id_to_key = get_embeddings() meta = load_omdb_meta() popular = pd.read_csv(DATAPATH + 'movie_counts.csv')[:SHOW_TOPN_MOVIES] st.write(popular['id']) mov_base = {} for i, k in list(meta.items()): tmdid = int(meta[i]['tmdbId']) if tmdid > 0 and popular['id'].isin([i]).any(): movieid = pd.to_numeric(links.loc[tmdid]['movieId']) if isinstance(movieid, pd.Series): continue mov_base[int(movieid)] = meta[i]['omdb']['Title'] return mov_base
Example #4
Source File: beta_distribution.py From minimal-streamlit-example with MIT License | 6 votes |
def plot_dist(alpha_value: float, beta_value: float, data: np.ndarray = None): beta_dist = beta(alpha_value, beta_value) xs = np.linspace(0, 1, 1000) ys = beta_dist.pdf(xs) fig, ax = plt.subplots(figsize=(7, 3)) ax.plot(xs, ys) ax.set_xlim(0, 1) ax.set_xlabel("x") ax.set_ylabel("P(x)") if data is not None: likelihoods = beta_dist.pdf(data) sum_log_likelihoods = np.sum(beta_dist.logpdf(data)) ax.vlines(data, ymin=0, ymax=likelihoods) ax.scatter(data, likelihoods, color="black") st.write( f""" _Under your alpha={alpha_slider:.2f} and beta={beta_slider:.2f}, the sum of log likelihoods is {sum_log_likelihoods:.2f}_ """ ) st.pyplot(fig)
Example #5
Source File: app.py From demo-self-driving with Apache License 2.0 | 5 votes |
def download_file(file_path): # Don't download the file twice. (If possible, verify the download using the file length.) if os.path.exists(file_path): if "size" not in EXTERNAL_DEPENDENCIES[file_path]: return elif os.path.getsize(file_path) == EXTERNAL_DEPENDENCIES[file_path]["size"]: return # These are handles to two visual elements to animate. weights_warning, progress_bar = None, None try: weights_warning = st.warning("Downloading %s..." % file_path) progress_bar = st.progress(0) with open(file_path, "wb") as output_file: with urllib.request.urlopen(EXTERNAL_DEPENDENCIES[file_path]["url"]) as response: length = int(response.info()["Content-Length"]) counter = 0.0 MEGABYTES = 2.0 ** 20.0 while True: data = response.read(8192) if not data: break counter += len(data) output_file.write(data) # We perform animation by overwriting the elements. weights_warning.warning("Downloading %s... (%6.2f/%6.2f MB)" % (file_path, counter / MEGABYTES, length / MEGABYTES)) progress_bar.progress(min(counter / length, 1.0)) # Finally, we remove these visual elements by calling .empty(). finally: if weights_warning is not None: weights_warning.empty() if progress_bar is not None: progress_bar.empty() # This is the main app app itself, which appears when the user selects "Run the app".
Example #6
Source File: data_utils.py From nl2sql with MIT License | 5 votes |
def print_sample_data(index, sql_data, table_data): query = qu.Query(sql_data[index]['sql']['sel'], sql_data[index]['sql']['agg'], sql_data[index]['sql']['conds']) st.write('**Sample data:**') st.write('*Question*: %s' % sql_data[index][u'question']) st.write('*Query*: %s' % repr(query)) st.write('*Table columns*: %s' % ', '.join(['{}: {}'.format(i, x) for i,x in \ enumerate(table_data[sql_data[index][u'table_id']][u'header'])]))
Example #7
Source File: viewer.py From mqtt-camera-streamer with GNU General Public License v3.0 | 5 votes |
def on_connect(client, userdata, flags, rc): st.write( f"Connected with result code {str(rc)} to MQTT broker on {MQTT_BROKER}" ) # The callback for when a PUBLISH message is received from the server.