"Streamlit real time"의 두 판 사이의 차이
DB CAFE
(새 문서: ==streamlit real time chart == ===샘플 === * https://github.com/akutlusenturk/DAQ_GUI <source lang=python> import streamlit as st import time import numpy as np import pandas as pd...) |
|||
(같은 사용자의 중간 판 15개는 보이지 않습니다) | |||
1번째 줄: | 1번째 줄: | ||
==streamlit real time chart == | ==streamlit real time chart == | ||
+ | |||
+ | <source lang=python> | ||
+ | import streamlit as st | ||
+ | import pandas as pd | ||
+ | import cx_Oracle | ||
+ | import altair as alt | ||
+ | import time | ||
+ | import json | ||
+ | |||
+ | # Function to load the database configuration from a JSON file | ||
+ | def load_db_config(file_path): | ||
+ | with open(file_path, 'r') as f: | ||
+ | config = json.load(f) | ||
+ | return config | ||
+ | |||
+ | # Load the database configuration from the JSON file | ||
+ | db_config = load_db_config('db_config.json') | ||
+ | |||
+ | # Setup Oracle connection using cx_Oracle | ||
+ | dsn_tns = cx_Oracle.makedsn(db_config['host'], db_config['port'], service_name=db_config['service_name']) | ||
+ | connection = cx_Oracle.connect(user=db_config['user'], password=db_config['password'], dsn=dsn_tns) | ||
+ | |||
+ | # Streamlit app title | ||
+ | st.title("Real-time Oracle CPU Usage Monitoring") | ||
+ | |||
+ | # Create a placeholder for the chart | ||
+ | chart_placeholder = st.empty() | ||
+ | |||
+ | # Initialize an empty DataFrame to store the results | ||
+ | chart_data = pd.DataFrame(columns=['time', 'cpu_busy', 'cpu_idle']) | ||
+ | |||
+ | # Try-except block to handle any runtime errors or interruptions | ||
+ | try: | ||
+ | while True: | ||
+ | # SQL query to get CPU statistics from V$OSSTAT | ||
+ | query = """ | ||
+ | SELECT | ||
+ | SUM(CASE WHEN stat_name = 'BUSY_TIME' THEN value ELSE 0 END) AS cpu_busy, | ||
+ | SUM(CASE WHEN stat_name = 'IDLE_TIME' THEN value ELSE 0 END) AS cpu_idle | ||
+ | FROM v$osstat | ||
+ | WHERE stat_name IN ('BUSY_TIME', 'IDLE_TIME') | ||
+ | """ | ||
+ | |||
+ | # Execute the query and fetch the result | ||
+ | cursor = connection.cursor() | ||
+ | cursor.execute(query) | ||
+ | result = cursor.fetchone() | ||
+ | |||
+ | # Get the current timestamp | ||
+ | current_time = pd.Timestamp.now() | ||
+ | |||
+ | # Append the new data to the DataFrame | ||
+ | new_data = pd.DataFrame({ | ||
+ | 'time': [current_time], | ||
+ | 'cpu_busy': [result[0]], | ||
+ | 'cpu_idle': [result[1]] | ||
+ | }) | ||
+ | |||
+ | # Calculate the delta (CPU usage since the last fetch) | ||
+ | if not chart_data.empty: | ||
+ | last_row = chart_data.iloc[-1] | ||
+ | new_data['delta_busy'] = new_data['cpu_busy'] - last_row['cpu_busy'] | ||
+ | new_data['delta_idle'] = new_data['cpu_idle'] - last_row['cpu_idle'] | ||
+ | else: | ||
+ | new_data['delta_busy'] = 0 | ||
+ | new_data['delta_idle'] = 0 | ||
+ | |||
+ | # Append the new data to the chart_data DataFrame | ||
+ | chart_data = pd.concat([chart_data, new_data]) | ||
+ | |||
+ | # Create an Altair area chart for CPU usage | ||
+ | chart = alt.Chart(chart_data).transform_fold( | ||
+ | ['delta_busy', 'delta_idle'], | ||
+ | as_=['type', 'value'] | ||
+ | ).mark_area(opacity=0.5).encode( | ||
+ | x='time:T', | ||
+ | y='value:Q', | ||
+ | color='type:N', | ||
+ | tooltip=['time:T', 'value:Q'] | ||
+ | ).interactive() | ||
+ | |||
+ | # Display the chart in the placeholder | ||
+ | chart_placeholder.altair_chart(chart, use_container_width=True) | ||
+ | |||
+ | # Sleep for 3 seconds before fetching the next data point | ||
+ | time.sleep(3) | ||
+ | |||
+ | except KeyboardInterrupt: | ||
+ | st.write("Monitoring stopped.") | ||
+ | |||
+ | finally: | ||
+ | # Close the cursor and the connection to Oracle DB | ||
+ | cursor.close() | ||
+ | connection.close() | ||
+ | </source> | ||
+ | |||
===샘플 === | ===샘플 === | ||
* https://github.com/akutlusenturk/DAQ_GUI | * https://github.com/akutlusenturk/DAQ_GUI | ||
154번째 줄: | 250번째 줄: | ||
# kaydet=0 | # kaydet=0 | ||
</source> | </source> | ||
+ | |||
+ | === oracle ash real-time dash-board === | ||
+ | * matplotlib 이용 | ||
+ | <source lang=python> | ||
+ | pip install streamlit cx_Oracle pandas matplotlib | ||
+ | </source> | ||
+ | |||
+ | <source lang=python> | ||
+ | import cx_Oracle | ||
+ | import pandas as pd | ||
+ | import streamlit as st | ||
+ | import time | ||
+ | |||
+ | # Oracle 데이터베이스 연결 설정 | ||
+ | def connect_to_oracle(user, password, dsn): | ||
+ | try: | ||
+ | connection = cx_Oracle.connect(user, password, dsn) | ||
+ | return connection | ||
+ | except cx_Oracle.DatabaseError as e: | ||
+ | st.error(f"Database connection error: {e}") | ||
+ | return None | ||
+ | |||
+ | # ASH 데이터를 가져오는 함수 | ||
+ | def fetch_ash_data(connection): | ||
+ | query = """ | ||
+ | SELECT sample_time, | ||
+ | session_id, | ||
+ | session_state, | ||
+ | event, | ||
+ | wait_class, | ||
+ | blocking_session, | ||
+ | current_obj#, | ||
+ | current_file#, | ||
+ | current_block# | ||
+ | FROM v$active_session_history | ||
+ | WHERE sample_time > sysdate - INTERVAL '5' MINUTE | ||
+ | """ | ||
+ | df = pd.read_sql(query, connection) | ||
+ | return df | ||
+ | </source> | ||
+ | <source lang=python> | ||
+ | import matplotlib.pyplot as plt | ||
+ | |||
+ | # Streamlit 페이지 구성 | ||
+ | def main(): | ||
+ | st.title("Oracle ASH Real-time Monitoring") | ||
+ | |||
+ | # Oracle 연결 정보 입력받기 | ||
+ | st.sidebar.header("Database Connection") | ||
+ | user = st.sidebar.text_input("Username") | ||
+ | password = st.sidebar.text_input("Password", type="password") | ||
+ | host = st.sidebar.text_input("Host") | ||
+ | port = st.sidebar.text_input("Port", value="1521") | ||
+ | service_name = st.sidebar.text_input("Service Name") | ||
+ | |||
+ | dsn = cx_Oracle.makedsn(host, port, service_name=service_name) | ||
+ | |||
+ | if st.sidebar.button("Connect"): | ||
+ | connection = connect_to_oracle(user, password, dsn) | ||
+ | |||
+ | if connection: | ||
+ | st.success("Connected to Oracle Database!") | ||
+ | |||
+ | # 실시간 모니터링 시작 | ||
+ | placeholder = st.empty() | ||
+ | |||
+ | while True: | ||
+ | # ASH 데이터 가져오기 | ||
+ | df = fetch_ash_data(connection) | ||
+ | |||
+ | # ASH 데이터 프레임 출력 | ||
+ | with placeholder.container(): | ||
+ | st.subheader("Active Session History Data (Last 5 Minutes)") | ||
+ | st.dataframe(df) | ||
+ | |||
+ | # 세션 상태 분포 시각화 | ||
+ | st.subheader("Session State Distribution") | ||
+ | session_state_counts = df['SESSION_STATE'].value_counts() | ||
+ | st.bar_chart(session_state_counts) | ||
+ | |||
+ | # 이벤트별 분포 시각화 | ||
+ | st.subheader("Wait Events Distribution") | ||
+ | event_counts = df['EVENT'].value_counts().head(10) | ||
+ | st.bar_chart(event_counts) | ||
+ | |||
+ | # 주기적으로 데이터 갱신 (5초마다) | ||
+ | time.sleep(5) | ||
+ | |||
+ | if __name__ == "__main__": | ||
+ | main() | ||
+ | </source> | ||
+ | |||
+ | |||
+ | * echart 이용 | ||
+ | <source lang=python> | ||
+ | pip install streamlit cx_Oracle pandas pyecharts streamlit-echarts | ||
+ | </source> | ||
+ | - 오라클 연결은 위와 동일 | ||
+ | |||
+ | <source lang=python> | ||
+ | import cx_Oracle | ||
+ | import pandas as pd | ||
+ | import streamlit as st | ||
+ | import time | ||
+ | from pyecharts.charts import Bar | ||
+ | from pyecharts import options as opts | ||
+ | from streamlit_echarts import st_pyecharts | ||
+ | from pyecharts.charts import Page, Line | ||
+ | from pyecharts.faker import Faker | ||
+ | import random | ||
+ | |||
+ | # Oracle 데이터베이스 연결 설정 | ||
+ | def connect_to_oracle(user, password, dsn): | ||
+ | try: | ||
+ | connection = cx_Oracle.connect(user, password, dsn) | ||
+ | return connection | ||
+ | except cx_Oracle.DatabaseError as e: | ||
+ | st.error(f"Database connection error: {e}") | ||
+ | return None | ||
+ | |||
+ | |||
+ | # CPU 사용률 데이터를 가져오는 함수 | ||
+ | def fetch_cpu_usage(connection): | ||
+ | query = """ | ||
+ | SELECT metric_name, value | ||
+ | FROM v$sysmetric | ||
+ | WHERE metric_name = 'Host CPU Utilization (%)' | ||
+ | ORDER BY begin_time DESC | ||
+ | FETCH FIRST 1 ROWS ONLY | ||
+ | """ | ||
+ | df = pd.read_sql(query, connection) | ||
+ | if not df.empty: | ||
+ | return df['VALUE'].values[0] | ||
+ | return None | ||
+ | |||
+ | # 실시간 CPU 사용률 차트 생성 | ||
+ | def create_cpu_chart(cpu_data): | ||
+ | line = Line() | ||
+ | line.add_xaxis([i for i in range(len(cpu_data))]) | ||
+ | line.add_yaxis("CPU Usage (%)", cpu_data) | ||
+ | line.set_global_opts( | ||
+ | title_opts=opts.TitleOpts(title="Real-time CPU Usage"), | ||
+ | yaxis_opts=opts.AxisOpts(max_=100, min_=0) | ||
+ | ) | ||
+ | return line | ||
+ | |||
+ | |||
+ | # ASH 데이터를 가져오는 함수 | ||
+ | def fetch_ash_data(connection): | ||
+ | query = """ | ||
+ | SELECT sample_time, | ||
+ | session_id, | ||
+ | session_state, | ||
+ | event, | ||
+ | wait_class, | ||
+ | blocking_session, | ||
+ | current_obj#, | ||
+ | current_file#, | ||
+ | current_block# | ||
+ | FROM v$active_session_history | ||
+ | WHERE sample_time > sysdate - INTERVAL '5' MINUTE | ||
+ | """ | ||
+ | df = pd.read_sql(query, connection) | ||
+ | return df | ||
+ | |||
+ | |||
+ | # 세션 상태 분포 차트 생성 | ||
+ | def create_session_state_chart(session_state_counts): | ||
+ | bar = Bar() | ||
+ | bar.add_xaxis(session_state_counts.index.tolist()) | ||
+ | bar.add_yaxis("Session State", session_state_counts.values.tolist()) | ||
+ | bar.set_global_opts(title_opts=opts.TitleOpts(title="Session State Distribution")) | ||
+ | return bar | ||
+ | |||
+ | # 이벤트 분포 차트 생성 | ||
+ | def create_event_chart(event_counts): | ||
+ | bar = Bar() | ||
+ | bar.add_xaxis(event_counts.index.tolist()) | ||
+ | bar.add_yaxis("Wait Events", event_counts.values.tolist()) | ||
+ | bar.set_global_opts(title_opts=opts.TitleOpts(title="Wait Events Distribution")) | ||
+ | return bar | ||
+ | |||
+ | # Function to create a stacked area chart | ||
+ | def create_stacked_area_chart(): | ||
+ | # Sample data | ||
+ | x_data = Faker.choose() | ||
+ | y_data1 = Faker.values() | ||
+ | y_data2 = Faker.values() | ||
+ | y_data3 = Faker.values() | ||
+ | |||
+ | # Create a line chart with stacked area style | ||
+ | chart = Line() | ||
+ | chart.add_xaxis(x_data) | ||
+ | chart.add_yaxis("Series 1", y_data1, stack="stack1") | ||
+ | chart.add_yaxis("Series 2", y_data2, stack="stack1") | ||
+ | chart.add_yaxis("Series 3", y_data3, stack="stack1") | ||
+ | |||
+ | chart.set_global_opts(title_opts=opts.TitleOpts(title="Stacked Area Chart")) | ||
+ | return chart | ||
+ | |||
+ | # Streamlit 페이지 구성 | ||
+ | def main(): | ||
+ | st.title("Oracle ASH Real-time Monitoring with ECharts") | ||
+ | |||
+ | # Oracle 연결 정보 입력받기 | ||
+ | st.sidebar.header("Database Connection") | ||
+ | user = st.sidebar.text_input("Username", value="dbcafe") | ||
+ | password = st.sidebar.text_input("Password", type="password", value="") | ||
+ | host = st.sidebar.text_input("Host", value="localhost") | ||
+ | port = st.sidebar.text_input("Port", value="1521") | ||
+ | service_name = st.sidebar.text_input("Service Name",value="orcl") | ||
+ | |||
+ | dsn = cx_Oracle.makedsn(host, port, service_name=service_name) | ||
+ | |||
+ | |||
+ | if st.sidebar.button("Connect"): | ||
+ | connection = connect_to_oracle(user, password, dsn) | ||
+ | |||
+ | if connection: | ||
+ | st.success("Connected to Oracle Database!") | ||
+ | |||
+ | # 실시간 모니터링 시작 | ||
+ | placeholder = st.empty() | ||
+ | cpu_data = [] | ||
+ | |||
+ | while True: | ||
+ | |||
+ | # CPU 사용률 데이터 가져오기 | ||
+ | cpu_usage = fetch_cpu_usage(connection) | ||
+ | |||
+ | if cpu_usage is not None: | ||
+ | # 최대 20개의 데이터를 저장하고 그려줌 (실시간 업데이트용) | ||
+ | if len(cpu_data) >= 20: | ||
+ | cpu_data.pop(0) | ||
+ | cpu_data.append(cpu_usage) | ||
+ | |||
+ | # CPU 사용률 차트 생성 | ||
+ | cpu_chart = create_cpu_chart(cpu_data) | ||
+ | |||
+ | # 차트 업데이트 | ||
+ | with placeholder.container(): | ||
+ | st.subheader("CPU Usage % ") | ||
+ | st_pyecharts(cpu_chart,key=f'unique_chart_key_{random.randint(1, 10000)}') | ||
+ | |||
+ | # ASH 데이터 가져오기 | ||
+ | df = fetch_ash_data(connection) | ||
+ | |||
+ | # ASH 데이터 프레임 출력 | ||
+ | with placeholder.container(): | ||
+ | st.subheader("Active Session History Data (Last 5 Minutes)") | ||
+ | st.dataframe(df) | ||
+ | |||
+ | # 세션 상태 분포 시각화 | ||
+ | st.title('Stacked Area Chart') | ||
+ | |||
+ | # Generate the chart | ||
+ | stacked_area_chart = create_stacked_area_chart() | ||
+ | |||
+ | # Convert the chart to HTML and display it | ||
+ | # st.components.v1.html(chart.render_embed(), height=500) | ||
+ | st_pyecharts(stacked_area_chart,key=f'unique_chart_key_{random.randint(1, 10000)}') | ||
+ | # 세션 상태 분포 시각화 | ||
+ | st.subheader("Session State Distribution") | ||
+ | session_state_counts = df['SESSION_STATE'].value_counts() | ||
+ | session_state_chart = create_session_state_chart(session_state_counts) | ||
+ | st_pyecharts(session_state_chart,key=f'unique_chart_key_{random.randint(1, 10000)}') | ||
+ | |||
+ | # 이벤트별 분포 시각화 | ||
+ | st.subheader("Wait Events Distribution") | ||
+ | event_counts = df['EVENT'].value_counts().head(10) | ||
+ | event_chart = create_event_chart(event_counts) | ||
+ | st_pyecharts(event_chart,key=f'unique_chart_key_{random.randint(1, 10000)}') | ||
+ | |||
+ | # 주기적으로 데이터 갱신 (5초마다) | ||
+ | time.sleep(5) | ||
+ | |||
+ | if __name__ == "__main__": | ||
+ | main() | ||
+ | |||
+ | </source> | ||
+ | |||
+ | === 주식 시황 모니터링 === | ||
+ | * https://mljar.com/blog/python-dashboard/ | ||
+ | * https://github.com/pplonski/python-dashboard |
2024년 9월 5일 (목) 14:11 기준 최신판
thumb_up 추천메뉴 바로가기
- DBA { Oracle DBA 명령어 > DBA 초급 과정 > DBA 고급 과정 }
- 튜닝 { 오라클 튜닝 목록 }
- 모델링 { 데이터 모델링 가이드 }
1 streamlit real time chart[편집]
import streamlit as st
import pandas as pd
import cx_Oracle
import altair as alt
import time
import json
def load_db_config(file_path):
    """Read Oracle connection settings from a JSON file and return them as a dict."""
    with open(file_path, 'r') as fh:
        return json.loads(fh.read())
# Load the database configuration from the JSON file
db_config = load_db_config('db_config.json')

# Set up the Oracle connection from the configured host/port/service name
dsn_tns = cx_Oracle.makedsn(db_config['host'], db_config['port'], service_name=db_config['service_name'])
connection = cx_Oracle.connect(user=db_config['user'], password=db_config['password'], dsn=dsn_tns)

# Streamlit app title
st.title("Real-time Oracle CPU Usage Monitoring")

# Placeholder that is redrawn with a fresh chart on every polling cycle
chart_placeholder = st.empty()

# Accumulated samples: cumulative counters plus per-interval deltas
chart_data = pd.DataFrame(columns=['time', 'cpu_busy', 'cpu_idle'])

# V$OSSTAT exposes cumulative BUSY_TIME/IDLE_TIME counters; the query is
# loop-invariant, so it is built once instead of on every iteration.
CPU_QUERY = """
    SELECT
        SUM(CASE WHEN stat_name = 'BUSY_TIME' THEN value ELSE 0 END) AS cpu_busy,
        SUM(CASE WHEN stat_name = 'IDLE_TIME' THEN value ELSE 0 END) AS cpu_idle
    FROM v$osstat
    WHERE stat_name IN ('BUSY_TIME', 'IDLE_TIME')
"""

# FIX: the original opened a new cursor on every iteration and never closed
# it (cursor leak), and `finally: cursor.close()` raised NameError when the
# loop was never entered.  A single cursor is created up front and reused.
cursor = connection.cursor()

# Try-except block to handle any runtime errors or interruptions
try:
    while True:
        # Fetch the current cumulative CPU counters
        cursor.execute(CPU_QUERY)
        result = cursor.fetchone()

        # Timestamp for this sample
        current_time = pd.Timestamp.now()

        # One-row frame for the new sample
        new_data = pd.DataFrame({
            'time': [current_time],
            'cpu_busy': [result[0]],
            'cpu_idle': [result[1]]
        })

        # Delta (CPU usage since the last fetch); the first sample has no
        # predecessor, so its deltas are 0
        if not chart_data.empty:
            last_row = chart_data.iloc[-1]
            new_data['delta_busy'] = new_data['cpu_busy'] - last_row['cpu_busy']
            new_data['delta_idle'] = new_data['cpu_idle'] - last_row['cpu_idle']
        else:
            new_data['delta_busy'] = 0
            new_data['delta_idle'] = 0

        # FIX: ignore_index keeps the frame's index unique as samples
        # accumulate (the original concat produced an all-zero index).
        # NOTE(review): chart_data grows without bound; consider trimming to
        # a fixed window for long-running sessions.
        chart_data = pd.concat([chart_data, new_data], ignore_index=True)

        # Area chart of the per-interval busy/idle deltas
        chart = alt.Chart(chart_data).transform_fold(
            ['delta_busy', 'delta_idle'],
            as_=['type', 'value']
        ).mark_area(opacity=0.5).encode(
            x='time:T',
            y='value:Q',
            color='type:N',
            tooltip=['time:T', 'value:Q']
        ).interactive()

        # Redraw the chart in place
        chart_placeholder.altair_chart(chart, use_container_width=True)

        # Poll every 3 seconds
        time.sleep(3)

except KeyboardInterrupt:
    st.write("Monitoring stopped.")

finally:
    # Always release the cursor and the Oracle connection
    cursor.close()
    connection.close()
1.1 샘플[편집]
import streamlit as st
import time
import numpy as np
import pandas as pd

# Pump circulation-test dashboard (DAQ GUI sample).  This demo replays
# previously recorded data from an Excel file instead of reading hardware.
st.set_page_config(
    page_title='Sirkülasyon Testi',
    layout="wide",
    initial_sidebar_state="collapsed",
)

# Sidebar header (Turkish: "Circulation Test — changes reset the program").
st.sidebar.markdown('''
### Sirkülasyon Testi
Yapılacak değişiklikler programı resetler.
''')

# Test identity: pump brand / model / run mode, plus a start timestamp.
marka = st.sidebar.text_input("Pompa Markası:","Brand")
model = st.sidebar.text_input("Pompa Modeli:","Model")
mod = st.sidebar.text_input("Çalıştırma Modu:","dP5.0")
zamandamgasi = time.strftime("]_%Y%b%d_%H.%M.%S")
isim = marka+"_"+model+"_["+mod+zamandamgasi

i = 0
t = 0

# Automatic-valve sweep parameters: start/end aperture, step size, period.
st.sidebar.write("Otomatik Vana Modunda Deney")
Dint = st.sidebar.number_input("Başlangıç Noktası:",value=15.0)
Dfin = st.sidebar.number_input("Bitiş Noktası:",value=70.0)
ds = st.sidebar.number_input("Adım Aralığı",value=0.25)
dt = st.sidebar.number_input("Adımlar Arası Periyot",value=8)

# FIX: the original wrote `kaydet = 0,` — the trailing comma assigns the
# tuple (0,) rather than the integer 0, so the recording checks at the
# bottom of the file (`kaydet == 1`) compared against the wrong type.
if st.sidebar.button("Otomatik Modda Kayıt Başlat"):
    kaydet = 1
else:
    kaydet = 0

# Manual valve control.
st.sidebar.write("Manuel Vana Ayarı")
D = st.sidebar.number_input("Vana Açıklığı [0:Kapalı -90:Açık]",value=0.0)
if st.sidebar.button("Ayarla"):
    # NOTE(review): yaz() is not defined in this snippet — presumably it
    # writes the valve setpoint to the DAQ hardware; confirm its origin.
    yaz(64,[int(D*4)])

# Measurement dictionary: [english_name, turkish_name, symbol, unit].
sozluk =[
    ['Date'                  ,"Tarih"              ,"YMD"  ,""        ],
    ['Time'                  ,"Zaman"              ,"HMS"  ,""        ],
    ['Temperature'           ,"Sıcaklık"           ,"T"    ,"°C"      ],
    ['Flow_Rate'             ,"Debi"               ,"Q"    ,"m³/saat" ],
    ['Valve_Aperture'        ,"Vana Açıklığı"      ,"D"    ,"°"       ],
    ['Static_Pressure'       ,"Durağan Basınç"     ,"P"    ,"Bar"     ],
    ['Suction_Pressure'      ,"Emme Basıncı"       ,"P1"   ,"Bar"     ],
    ['Discharge_Pressure'    ,"Basma Basıncı"      ,"P2"   ,"Bar"     ],
    ['Voltage'               ,"Gerilim"            ,"V"    ,"V"       ],
    ['Current'               ,"Akım"               ,"I"    ,"A"       ],
    ['Measured_Frequency'    ,"Ölçülen Frekans"    ,"f"    ,"Hz"      ],
    ['Active_Power'          ,"Aktif Güç"          ,"PAP"  ,"W"       ],
    ['Reactive_Power'        ,"Reaktif Güç"        ,"QRP"  ,"VAr"     ],
    ['Apperent_Power'        ,"Görünür Güç"        ,"SAP"  ,"VA"      ],
    ['Phase_Voltage_Angle'   ,"Faz Gerilim Açısı"  ,"PVA"  ,"°"       ],
    ['Phase_Current_Angle'   ,"Faz Akım Açısı"     ,"PCA"  ,"°"       ],
    ['Cos_Phi'               ,"Cos_Phi"            ,"PHI"  ,""        ],
    ['Power_Factor'          ,"Güç Faktörü"        ,"PF"   ,""        ],
    ['Differential_Pressure' ,"Fark Basıncı"       ,"DP"   ,"Bar"     ],
    ['Head'                  ,"Basma Yüksekliği"   ,"H"    ,"m"       ],
    ['Hydraulic_Power'       ,"Hidrolik Güç"       ,"PHYD" ,"W"       ],
    ['Efficiency'            ,"Verimlilik"         ,"ETA"  ,"%"       ]
]

# Column headers come from the English names (column 0); use column 1 of
# `sozluk` instead for Turkish headers.
varis = [satir[0] for satir in sozluk]
df = pd.DataFrame(columns=varis)
buffy = pd.DataFrame(columns=varis)

col = st.columns(4)

###################################### C H A R T S ####################################################
col[0].subheader("Debi & Basma Yüksekliği [m³/h,m] \n Flow Rate & Head")
QHt = col[0].line_chart()
col[0].subheader("Hidrolik Güç & Verimlilik [W,%] \n Hydraulic Power & eta")
PHYDETAt = col[0].line_chart()
col[1].subheader("Emme & Basma Basınçları [Bar] \n Suction & Discharge Pressure")
P1P2t = col[1].line_chart()
col[1].subheader("Elektrik Güçleri [W, VA, VAr] \n Active, Reactive & Apperent Power" )
PAPQRPSAP = col[1].line_chart()
col[2].subheader("cos(φ) & Güç Faktörü \n cos(φ) & Power Factor")
PHIPFt = col[2].line_chart()
col[2].subheader("Faz Gerilim & Akım Açıları [°] \n Phase Voltage & Current Angles")
PVAPCAt = col[2].line_chart()

###################################### M E T R I C S ##################################################
col[3].write(f"Deney Kimliği: \n {isim}")
YMDt = col[3].empty()
HMSt = col[3].empty()
Tt = col[3].empty()
Dt = col[3].empty()
Pt = col[3].empty()
Vt = col[3].empty()
ft = col[3].empty()

##################################### D E M O ##########################################################
# Replay recorded data instead of sampling live hardware.
df = pd.read_excel("demo_data.xlsx",index_col=0)

######################################### L O O P ######################################################
for i in range(len(df)):
    buff=df.iloc[i]
    # Latest-value metric tiles (positions follow the `sozluk` column order).
    with Tt:
        st.metric("Hat Sıcaklığı",f"{buff[2]} °C")
    with YMDt:
        st.metric("Tarih",buff[0])
    with HMSt:
        st.metric("Saat",buff[1])
    with Dt:
        st.metric("Vana Açıklığı",f"{buff[4]} °")
    with Pt:
        st.metric("Durağan Basınç",f"{buff[5]} Bar")
    with Vt:
        st.metric("Gerilim",f"{buff[8]} V")
    with ft:
        st.metric("Ölçülen Frekans",f"{buff[10]} Hz")
    # Append one sample to each rolling chart.
    QHt.add_rows([[float(buff[3]),float(buff[19])]])
    PHYDETAt.add_rows([[float(buff[20]),float(buff[21])]])
    P1P2t.add_rows([[float(buff[6]),float(buff[7])]])
    PHIPFt.add_rows([[float(buff[16]),float(buff[17])]])
    PAPQRPSAP.add_rows([[float(buff[11]),float(buff[12]),float(buff[13])]])
    PVAPCAt.add_rows([[float(buff[14]),float(buff[15])]])
    time.sleep(0.1)
    # Automatic-sweep recording logic (disabled in this demo):
    # if kaydet == 1:
    #     if t%dt==0 and Dint<Dfin:
    #         Dint=Dint+ds
    #         yaz(64,[int(Dint*4)])
    #     elif Dint>=Dfin:
    #         df.to_excel(f"{isim}.xlsx")
    #         kaydet=0
1.2 oracle ash real-time dash-board[편집]
- matplotlib 이용
pip install streamlit cx_Oracle pandas matplotlib
import cx_Oracle
import pandas as pd
import streamlit as st
import time
# Oracle database connection helper
def connect_to_oracle(user, password, dsn):
    """Open an Oracle connection; on failure show a Streamlit error and return None."""
    try:
        return cx_Oracle.connect(user, password, dsn)
    except cx_Oracle.DatabaseError as e:
        st.error(f"Database connection error: {e}")
        return None
# ASH (Active Session History) fetch helper
def fetch_ash_data(connection):
    """Return the last 5 minutes of V$ACTIVE_SESSION_HISTORY rows as a DataFrame."""
    query = """
    SELECT sample_time,
           session_id,
           session_state,
           event,
           wait_class,
           blocking_session,
           current_obj#,
           current_file#,
           current_block#
    FROM v$active_session_history
    WHERE sample_time > sysdate - INTERVAL '5' MINUTE
    """
    return pd.read_sql(query, connection)
import matplotlib.pyplot as plt
# Streamlit page layout for the matplotlib-based ASH monitor
def main():
    """Render the ASH monitoring page: connection sidebar plus an auto-refreshing table and charts."""
    st.title("Oracle ASH Real-time Monitoring")

    # Connection parameters entered in the sidebar
    st.sidebar.header("Database Connection")
    user = st.sidebar.text_input("Username")
    password = st.sidebar.text_input("Password", type="password")
    host = st.sidebar.text_input("Host")
    port = st.sidebar.text_input("Port", value="1521")
    service_name = st.sidebar.text_input("Service Name")

    dsn = cx_Oracle.makedsn(host, port, service_name=service_name)

    if st.sidebar.button("Connect"):
        connection = connect_to_oracle(user, password, dsn)

        if connection:
            st.success("Connected to Oracle Database!")

            # Start real-time monitoring; the placeholder is redrawn each cycle
            placeholder = st.empty()

            # NOTE(review): this loop never exits, so the Streamlit script
            # run blocks here until the app is stopped or rerun — confirm
            # this is the intended refresh mechanism.
            while True:
                # Fetch the last 5 minutes of ASH samples
                df = fetch_ash_data(connection)

                # Redraw the table and both distributions in place
                with placeholder.container():
                    st.subheader("Active Session History Data (Last 5 Minutes)")
                    st.dataframe(df)

                    # Session-state distribution
                    st.subheader("Session State Distribution")
                    session_state_counts = df['SESSION_STATE'].value_counts()
                    st.bar_chart(session_state_counts)

                    # Top-10 wait events in the window
                    st.subheader("Wait Events Distribution")
                    event_counts = df['EVENT'].value_counts().head(10)
                    st.bar_chart(event_counts)

                # Refresh every 5 seconds
                time.sleep(5)

if __name__ == "__main__":
    main()
- echarts 이용
pip install streamlit cx_Oracle pandas pyecharts streamlit-echarts
- 오라클 연결은 위와 동일
import cx_Oracle
import pandas as pd
import streamlit as st
import time
from pyecharts.charts import Bar
from pyecharts import options as opts
from streamlit_echarts import st_pyecharts
from pyecharts.charts import Page, Line
from pyecharts.faker import Faker
import random
# Oracle database connection helper
def connect_to_oracle(user, password, dsn):
    """Connect to Oracle with the given credentials/DSN; report errors via Streamlit and return None on failure."""
    try:
        return cx_Oracle.connect(user, password, dsn)
    except cx_Oracle.DatabaseError as e:
        st.error(f"Database connection error: {e}")
        return None
# Host-CPU utilisation fetch helper
def fetch_cpu_usage(connection):
    """Return the most recent 'Host CPU Utilization (%)' value from V$SYSMETRIC, or None when no row exists."""
    query = """
    SELECT metric_name, value
    FROM v$sysmetric
    WHERE metric_name = 'Host CPU Utilization (%)'
    ORDER BY begin_time DESC
    FETCH FIRST 1 ROWS ONLY
    """
    metrics = pd.read_sql(query, connection)
    if metrics.empty:
        return None
    return metrics['VALUE'].values[0]
# Real-time CPU usage chart builder
def create_cpu_chart(cpu_data):
    """Build a pyecharts Line chart of the rolling CPU-usage samples on a fixed 0-100% axis."""
    chart = Line()
    chart.add_xaxis(list(range(len(cpu_data))))
    chart.add_yaxis("CPU Usage (%)", cpu_data)
    chart.set_global_opts(
        title_opts=opts.TitleOpts(title="Real-time CPU Usage"),
        yaxis_opts=opts.AxisOpts(max_=100, min_=0)
    )
    return chart
# ASH (Active Session History) fetch helper
def fetch_ash_data(connection):
    """Fetch the last 5 minutes of V$ACTIVE_SESSION_HISTORY samples into a DataFrame."""
    query = """
    SELECT sample_time,
           session_id,
           session_state,
           event,
           wait_class,
           blocking_session,
           current_obj#,
           current_file#,
           current_block#
    FROM v$active_session_history
    WHERE sample_time > sysdate - INTERVAL '5' MINUTE
    """
    return pd.read_sql(query, connection)
# Session-state distribution chart builder
def create_session_state_chart(session_state_counts):
    """Build a pyecharts Bar chart from a Series of session-state counts."""
    state_bar = Bar()
    state_bar.add_xaxis(session_state_counts.index.tolist())
    state_bar.add_yaxis("Session State", session_state_counts.values.tolist())
    state_bar.set_global_opts(title_opts=opts.TitleOpts(title="Session State Distribution"))
    return state_bar
# Wait-event distribution chart builder
def create_event_chart(event_counts):
    """Build a pyecharts Bar chart from a Series of wait-event counts."""
    event_bar = Bar()
    event_bar.add_xaxis(event_counts.index.tolist())
    event_bar.add_yaxis("Wait Events", event_counts.values.tolist())
    event_bar.set_global_opts(title_opts=opts.TitleOpts(title="Wait Events Distribution"))
    return event_bar
# Demo stacked-area chart (random pyecharts Faker data)
def create_stacked_area_chart():
    """Build a demo stacked Line chart from three random Faker series on a shared stack."""
    categories = Faker.choose()
    series = [Faker.values(), Faker.values(), Faker.values()]

    area = Line()
    area.add_xaxis(categories)
    for idx, values in enumerate(series, start=1):
        area.add_yaxis(f"Series {idx}", values, stack="stack1")

    area.set_global_opts(title_opts=opts.TitleOpts(title="Stacked Area Chart"))
    return area
# Streamlit page layout for the ECharts-based monitor
def main():
    """Render the ECharts ASH dashboard: CPU trend, ASH table, and distribution charts."""
    st.title("Oracle ASH Real-time Monitoring with ECharts")

    # Connection parameters from the sidebar (defaults for a local instance)
    st.sidebar.header("Database Connection")
    user = st.sidebar.text_input("Username", value="dbcafe")
    password = st.sidebar.text_input("Password", type="password", value="")
    host = st.sidebar.text_input("Host", value="localhost")
    port = st.sidebar.text_input("Port", value="1521")
    service_name = st.sidebar.text_input("Service Name",value="orcl")

    dsn = cx_Oracle.makedsn(host, port, service_name=service_name)

    if st.sidebar.button("Connect"):
        connection = connect_to_oracle(user, password, dsn)

        if connection:
            st.success("Connected to Oracle Database!")

            # Real-time monitoring loop; the placeholder is redrawn each cycle
            placeholder = st.empty()
            # Rolling window of CPU-usage samples (capped at 20 points below)
            cpu_data = []

            # NOTE(review): this loop never returns; the script blocks here
            # until the app is stopped or rerun — confirm this is intended.
            while True:

                # Latest host CPU utilisation sample
                cpu_usage = fetch_cpu_usage(connection)

                if cpu_usage is not None:
                    # Keep at most 20 samples for the rolling chart
                    if len(cpu_data) >= 20:
                        cpu_data.pop(0)
                    cpu_data.append(cpu_usage)

                    # Build the CPU usage chart from the rolling window
                    cpu_chart = create_cpu_chart(cpu_data)

                    # NOTE(review): a random key is generated per redraw to
                    # avoid duplicate-element-key errors; keys are therefore
                    # non-deterministic across reruns — confirm acceptable.
                    with placeholder.container():
                        st.subheader("CPU Usage % ")
                        st_pyecharts(cpu_chart,key=f'unique_chart_key_{random.randint(1, 10000)}')

                # Last 5 minutes of ASH samples
                df = fetch_ash_data(connection)

                # NOTE(review): calling placeholder.container() again replaces
                # the CPU chart rendered above — confirm the intended layout.
                with placeholder.container():
                    st.subheader("Active Session History Data (Last 5 Minutes)")
                    st.dataframe(df)

                    # Demo stacked-area section (random Faker data)
                    st.title('Stacked Area Chart')

                    # Generate the chart
                    stacked_area_chart = create_stacked_area_chart()

                    # Alternative embed left for reference:
                    # st.components.v1.html(chart.render_embed(), height=500)
                    st_pyecharts(stacked_area_chart,key=f'unique_chart_key_{random.randint(1, 10000)}')
                    # Session-state distribution
                    st.subheader("Session State Distribution")
                    session_state_counts = df['SESSION_STATE'].value_counts()
                    session_state_chart = create_session_state_chart(session_state_counts)
                    st_pyecharts(session_state_chart,key=f'unique_chart_key_{random.randint(1, 10000)}')

                    # Top-10 wait events
                    st.subheader("Wait Events Distribution")
                    event_counts = df['EVENT'].value_counts().head(10)
                    event_chart = create_event_chart(event_counts)
                    st_pyecharts(event_chart,key=f'unique_chart_key_{random.randint(1, 10000)}')

                # Refresh every 5 seconds
                time.sleep(5)

if __name__ == "__main__":
    main()