JUNGU committed on
Commit 35745e0 · verified · 1 Parent(s): e71bfbd

Update app.py

Files changed (1)
  1. app.py +82 -64
app.py CHANGED
@@ -11,7 +11,7 @@ from collections import Counter
 import json
 import os
 from datetime import datetime, timedelta
-import openai
+from openai import OpenAI  # new client-style import
 from dotenv import load_dotenv
 import traceback
 import plotly.graph_objects as go
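The one-line import change above is the heart of this commit: every call site moves from module-level state (`openai.api_key`, `openai.chat.completions.create(...)`) to an explicit `OpenAI` client instance kept in session state. A minimal sketch of the two styles, with a placeholder key:

    from openai import OpenAI

    # Before: module-level global state
    #   import openai
    #   openai.api_key = "sk-..."
    #   response = openai.chat.completions.create(...)

    # After: an explicit client object, which is what this commit adopts
    client = OpenAI(api_key="sk-...")  # placeholder key
    response = client.chat.completions.create(
        model="gpt-4.1-mini",
        messages=[{"role": "user", "content": "ping"}],
    )
    print(response.choices[0].message.content)

An explicit client also makes per-user keys straightforward, which the sidebar input added later in this diff relies on.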
@@ -36,65 +36,87 @@ class SchedulerState:
         self.scheduled_jobs = []
         self.scheduled_results = []
 
-# Create the global scheduler state object (used inside threads)
+# Create the global scheduler state object
 global_scheduler_state = SchedulerState()
 
 # Initialize session state for API key management
 if 'openai_api_key' not in st.session_state:
     st.session_state.openai_api_key = None
+    st.session_state.openai_client = None
 
-# Try loading the API key from environment variables
-load_dotenv()
-if os.getenv('OPENAI_API_KEY'):
-    st.session_state.openai_api_key = os.getenv('OPENAI_API_KEY')
-elif 'OPENAI_API_KEY' in st.secrets:
-    st.session_state.openai_api_key = st.secrets['OPENAI_API_KEY']
+# Try loading the API key from several sources
+load_dotenv()  # try the .env file
+
+# 1. Check the environment variable
+if os.environ.get('OPENAI_API_KEY'):
+    st.session_state.openai_api_key = os.environ.get('OPENAI_API_KEY')
+    try:
+        # Create the client without a proxies argument
+        st.session_state.openai_client = OpenAI(api_key=st.session_state.openai_api_key)
+    except Exception as e:
+        st.error(f"OpenAI ํด๋ผ์ด์–ธํŠธ ์ดˆ๊ธฐํ™” ์˜ค๋ฅ˜: {str(e)}")
+
+# 2. Check Streamlit secrets (wrapped in try/except to avoid errors)
+if not st.session_state.openai_api_key:
+    try:
+        if 'OPENAI_API_KEY' in st.secrets:
+            st.session_state.openai_api_key = st.secrets['OPENAI_API_KEY']
+            try:
+                st.session_state.openai_client = OpenAI(api_key=st.session_state.openai_api_key)
+            except Exception as e:
+                st.error(f"OpenAI ํด๋ผ์ด์–ธํŠธ ์ดˆ๊ธฐํ™” ์˜ค๋ฅ˜: {str(e)}")
+    except Exception as e:
+        pass  # a missing secrets file is not an error
+
+# Point NLTK at a temporary, writable data directory
+nltk_data_dir = '/tmp/nltk_data'
+os.makedirs(nltk_data_dir, exist_ok=True)
+nltk.data.path.insert(0, nltk_data_dir)  # search this path first
 
 # Download required NLTK data
 try:
     nltk.data.find('tokenizers/punkt')
 except LookupError:
-    nltk.download('punkt')
-
-try:
-    nltk.data.find('tokenizers/punkt_tab')
-except LookupError:
-    nltk.download('punkt_tab')
+    nltk.download('punkt', download_dir=nltk_data_dir)
 
 try:
     nltk.data.find('corpora/stopwords')
 except LookupError:
-    nltk.download('stopwords')
-
-# Set the OpenAI API key (in practice, prefer environment variables or Streamlit secrets)
-if 'OPENAI_API_KEY' in os.environ:
-    openai.api_key = os.environ['OPENAI_API_KEY']
-elif 'OPENAI_API_KEY' in st.secrets:
-    openai.api_key = st.secrets['OPENAI_API_KEY']
-elif os.getenv('OPENAI_API_KEY'):
-    openai.api_key = os.getenv('OPENAI_API_KEY')
+    nltk.download('stopwords', download_dir=nltk_data_dir)
 
 # Page configuration
 st.set_page_config(page_title="๋‰ด์Šค ๊ธฐ์‚ฌ ๋„๊ตฌ", page_icon="๐Ÿ“ฐ", layout="wide")
 
-# Sidebar menu setup
-st.sidebar.title("๋‰ด์Šค ๊ธฐ์‚ฌ ๋„๊ตฌ")
-menu = st.sidebar.radio(
-    "๋ฉ”๋‰ด ์„ ํƒ",
-    ["๋‰ด์Šค ๊ธฐ์‚ฌ ํฌ๋กค๋ง", "๊ธฐ์‚ฌ ๋ถ„์„ํ•˜๊ธฐ", "์ƒˆ ๊ธฐ์‚ฌ ์ƒ์„ฑํ•˜๊ธฐ", "๋‰ด์Šค ๊ธฐ์‚ฌ ์˜ˆ์•ฝํ•˜๊ธฐ"]
-)
+# Add an API key input field to the sidebar
+with st.sidebar:
+    st.title("๋‰ด์Šค ๊ธฐ์‚ฌ ๋„๊ตฌ")
+    menu = st.radio(
+        "๋ฉ”๋‰ด ์„ ํƒ",
+        ["๋‰ด์Šค ๊ธฐ์‚ฌ ํฌ๋กค๋ง", "๊ธฐ์‚ฌ ๋ถ„์„ํ•˜๊ธฐ", "์ƒˆ ๊ธฐ์‚ฌ ์ƒ์„ฑํ•˜๊ธฐ", "๋‰ด์Šค ๊ธฐ์‚ฌ ์˜ˆ์•ฝํ•˜๊ธฐ"]
+    )
+
+    st.divider()
+    api_key = st.text_input("OpenAI API ํ‚ค ์ž…๋ ฅ", type="password")
+    if api_key:
+        st.session_state.openai_api_key = api_key
+        try:
+            # Create the client without a proxies argument
+            st.session_state.openai_client = OpenAI(api_key=api_key)
+            st.success("API ํ‚ค๊ฐ€ ์„ค์ •๋˜์—ˆ์Šต๋‹ˆ๋‹ค!")
+        except Exception as e:
+            st.error(f"OpenAI ํด๋ผ์ด์–ธํŠธ ์ดˆ๊ธฐํ™” ์˜ค๋ฅ˜: {str(e)}")
 
 # Function to load saved articles
 def load_saved_articles():
-    if os.path.exists('saved_articles/articles.json'):
-        with open('saved_articles/articles.json', 'r', encoding='utf-8') as f:
+    if os.path.exists('/tmp/saved_articles/articles.json'):
+        with open('/tmp/saved_articles/articles.json', 'r', encoding='utf-8') as f:
            return json.load(f)
    return []
 
 # Function to save articles
 def save_articles(articles):
-    os.makedirs('saved_articles', exist_ok=True)
-    with open('saved_articles/articles.json', 'w', encoding='utf-8') as f:
+    os.makedirs('/tmp/saved_articles', exist_ok=True)
+    with open('/tmp/saved_articles/articles.json', 'w', encoding='utf-8') as f:
        json.dump(articles, f, ensure_ascii=False, indent=2)
 
 @st.cache_data
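Two deployment details in this hunk are easy to miss. First, accessing `st.secrets` can raise when no secrets.toml exists, which is why the lookup is now wrapped in try/except. Second, NLTK downloads are redirected to a writable temp directory, which matters on hosts with a read-only app directory such as Hugging Face Spaces. A standalone sketch of the NLTK pattern as used here:

    import os
    import nltk

    # Use a writable location; /tmp exists on most Linux hosts, including Spaces.
    nltk_data_dir = '/tmp/nltk_data'
    os.makedirs(nltk_data_dir, exist_ok=True)
    nltk.data.path.insert(0, nltk_data_dir)  # search this directory before the defaults

    # Download each resource only if it is missing.
    for find_path, package in [('tokenizers/punkt', 'punkt'),
                               ('corpora/stopwords', 'stopwords')]:
        try:
            nltk.data.find(find_path)
        except LookupError:
            nltk.download(package, download_dir=nltk_data_dir)

Inserting the directory at the front of `nltk.data.path` ensures `nltk.data.find` sees the freshly downloaded copies first.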
@@ -249,7 +271,6 @@ def extract_keywords_for_wordcloud(text, top_n=50):
 
 
 # Word cloud generation function
-
 def generate_wordcloud(keywords_dict):
     if not WordCloud:
         st.warning("์›Œ๋“œํด๋ผ์šฐ๋“œ ์„ค์น˜์•ˆ๋˜์–ด ์žˆ์Šต๋‹ˆ๋‹ค.")
@@ -316,10 +337,13 @@ def analyze_news_content(news_df):
         results['top_keywords'] = []
     return results
 
-# Generate a new article with the OpenAI API
+# Generate a new article with the OpenAI API (new client style)
 def generate_article(original_content, prompt_text):
     try:
-        response = openai.chat.completions.create(
+        if not st.session_state.openai_client:
+            return "OpenAI API ํ‚ค๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค."
+
+        response = st.session_state.openai_client.chat.completions.create(
             model="gpt-4.1-mini",
             messages=[
                 {"role": "system", "content": "๋‹น์‹ ์€ ์ „๋ฌธ์ ์ธ ๋‰ด์Šค ๊ธฐ์ž์ž…๋‹ˆ๋‹ค. ์ฃผ์–ด์ง„ ๋‚ด์šฉ์„ ๋ฐ”ํƒ•์œผ๋กœ ์ƒˆ๋กœ์šด ๊ธฐ์‚ฌ๋ฅผ ์ž‘์„ฑํ•ด์ฃผ์„ธ์š”."},
@@ -331,15 +355,19 @@ def generate_article(original_content, prompt_text):
     except Exception as e:
         return f"๊ธฐ์‚ฌ ์ƒ์„ฑ ์˜ค๋ฅ˜: {str(e)}"
 
-# Generate an image with the OpenAI API
+# Generate an image with the OpenAI API (new client style)
 def generate_image(prompt):
     try:
-        response = openai.images.generate(
-            model="gpt-image-1",
-            prompt=prompt
+        if not st.session_state.openai_client:
+            return "OpenAI API ํ‚ค๊ฐ€ ์„ค์ •๋˜์ง€ ์•Š์•˜์Šต๋‹ˆ๋‹ค."
+
+        response = st.session_state.openai_client.images.generate(
+            model="dall-e-3",  # or another available model
+            prompt=prompt,
+            n=1,
+            size="1024x1024"
         )
-        image_base64 = response.data[0].b64_json
-        return f"data:image/png;base64,{image_base64}"
+        return response.data[0].url  # the new call returns a URL
     except Exception as e:
         return f"์ด๋ฏธ์ง€ ์ƒ์„ฑ ์˜ค๋ฅ˜: {str(e)}"
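Note the return-type change in this hunk: `gpt-image-1` responses carry base64 data in `b64_json`, while `dall-e-3` responses default to a short-lived URL in `response.data[0].url`, so the base64 decoding disappears (Streamlit's `st.image` accepts a URL directly). If the app ever needs the raw bytes, for example to save the image, a small sketch using `requests` (not imported in the app as shown, so an added dependency):

    import requests

    def fetch_image_bytes(url: str) -> bytes:
        # DALL-E image URLs are short-lived, so fetch promptly after generation.
        resp = requests.get(url, timeout=30)
        resp.raise_for_status()
        return resp.content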
 
@@ -370,9 +398,9 @@ def perform_news_task(task_type, keyword, num_articles, file_prefix):
         time.sleep(0.5)  # avoid overloading the server
 
     # Save the results
-    os.makedirs('scheduled_news', exist_ok=True)
+    os.makedirs('/tmp/scheduled_news', exist_ok=True)
     timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
-    filename = f"scheduled_news/{file_prefix}_{task_type}_{timestamp}.json"
+    filename = f"/tmp/scheduled_news/{file_prefix}_{task_type}_{timestamp}.json"
 
     with open(filename, 'w', encoding='utf-8') as f:
         json.dump(articles, f, ensure_ascii=False, indent=2)
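This hunk, together with the earlier `saved_articles` change, relocates every write under /tmp, typically the only writable path in a containerized Space; the tradeoff is that /tmp is ephemeral, so saved articles and scheduled results vanish on restart. One way to keep the paths in a single place, as a refactoring sketch that is not part of the commit (the env var name is hypothetical):

    import os

    # Hypothetical helper; base dir is overridable for local runs.
    DATA_DIR = os.environ.get('NEWS_TOOL_DATA_DIR', '/tmp')

    def data_path(*parts: str) -> str:
        # Join under the base dir and ensure the parent directory exists.
        path = os.path.join(DATA_DIR, *parts)
        os.makedirs(os.path.dirname(path), exist_ok=True)
        return path

    # Usage sketch:
    # filename = data_path('scheduled_news', f"{file_prefix}_{task_type}_{timestamp}.json")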
@@ -511,7 +539,7 @@ if menu == "๋‰ด์Šค ๊ธฐ์‚ฌ ํฌ๋กค๋ง":
             st.write(f"**์š”์•ฝ:** {article['description']}")
             st.write(f"**๋งํฌ:** {article['link']}")
             st.write("**๋ณธ๋ฌธ ๋ฏธ๋ฆฌ๋ณด๊ธฐ:**")
-            st.write(article['content'][:300] + "...")
+            st.write(article['content'][:300] + "..." if len(article['content']) > 300 else article['content'])
 
 elif menu == "๊ธฐ์‚ฌ ๋ถ„์„ํ•˜๊ธฐ":
     st.header("๊ธฐ์‚ฌ ๋ถ„์„ํ•˜๊ธฐ")
@@ -546,7 +574,6 @@ elif menu == "๊ธฐ์‚ฌ ๋ถ„์„ํ•˜๊ธฐ":
     keyword_tab1, keyword_tab2 = st.tabs(["ํ‚ค์›Œ๋“œ ๋นˆ๋„", "์›Œ๋“œํด๋ผ์šฐ๋“œ"])
 
     with keyword_tab1:
-
         keywords = analyze_keywords(selected_article['content'])
 
         # Visualization
@@ -622,13 +649,7 @@ elif menu == "๊ธฐ์‚ฌ ๋ถ„์„ํ•˜๊ธฐ":
         try:
             nltk.data.find('taggers/averaged_perceptron_tagger')
         except LookupError:
-            nltk.download('averaged_perceptron_tagger')
-
-        # Try using the correct resource name as shown in the error message
-        try:
-            nltk.data.find('averaged_perceptron_tagger_eng')
-        except LookupError:
-            nltk.download('averaged_perceptron_tagger_eng')
+            nltk.download('averaged_perceptron_tagger', download_dir=nltk_data_dir)
 
         # Language detection (simple heuristic)
         is_korean = bool(re.search(r'[๊ฐ€-ํžฃ]', content))
@@ -699,13 +720,11 @@ elif menu == "๊ธฐ์‚ฌ ๋ถ„์„ํ•˜๊ธฐ":
 
 elif analysis_type == "๊ฐ์ • ๋ถ„์„":
     if st.button("๊ฐ์ • ๋ถ„์„ํ•˜๊ธฐ"):
-        if st.session_state.openai_api_key:
+        if st.session_state.openai_client:
             with st.spinner("๊ธฐ์‚ฌ์˜ ๊ฐ์ •์„ ๋ถ„์„ ์ค‘์ž…๋‹ˆ๋‹ค..."):
                 try:
-                    openai.api_key = st.session_state.openai_api_key
-
-                    # Set up the sentiment-analysis prompt
-                    response = openai.chat.completions.create(
+                    # Set up the sentiment-analysis prompt (new client style)
+                    response = st.session_state.openai_client.chat.completions.create(
                         model="gpt-4.1-mini",
                         messages=[
                             {"role": "system", "content": "๋‹น์‹ ์€ ํ…์ŠคํŠธ์˜ ๊ฐ์ •๊ณผ ๋…ผ์กฐ๋ฅผ ๋ถ„์„ํ•˜๋Š” ์ „๋ฌธ๊ฐ€์ž…๋‹ˆ๋‹ค. ๋‹ค์Œ ๋‰ด์Šค ๊ธฐ์‚ฌ์˜ ๊ฐ์ •๊ณผ ๋…ผ์กฐ๋ฅผ ๋ถ„์„ํ•˜๊ณ , '๊ธ์ •์ ', '๋ถ€์ •์ ', '์ค‘๋ฆฝ์ ' ์ค‘ ํ•˜๋‚˜๋กœ ๋ถ„๋ฅ˜ํ•ด ์ฃผ์„ธ์š”. ๋˜ํ•œ ๊ธฐ์‚ฌ์—์„œ ๋“œ๋Ÿฌ๋‚˜๋Š” ํ•ต์‹ฌ ๊ฐ์ • ํ‚ค์›Œ๋“œ๋ฅผ 5๊ฐœ ์ถ”์ถœํ•˜๊ณ , ๊ฐ ํ‚ค์›Œ๋“œ๋ณ„๋กœ 1-10 ์‚ฌ์ด์˜ ๊ฐ•๋„ ์ ์ˆ˜๋ฅผ ๋งค๊ฒจ์ฃผ์„ธ์š”. JSON ํ˜•์‹์œผ๋กœ ๋‹ค์Œ๊ณผ ๊ฐ™์ด ์‘๋‹ตํ•ด์ฃผ์„ธ์š”: {'sentiment': '๊ธ์ •์ /๋ถ€์ •์ /์ค‘๋ฆฝ์ ', 'reason': '์ด์œ  ์„ค๋ช…...', 'keywords': [{'word': 'ํ‚ค์›Œ๋“œ1', 'score': 8}, {'word': 'ํ‚ค์›Œ๋“œ2', 'score': 7}, ...]}"},
@@ -715,7 +734,7 @@ elif menu == "๊ธฐ์‚ฌ ๋ถ„์„ํ•˜๊ธฐ":
                         response_format={"type": "json_object"}
                     )
 
-                    # Parse the JSON
+                    # Parse the JSON (new client style)
                     analysis_result = json.loads(response.choices[0].message.content)
 
                     # Visualize the results
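The bare `json.loads` works here because `response_format={"type": "json_object"}` puts the model in JSON mode, which guarantees syntactically valid JSON output (the API also expects the word "JSON" to appear in the prompt when this mode is on, which the system message above satisfies). JSON mode does not guarantee the schema, though, so a defensive parse is cheap insurance; a sketch with assumed defaults:

    import json

    def parse_sentiment(raw: str) -> dict:
        # JSON mode guarantees valid JSON, not a valid schema, so fill defaults.
        data = json.loads(raw)
        return {
            'sentiment': data.get('sentiment', '์ค‘๋ฆฝ์ '),
            'reason': data.get('reason', ''),
            'keywords': data.get('keywords', []),
        }

    # Usage sketch:
    # analysis_result = parse_sentiment(response.choices[0].message.content)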
@@ -902,8 +921,7 @@ elif menu == "์ƒˆ ๊ธฐ์‚ฌ ์ƒ์„ฑํ•˜๊ธฐ":
     generate_image_too = st.checkbox("๊ธฐ์‚ฌ ์ƒ์„ฑ ํ›„ ์ด๋ฏธ์ง€๋„ ํ•จ๊ป˜ ์ƒ์„ฑํ•˜๊ธฐ", value=True)
 
     if st.button("์ƒˆ ๊ธฐ์‚ฌ ์ƒ์„ฑํ•˜๊ธฐ"):
-        if st.session_state.openai_api_key:
-            openai.api_key = st.session_state.openai_api_key
+        if st.session_state.openai_client:
             with st.spinner("๊ธฐ์‚ฌ๋ฅผ ์ƒ์„ฑ ์ค‘์ž…๋‹ˆ๋‹ค..."):
                 new_article = generate_article(selected_article['content'], prompt_text)
@@ -1095,13 +1113,13 @@ elif menu == "๋‰ด์Šค ๊ธฐ์‚ฌ ์˜ˆ์•ฝํ•˜๊ธฐ":
     )
 
     # View collected files
-    if os.path.exists('scheduled_news'):
-        files = [f for f in os.listdir('scheduled_news') if f.endswith('.json')]
+    if os.path.exists('/tmp/scheduled_news'):
+        files = [f for f in os.listdir('/tmp/scheduled_news') if f.endswith('.json')]
         if files:
             st.subheader("์ˆ˜์ง‘๋œ ํŒŒ์ผ ์—ด๊ธฐ")
             selected_file = st.selectbox("ํŒŒ์ผ ์„ ํƒ", files, index=len(files)-1)
             if selected_file and st.button("ํŒŒ์ผ ๋‚ด์šฉ ๋ณด๊ธฐ"):
-                with open(os.path.join('scheduled_news', selected_file), 'r', encoding='utf-8') as f:
+                with open(os.path.join('/tmp/scheduled_news', selected_file), 'r', encoding='utf-8') as f:
                     articles = json.load(f)
 
                 st.write(f"**ํŒŒ์ผ๋ช…:** {selected_file}")
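A small behavioral note on this last hunk: `st.selectbox(..., index=len(files)-1)` defaults to the last entry, which is only the newest file if the listing is sorted, and `os.listdir` returns entries in arbitrary order. Because the filenames embed a `%Y%m%d_%H%M%S` timestamp, lexicographic order matches chronological order, so one added `sorted()` would pin the intended behavior (a suggested tweak, not part of this commit):

    files = sorted(f for f in os.listdir('/tmp/scheduled_news') if f.endswith('.json'))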
 