baohuynhbk14 committed on
Commit
24a0f48
·
1 Parent(s): 4d0481d

Add logging for modified conversation history in prediction function

Browse files
Files changed (1) hide show
  1. app.py +10 -9
app.py CHANGED
@@ -210,6 +210,9 @@ def predict(message,
210
  DEFAULT_IMAGE_TOKEN not in first_user_message:
211
  state.messages[index]['content'] = DEFAULT_IMAGE_TOKEN + "\n" + first_user_message
212
 
 
 
 
213
  response, conv_history = model.chat(tokenizer, pixel_values, question, generation_config, history=history, return_history=True)
214
  return response, conv_history
215
 
@@ -265,15 +268,13 @@ def http_bot(
265
  logger.info(f"==== Image paths ====\n{all_image_paths}")
266
 
267
  response, _ = predict(message,
268
- all_image_paths[0],
269
- state,
270
- max_input_tiles,
271
- temperature,
272
- max_new_tokens,
273
- top_p,
274
- repetition_penalty)
275
- # logger.info(f"==== AI history ====\n{conv_history}")
276
-
277
 
278
  # response = "This is a test response"
279
  buffer = ""
 
210
  DEFAULT_IMAGE_TOKEN not in first_user_message:
211
  state.messages[index]['content'] = DEFAULT_IMAGE_TOKEN + "\n" + first_user_message
212
 
213
+ history = state.get_prompt()
214
+ logger.info(f"==== Modified History ====\n{history}")
215
+
216
  response, conv_history = model.chat(tokenizer, pixel_values, question, generation_config, history=history, return_history=True)
217
  return response, conv_history
218
 
 
268
  logger.info(f"==== Image paths ====\n{all_image_paths}")
269
 
270
  response, _ = predict(message,
271
+ all_image_paths if len(all_image_paths) > 0 else None,
272
+ state,
273
+ max_input_tiles,
274
+ temperature,
275
+ max_new_tokens,
276
+ top_p,
277
+ repetition_penalty)
 
 
278
 
279
  # response = "This is a test response"
280
  buffer = ""