Fix bug of ppt and googlescholar (#167)

* fix bug of ppt and googlescholar
* Format required parameters
.gitignore

```diff
@@ -160,3 +160,4 @@ cython_debug/
 #.idea/
 .vscode/
 docs/*/_build/
+tmp_dir/
```
```diff
@@ -142,7 +142,7 @@ class StreamlitUI:
                     belong='assistant',
                     end='<|action_end|>\n',
                 ), ),
-            )
+            max_turn=7)

     def render_user(self, prompt: str):
         with st.chat_message('user'):
```
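The one functional change here caps the agent's reasoning loop at seven rounds. Below is a minimal sketch of the pattern, assuming the demo wires up lagent's `Internlm2Agent`; only `max_turn` and the `Internlm2Protocol` fields appear in the diff itself, the import paths and the model checkpoint are assumptions.

```python
from lagent.agents.internlm2_agent import Internlm2Agent, Internlm2Protocol
from lagent.llms import HFTransformerCasualLM

# hypothetical checkpoint; any chat-tuned InternLM2 model would do
model = HFTransformerCasualLM(path='internlm/internlm2-chat-7b')

agent = Internlm2Agent(
    llm=model,
    protocol=Internlm2Protocol(
        tool=dict(
            belong='assistant',
            end='<|action_end|>\n',
        ), ),
    max_turn=7)  # stop the think/act loop after at most 7 turns
```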
```diff
@@ -8,7 +8,7 @@ THEME_MAPPING = {
         'template': None,
         'title': 'Title Slide',
         'single': 'Title and Content',
-        'two': 'Tow content',
+        'two': 'Two Content',
     }
 }
```
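Why the typo mattered: python-pptx resolves slide layouts by their display name, so the misspelled 'Tow content' could never match a real layout and the lookup (a `next(...)` over the master's layouts, as shown in the hunk further down) raised `StopIteration`. A standalone sketch using the stock template that ships with python-pptx, not lagent's themes:

```python
from pptx import Presentation

prs = Presentation()  # the stock template names its layouts in English
master = prs.slide_masters[0]

layout_name = 'Tow content'  # the misspelled value from THEME_MAPPING
try:
    layout = next(l for l in master.slide_layouts if l.name == layout_name)
except StopIteration:
    # with the corrected name the lookup succeeds
    layout = next(l for l in master.slide_layouts if l.name == 'Two Content')

slide = prs.slides.add_slide(layout)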
```diff
@@ -31,7 +31,7 @@ class PPT(BaseAction):
     """Create a pptx file with specific themes.

     Args:
-        theme (:class:`str`): the theme used
+        theme (:class:`str`): the theme used. The value should be one of ['Default'].
         abs_location (:class:`str`): the ppt file's absolute location

     Returns:
```
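The clarified docstring pins down the only accepted theme value. A hedged usage sketch: the method name `create_file` and the bare `PPT()` construction are assumptions, only the two documented arguments and the 'Default' value come from the diff.

```python
from lagent.actions.ppt import PPT

# Assumption: the documented operation is exposed as PPT.create_file;
# 'Default' is the only theme value the docstring now admits.
ppt = PPT()
status = ppt.create_file(theme='Default', abs_location='/tmp/slides.pptx')
print(status)  # dict with a 'status' field, per the Returns section
```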
```diff
@@ -115,6 +115,7 @@ class PPT(BaseAction):
         :class:`dict`: operation status
             * status: the result of the execution
         """
+        from PIL import Image
         layout_name = self.theme_mapping[self.pointer.slide_master.name]['two']
         layout = next(i for i in self.pointer.slide_master.slide_layouts
                       if i.name == layout_name)
```
```diff
@@ -122,6 +123,7 @@ class PPT(BaseAction):
         ph_title, ph_body1, ph_body2 = slide.placeholders
         ph_title.text = title
         ph = ph_body2
+        image = Image.open(image)
         image_pil = image.to_pil()
         left = ph.left
         width = ph.width
```
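The two added lines make the image handling self-contained: the method imports PIL lazily and opens the image path itself before reading its dimensions for placeholder sizing. A small sketch of the underlying sizing pattern; the helper name `fit_image_width` is hypothetical, not lagent's API.

```python
from PIL import Image
from pptx.util import Emu

def fit_image_width(image_path: str, ph) -> Emu:
    """Height (in EMUs) that preserves the image's aspect ratio at the
    placeholder's width; `ph` is a python-pptx placeholder."""
    image = Image.open(image_path)  # the call the commit adds
    img_w, img_h = image.size       # pixel dimensions
    return Emu(int(ph.width * img_h / img_w))
```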
```diff
@@ -141,6 +141,12 @@ class Internlm2Protocol:
             tool_name = api_info['name'].split('.')[0]
             plugin['description'] = API_PREFIX.format(
                 tool_name=tool_name, description=plugin['description'])
+            # only keep required parameters
+            required_parameters = [
+                param for param in plugin['parameters']
+                if param['name'] in plugin['required']
+            ]
+            plugin['parameters'] = required_parameters
             plugin_descriptions.append(plugin)
         plugin_prompt = self.plugin_prompt.format(
             prompt=json.dumps(
```
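The added block trims each plugin's parameter list before it is serialized into the prompt, so optional arguments no longer bloat the tool description. A minimal sketch of that filtering, run on a hypothetical plugin schema shaped like the ones lagent builds from an action's API description:

```python
import json

plugin = {
    'name': 'GoogleScholar.search',
    'description': 'Search scholarly articles.',
    'parameters': [
        {'name': 'query', 'type': 'STRING', 'description': 'search text'},
        {'name': 'num_results', 'type': 'NUMBER', 'description': 'hits to return'},
    ],
    'required': ['query'],
}

# only keep required parameters, exactly as the added block does
plugin['parameters'] = [
    param for param in plugin['parameters']
    if param['name'] in plugin['required']
]
print(json.dumps(plugin, indent=2))  # only 'query' survives the filter
```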
```diff
@@ -50,8 +50,8 @@ class HFTransformer(BaseModel):
             self.gen_params.update(stop_words_id=stop_words_id)
         if self.gen_params['stop_words'] is not None and \
                 self.gen_params['stop_words_id'] is not None:
-            logger.warning("Both stop_words and stop_words_id are specified,"
-                           "only stop_words_id will be used.")
+            logger.warning('Both stop_words and stop_words_id are specified,'
+                           'only stop_words_id will be used.')

         self._load_tokenizer(
             path=path,
```
```diff
@@ -80,7 +80,7 @@ class HFTransformer(BaseModel):
             tokenizer_path if tokenizer_path else path,
             trust_remote_code=True,
             **tokenizer_kwargs)
-
+
         if self.tokenizer.pad_token_id is None:
             if self.tokenizer.eos_token is not None:
                 logger.warning(
```
```diff
@@ -101,7 +101,7 @@ class HFTransformer(BaseModel):
                 'pad_token_id is not set for this tokenizer. Try to '
                 'set pad_token_id via passing '
                 '`pad_token_id={PAD_TOKEN_ID}` in model_cfg.')
-
+
     def _load_model(self, path: str, model_kwargs: dict):
         import torch
         from transformers import AutoModel
```
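The warnings touched by these two whitespace-only hunks describe the same fallback: a tokenizer that ships without a pad token either reuses eos or asks the user to pass `pad_token_id` in `model_cfg`. A standalone illustration with GPT-2, which defines no pad token:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained('gpt2')
print(tok.pad_token_id)  # None: GPT-2 ships without a pad token

# the fallback the warning describes: reuse eos as pad
if tok.pad_token_id is None and tok.eos_token is not None:
    tok.pad_token = tok.eos_token
print(tok.pad_token_id)  # 50256, GPT-2's eos id
```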
```diff
@@ -302,13 +302,16 @@ class HFTransformerCasualLM(HFTransformer):
             path, trust_remote_code=True, **model_kwargs)
         self.model.eval()


 class HFTransformerChat(HFTransformerCasualLM):

-    def __init__(self,
-                 template_parser=APITemplateParser,
-                 **kwargs):
+    def __init__(self, template_parser=APITemplateParser, **kwargs):
         super().__init__(template_parser=template_parser, **kwargs)

-    def chat(self, inputs: Union[List[dict], List[List[dict]]], do_sample: bool = True, **kwargs):
+    def chat(self,
+             inputs: Union[List[dict], List[List[dict]]],
+             do_sample: bool = True,
+             **kwargs):
         """Return the chat completions in stream mode.

         Args:
```
```diff
@@ -327,12 +330,10 @@ class HFTransformerChat(HFTransformerCasualLM):
         query = prompt[-1]['content']
         history = prompt[:-1]
         try:
-            response, history = self.model.chat(self.tokenizer,
-                                                query,
-                                                history=history)
+            response, history = self.model.chat(
+                self.tokenizer, query, history=history)
         except Exception as e:
             # handle over-length input error
             logger.warning(str(e))
-            response = ""
+            response = ''
         return response
```
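The reformatted try/except is worth reading as a pattern: delegate to the checkpoint's own `model.chat`, and degrade to an empty reply if the input overflows the context window. A self-contained restatement with a stand-in model object (the real code uses the loaded HF checkpoint):

```python
import logging

logger = logging.getLogger(__name__)

def safe_chat(model, tokenizer, prompt: list) -> str:
    """Mirror of the guarded call above; `model` is any object exposing a
    .chat(tokenizer, query, history=...) method, as InternLM2 checkpoints do."""
    query = prompt[-1]['content']  # last message is the user query
    history = prompt[:-1]          # earlier messages are the chat history
    try:
        response, history = model.chat(
            tokenizer, query, history=history)
    except Exception as e:
        # handle over-length input error: log and return an empty reply
        logger.warning(str(e))
        response = ''
    return response
```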