Fix bug of ppt and googlescholar (#167)
* fix bug of ppt and googlescholar
* Format required parameters
.gitignore (vendored), 1 addition
@@ -160,3 +160,4 @@ cython_debug/
 #.idea/
 .vscode/
 docs/*/_build/
+tmp_dir/
@@ -142,7 +142,7 @@ class StreamlitUI:
                 belong='assistant',
                 end='<|action_end|>\n',
             ), ),
-        )
+            max_turn=7)

     def render_user(self, prompt: str):
         with st.chat_message('user'):
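The hunk above caps the agent's think-act loop at seven turns instead of leaving it unbounded. A minimal, self-contained sketch of what such a cap does; the helper functions here are illustrative stubs, not lagent API:

```python
def call_llm(history):
    # Stub: a real agent would query the model here.
    return 'final answer' if len(history) >= 4 else 'search("query")'

def run_tool(action):
    # Stub: a real agent would execute the proposed tool call.
    return f'result of {action}'

def is_final(reply):
    # Stub: detect whether the model answered or proposed another action.
    return not reply.endswith(')')

def run_agent(prompt, max_turn=7):
    # Bound the loop so a model that keeps emitting tool calls cannot
    # spin forever, mirroring `max_turn=7` in the hunk above.
    history = [prompt]
    reply = ''
    for _ in range(max_turn):
        reply = call_llm(history)
        if is_final(reply):
            return reply
        history.append(run_tool(reply))
    return reply  # cap reached: return the last model reply

print(run_agent('What is the tallest mountain?'))
```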
@@ -8,7 +8,7 @@ THEME_MAPPING = {
         'template': None,
         'title': 'Title Slide',
         'single': 'Title and Content',
-        'two': 'Tow content',
+        'two': 'Two Content',
     }
 }

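This misspelling is the ppt bug itself: slide layouts are looked up by exact name (see the `next(...)` lookup in a later hunk), so `'Tow content'` matched nothing. A small illustration of the failure mode; the layout names are typical of a default PowerPoint slide master and are shown here as plain strings:

```python
# Layout names as they typically appear on a default slide master.
layouts = ['Title Slide', 'Title and Content', 'Two Content']

def pick_layout(name: str) -> str:
    # Exact-match lookup, the same pattern the PPT action uses.
    return next(layout for layout in layouts if layout == name)

print(pick_layout('Two Content'))  # corrected mapping value: found
try:
    pick_layout('Tow content')     # old, misspelled value
except StopIteration:
    print('layout not found -- the crash this commit fixes')
```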
@@ -31,7 +31,7 @@ class PPT(BaseAction):
         """Create a pptx file with specific themes.

         Args:
-            theme (:class:`str`): the theme used
+            theme (:class:`str`): the theme used. The value should be one of ['Default'].
             abs_location (:class:`str`): the ppt file's absolute location

         Returns:
@@ -115,6 +115,7 @@ class PPT(BaseAction):
             :class:`dict`: operation status
                 * status: the result of the execution
         """
+        from PIL import Image
         layout_name = self.theme_mapping[self.pointer.slide_master.name]['two']
         layout = next(i for i in self.pointer.slide_master.slide_layouts
                       if i.name == layout_name)
@@ -122,6 +123,7 @@ class PPT(BaseAction):
         ph_title, ph_body1, ph_body2 = slide.placeholders
         ph_title.text = title
         ph = ph_body2
+        image = Image.open(image)
         image_pil = image.to_pil()
         left = ph.left
         width = ph.width
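These two hunks address the other half of the ppt bug: the action receives `image` as a file path but never loaded it, so the size calculations that follow had nothing to measure. A sketch of the load-then-measure pattern, assuming python-pptx-style placeholder geometry; `ph_left` and `ph_width` stand in for `ph.left` and `ph.width`, and the function name is illustrative:

```python
from PIL import Image

def fit_to_placeholder(image_path: str, ph_left: int, ph_width: int):
    # Load the image from disk first -- the step the commit adds --
    # then scale its height to the placeholder width, keeping the
    # aspect ratio of the underlying file.
    image = Image.open(image_path)
    width_px, height_px = image.size
    scaled_height = int(ph_width * height_px / width_px)
    return ph_left, ph_width, scaled_height
```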
@@ -141,6 +141,12 @@ class Internlm2Protocol:
             tool_name = api_info['name'].split('.')[0]
             plugin['description'] = API_PREFIX.format(
                 tool_name=tool_name, description=plugin['description'])
+            # only keep required parameters
+            required_parameters = [
+                param for param in plugin['parameters']
+                if param['name'] in plugin['required']
+            ]
+            plugin['parameters'] = required_parameters
             plugin_descriptions.append(plugin)
         plugin_prompt = self.plugin_prompt.format(
             prompt=json.dumps(
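The six added lines are the commit's "Format required parameters" change: before the tool schemas are serialized into the system prompt, each plugin's parameter list is trimmed to the parameters named in its `required` field, keeping the prompt short. A self-contained sketch with a toy schema; the field values are illustrative, loosely modeled on a GoogleScholar-style search tool:

```python
import json

# Toy plugin schema in the shape the protocol iterates over.
plugin = dict(
    name='GoogleScholar.search',
    description='Search scholarly literature.',
    parameters=[
        dict(name='query', type='STRING'),
        dict(name='cites', type='STRING'),
        dict(name='year_lo', type='NUMBER'),
    ],
    required=['query'],
)

# The same filtering the hunk adds: drop optional parameters.
plugin['parameters'] = [
    param for param in plugin['parameters']
    if param['name'] in plugin['required']
]
print(json.dumps(plugin['parameters']))  # only `query` survives
```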
@@ -50,8 +50,8 @@ class HFTransformer(BaseModel):
             self.gen_params.update(stop_words_id=stop_words_id)
         if self.gen_params['stop_words'] is not None and \
                 self.gen_params['stop_words_id'] is not None:
-            logger.warning("Both stop_words and stop_words_id are specified,"
-                           "only stop_words_id will be used.")
+            logger.warning('Both stop_words and stop_words_id are specified,'
+                           'only stop_words_id will be used.')

         self._load_tokenizer(
             path=path,
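Beyond the cosmetic quote-style change, the warning documents a precedence rule: when both stop criteria are supplied, the token-id form wins. A compact sketch of that rule; the function name is illustrative, not lagent API:

```python
import logging

logger = logging.getLogger(__name__)

def resolve_stop_criteria(stop_words=None, stop_words_id=None):
    # Token-id criteria take precedence, matching the warning above.
    if stop_words is not None and stop_words_id is not None:
        logger.warning('Both stop_words and stop_words_id are specified,'
                       'only stop_words_id will be used.')
        return stop_words_id
    return stop_words_id if stop_words_id is not None else stop_words

print(resolve_stop_criteria(stop_words=['<eoa>'], stop_words_id=[[2]]))
```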
@@ -80,7 +80,7 @@ class HFTransformer(BaseModel):
             tokenizer_path if tokenizer_path else path,
             trust_remote_code=True,
             **tokenizer_kwargs)

         if self.tokenizer.pad_token_id is None:
             if self.tokenizer.eos_token is not None:
                 logger.warning(
@@ -101,7 +101,7 @@ class HFTransformer(BaseModel):
                     'pad_token_id is not set for this tokenizer. Try to '
                     'set pad_token_id via passing '
                     '`pad_token_id={PAD_TOKEN_ID}` in model_cfg.')

     def _load_model(self, path: str, model_kwargs: dict):
         import torch
         from transformers import AutoModel
@@ -302,13 +302,16 @@ class HFTransformerCasualLM(HFTransformer):
             path, trust_remote_code=True, **model_kwargs)
         self.model.eval()


 class HFTransformerChat(HFTransformerCasualLM):

-    def __init__(self,
-                 template_parser=APITemplateParser,
-                 **kwargs):
+    def __init__(self, template_parser=APITemplateParser, **kwargs):
         super().__init__(template_parser=template_parser, **kwargs)

-    def chat(self, inputs: Union[List[dict], List[List[dict]]], do_sample: bool = True, **kwargs):
+    def chat(self,
+             inputs: Union[List[dict], List[List[dict]]],
+             do_sample: bool = True,
+             **kwargs):
         """Return the chat completions in stream mode.

         Args:
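This reflow leaves behavior unchanged: `chat` still accepts OpenAI-style message dicts, or a batch of them. A hypothetical usage sketch, assuming `HFTransformerChat` is importable from `lagent.llms` and that the model path points at a chat checkpoint; both names are assumptions, not part of the diff:

```python
# Hypothetical usage; the import path and model name are assumptions.
from lagent.llms import HFTransformerChat

llm = HFTransformerChat(path='internlm/internlm2-chat-7b')
messages = [dict(role='user', content='Summarize attention in one sentence.')]
print(llm.chat(messages, do_sample=True))
```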
@@ -327,12 +330,10 @@ class HFTransformerChat(HFTransformerCasualLM):
         query = prompt[-1]['content']
         history = prompt[:-1]
         try:
-            response, history = self.model.chat(self.tokenizer,
-                                                query,
-                                                history=history)
+            response, history = self.model.chat(
+                self.tokenizer, query, history=history)
         except Exception as e:
             # handle over-length input error
             logger.warning(str(e))
-            response = ""
+            response = ''
         return response

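The final hunk is likewise a reflow plus a quote fix, but the surrounding pattern is worth noting: generation is delegated to the checkpoint's own `model.chat`, and an error such as an over-length prompt degrades to an empty reply instead of crashing the caller. A standalone sketch of that pattern; the function name is illustrative:

```python
import logging

logger = logging.getLogger(__name__)

def safe_chat(model, tokenizer, query, history):
    # Never let a generation error (e.g. over-length input) crash the
    # caller; log it and fall back to an empty reply, as the hunk does.
    try:
        response, history = model.chat(tokenizer, query, history=history)
    except Exception as e:
        logger.warning(str(e))
        response = ''
    return response, history
```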