Mirror of https://github.com/abetlen/llama-cpp-python.git, synced 2023-09-07 17:34:22 +03:00
Fixed too many newlines, now onto args.
Still needs shipping work so you could do "python -m llama_cpp.examples." etc.
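For the `python -m llama_cpp.examples.<name>` invocation mentioned above to work, the examples would need to live in an importable subpackage whose modules guard their entry points. A hypothetical sketch of such a module (the file name, package layout, and `main` function are assumptions, not names taken from the repository):

```python
# Hypothetical example module, e.g. llama_cpp/examples/low_level_chat.py
# (file name and package layout are assumed, not the repository's actual structure;
#  llama_cpp/examples/ would also need an __init__.py to be importable).

def main() -> None:
    # Placeholder body; the real example would build and run a chat loop.
    print("running example")

# Guarding the entry point lets the module be imported without side effects
# and also run directly with `python -m llama_cpp.examples.low_level_chat`.
if __name__ == "__main__":
    main()
```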
@@ -183,7 +183,7 @@ n_keep = {self.params.n_keep}

     def set_color(self, c):
         if (self.params.use_color):
-            print(c)
+            print(c, end="")

     # generate tokens
     def generate(self):
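The change itself is small: `print()` appends a newline by default, so emitting an ANSI color escape with a bare `print(c)` inserted a blank line every time the color changed. Passing `end=""` suppresses that. A minimal sketch of the idea, standalone rather than the example's actual class (the `ANSI_*` constants and `use_color` flag are illustrative assumptions):

```python
# Minimal sketch: print ANSI color escapes without extra newlines.
# The constant names and the use_color flag are illustrative, not
# identifiers taken from the llama_cpp example itself.
ANSI_GREEN = "\x1b[32m"
ANSI_RESET = "\x1b[0m"

use_color = True

def set_color(c: str) -> None:
    if use_color:
        # end="" suppresses print()'s default trailing newline, so the
        # escape sequence does not push the generated text onto a new line.
        print(c, end="")

set_color(ANSI_GREEN)
print("generated token", end="")
set_color(ANSI_RESET)
print()  # single newline at the end of the colored line
```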