Skip to content

Commit a75c5e4

Browse files
committed
WIP message on llama.cpp
1 parent 4febd2e commit a75c5e4

File tree

2 files changed

+9
-2
lines changed

2 files changed

+9
-2
lines changed

llama2_c/scripts/nft_update_story.py

+4-2
Original file line numberDiff line numberDiff line change
@@ -156,12 +156,14 @@ def main() -> int:
156156
print(response)
157157
if "Ok" in response[0].keys():
158158
# Check if the number of generated tokens is less than the requested tokens
159-
if response[0]["Ok"]["num_tokens"] < prompt['steps']:
159+
if response[0]["Ok"]["num_tokens"] < prompt["steps"]:
160160
print(f'The end! - num_tokens = {response[0]["Ok"]["num_tokens"]}')
161161
break
162162
# Check if the response is an empty string. If it is, break out of the loop.
163163
if response[0]["Ok"]["inference"] == "":
164-
print("The end! - we got an empty string. THIS IS AN ERROR ACTUALLY. WE SHOULD NOT GET HERE..")
164+
print(
165+
"The end! - we got an empty string. ERROR. WE SHOULD NOT GET HERE.."
166+
)
165167
print("Something went wrong:")
166168
sys.exit(1)
167169
else:

llama_cpp/README.md

+5
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
# WIP
2+
3+
We are hard at work to port llama.cpp to the Internet Computer as part of a DFINITY Foundation AI grant.
4+
5+
Stay tuned...

0 commit comments

Comments (0)