Spaces:
Runtime error
Runtime error
Upgrade version
Browse files- README.md +1 -1
- app.py +2 -2
- utils/generation.py +6 -3
- utils/tree_utils.py +5 -3
README.md
CHANGED
@@ -4,7 +4,7 @@ emoji: 💻
|
|
4 |
colorFrom: green
|
5 |
colorTo: red
|
6 |
sdk: gradio
|
7 |
-
sdk_version: 3.
|
8 |
app_file: app.py
|
9 |
pinned: true
|
10 |
license: mit
|
|
|
4 |
colorFrom: green
|
5 |
colorTo: red
|
6 |
sdk: gradio
|
7 |
+
sdk_version: 3.47.1
|
8 |
app_file: app.py
|
9 |
pinned: true
|
10 |
license: mit
|
app.py
CHANGED
@@ -187,7 +187,7 @@ def alter_body(old_code, func_id, funcs_list: list, prompt="", temperature=0.2,
|
|
187 |
print(f"using for generation: {func_node=}")
|
188 |
|
189 |
generation_kwargs = combine_generation_kwargs(temperature, max_new_tokens, top_p, repetition_penalty)
|
190 |
-
model_context = construct_model_context(func_node, prompt=prompt)
|
191 |
print(f"{model_context=}")
|
192 |
|
193 |
body_node = func_node.child_by_field_name("body")
|
@@ -216,7 +216,7 @@ def list_dropdown(in_code): #only used for auto update, not on sample pick?
|
|
216 |
func_identifiers = [f"{idx:2d}: {n.child_by_field_name('declarator').text.decode()}" for idx, n in enumerate(funcs)]
|
217 |
# funcs = [n for n in funcs] #wrapped as set to avoid json issues?
|
218 |
print(f"updating drop down to:{func_identifiers}")
|
219 |
-
return funcs, gr.Dropdown
|
220 |
|
221 |
if __name__ == "__main__": #works on huggingface?
|
222 |
passes_dataset = datasets.load_dataset("Vipitis/Shadertoys")
|
|
|
187 |
print(f"using for generation: {func_node=}")
|
188 |
|
189 |
generation_kwargs = combine_generation_kwargs(temperature, max_new_tokens, top_p, repetition_penalty)
|
190 |
+
model_context = construct_model_context(func_node, prompt=prompt)[0]
|
191 |
print(f"{model_context=}")
|
192 |
|
193 |
body_node = func_node.child_by_field_name("body")
|
|
|
216 |
func_identifiers = [f"{idx:2d}: {n.child_by_field_name('declarator').text.decode()}" for idx, n in enumerate(funcs)]
|
217 |
# funcs = [n for n in funcs] #wrapped as set to avoid json issues?
|
218 |
print(f"updating drop down to:{func_identifiers}")
|
219 |
+
return funcs, gr.Dropdown(choices=func_identifiers)
|
220 |
|
221 |
if __name__ == "__main__": #works on huggingface?
|
222 |
passes_dataset = datasets.load_dataset("Vipitis/Shadertoys")
|
utils/generation.py
CHANGED
@@ -8,6 +8,7 @@ def combine_generation_kwargs(temperature=2.0, max_new_tokens=512, top_p=0.95, r
|
|
8 |
Combines the generation kwargs into a single dict.
|
9 |
"""
|
10 |
gen_kwargs = {}
|
|
|
11 |
gen_kwargs["temperature"] = temperature
|
12 |
gen_kwargs["max_new_tokens"] = max_new_tokens
|
13 |
gen_kwargs["top_p"] = top_p
|
@@ -48,12 +49,14 @@ def stream_generation(prompt:str, pipe, gen_kwargs:dict):
|
|
48 |
streamer.on_finalized_text("stream reached the end.")
|
49 |
return model_output #is this ever reached?
|
50 |
|
51 |
-
def construct_model_context(func_node, prompt="")
|
52 |
"""
|
53 |
Constructs the model context from a function node.
|
|
|
54 |
"""
|
55 |
-
model_context = grab_before_comments(func_node)
|
|
|
56 |
if prompt != "":
|
57 |
model_context = "//Title: " + prompt + "\n" + model_context #prepend user prompt/title
|
58 |
model_context = "//Language: Shadertoy GLSL fragment shader\n" + model_context #prepend system prompt, language hint
|
59 |
-
return model_context
|
|
|
8 |
Combines the generation kwargs into a single dict.
|
9 |
"""
|
10 |
gen_kwargs = {}
|
11 |
+
gen_kwargs["do_sample"] = True
|
12 |
gen_kwargs["temperature"] = temperature
|
13 |
gen_kwargs["max_new_tokens"] = max_new_tokens
|
14 |
gen_kwargs["top_p"] = top_p
|
|
|
49 |
streamer.on_finalized_text("stream reached the end.")
|
50 |
return model_output #is this ever reached?
|
51 |
|
52 |
+
def construct_model_context(func_node, prompt=""):
|
53 |
"""
|
54 |
Constructs the model context from a function node.
|
55 |
+
returns: model_context, start_byte
|
56 |
"""
|
57 |
+
model_context, start_byte = grab_before_comments(func_node)
|
58 |
+
model_context += full_func_head(func_node)
|
59 |
if prompt != "":
|
60 |
model_context = "//Title: " + prompt + "\n" + model_context #prepend user prompt/title
|
61 |
model_context = "//Language: Shadertoy GLSL fragment shader\n" + model_context #prepend system prompt, language hint
|
62 |
+
return model_context, start_byte
|
utils/tree_utils.py
CHANGED
@@ -89,21 +89,23 @@ def grab_before_comments(func_node):
|
|
89 |
"""
|
90 |
precomment = ""
|
91 |
last_comment_line = 0
|
|
|
92 |
for node in func_node.parent.children: #could you optimize where to iterate from? direction?
|
93 |
if node.start_point[0] != last_comment_line + 1:
|
94 |
precomment = ""
|
95 |
if node.type == "comment":
|
|
|
96 |
precomment += node.text.decode() + "\n"
|
97 |
last_comment_line = node.start_point[0]
|
98 |
elif node == func_node:
|
99 |
-
return precomment
|
100 |
-
return precomment
|
101 |
|
102 |
def has_docstrings(func_node):
|
103 |
"""
|
104 |
returns whether a function node has a docstring
|
105 |
"""
|
106 |
-
return get_docstrings(func_node).strip() != "{" or grab_before_comments(func_node) != ""
|
107 |
|
108 |
|
109 |
def line_chr2char(text, line_idx, chr_idx):
|
|
|
89 |
"""
|
90 |
precomment = ""
|
91 |
last_comment_line = 0
|
92 |
+
start_byte = func_node.start_byte
|
93 |
for node in func_node.parent.children: #could you optimize where to iterate from? direction?
|
94 |
if node.start_point[0] != last_comment_line + 1:
|
95 |
precomment = ""
|
96 |
if node.type == "comment":
|
97 |
+
start_byte = node.start_byte # reset this here to an earlier one?
|
98 |
precomment += node.text.decode() + "\n"
|
99 |
last_comment_line = node.start_point[0]
|
100 |
elif node == func_node:
|
101 |
+
return precomment, start_byte
|
102 |
+
return precomment, start_byte
|
103 |
|
104 |
def has_docstrings(func_node):
|
105 |
"""
|
106 |
returns whether a function node has a docstring
|
107 |
"""
|
108 |
+
return get_docstrings(func_node).strip() != "{" or grab_before_comments(func_node)[0] != ""
|
109 |
|
110 |
|
111 |
def line_chr2char(text, line_idx, chr_idx):
|