)
```

Get the prompt message:

```
    # build the final prompt text from the rail
    prompt = rail.get_prompt_message()


    # string parameters that get filled into the prompt
    def get_string_params(self) -> dict[str, str]:
        return {
            'issue': str(self.issue),
            'filepaths_with_token_lengths': '\n'.join([
                file_descriptor.filepaths_with_token_lengths_to_str()
                for file_descriptor in self.file_descriptors
            ]),
            'token_limit': str(self.token_limit),
        }
```
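
These string parameters are what end up in the prompt sent to the model. Below is a minimal sketch of that substitution, assuming a simple `str.format`-style template; the template text and the `render_prompt` helper are illustrative, not AutoPR's actual template:

```
# Hypothetical template, only to show how the string params would be used;
# the real template text lives in AutoPR's rail definitions.
PROMPT_TEMPLATE = (
    "Issue:\n{issue}\n\n"
    "Files (with token lengths):\n{filepaths_with_token_lengths}\n\n"
    "Stay under {token_limit} tokens."
)

def render_prompt(string_params: dict[str, str]) -> str:
    # Fill the template with the values returned by get_string_params()
    return PROMPT_TEMPLATE.format(**string_params)

# Made-up values for illustration
print(render_prompt({
    "issue": "#1: Fix crash when the config file is empty",
    "filepaths_with_token_lengths": "autopr/main.py (312 tokens)",
    "token_limit": "8192",
}))
```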

As you can see, the prompt is built around the issue (plus the file paths and the token limit). The openai API is then called with the following parameters:

```
    messages = [
        {"role": "system", "content": system_prompt},
    ]
    # each few-shot example becomes a user message followed by the assistant's answer
    for example in examples:
        messages.append({"role": "user", "content": example[0]})
        messages.append({"role": "assistant", "content": example[1]})
    # the actual question goes last
    messages.append({"role": "user", "content": prompt})

    openai_response = openai.ChatCompletion.create(
        model=self.model,
        messages=messages,
        temperature=temperature,
        max_tokens=max_tokens,
    )
```
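
The snippet stops at the request itself. With the pre-1.0 `openai` SDK used here, the assistant's reply would typically be read out of `openai_response` like this; this is a sketch of the usual pattern, not necessarily AutoPR's exact code:

```
# The legacy (pre-1.0) openai SDK returns an object that can be indexed like a dict.
reply_text = openai_response["choices"][0]["message"]["content"]

# Token usage is reported alongside, which matters when budgeting against token_limit.
usage = openai_response["usage"]
print(usage["prompt_tokens"], usage["completion_tokens"], reply_text)
```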

It first loops over the examples (feeding them in as few-shot user/assistant pairs) and only then asks the actual question. Below are the system_prompt defaults:

```
    raw_system_prompt: str = 'You are a software developer and git nerd, a helpful planning and coding assistant.',
    rail_system_prompt: str = "You are a helpful assistant, "
                              "able to express yourself purely through JSON, "
                              "strictly and precisely adhering to the provided XML schemas.",
):
```
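
The two defaults suggest two call styles: the raw prompt asks for free-form planning and coding help, while the rail prompt constrains the model to JSON that follows the provided XML schemas. Plugging the rail prompt into the message-building loop shown above would produce a list like the following; the example pair and prompt text are made up for illustration:

```
rail_system_prompt = (
    "You are a helpful assistant, "
    "able to express yourself purely through JSON, "
    "strictly and precisely adhering to the provided XML schemas."
)

# Hypothetical few-shot pair and prompt; real values come from the rail definitions.
examples = [
    ("Summarize issue #1 as JSON.", '{"summary": "crash on empty config"}'),
]
prompt = "Issue:\n#1: Fix crash when the config file is empty\n..."

messages = [{"role": "system", "content": rail_system_prompt}]
for user_msg, assistant_msg in examples:
    messages.append({"role": "user", "content": user_msg})
    messages.append({"role": "assistant", "content": assistant_msg})
messages.append({"role": "user", "content": prompt})
```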