teamalphabmsit committed on
Commit
44504f7
·
verified ·
1 Parent(s): 83e1003

Upload folder using huggingface_hub

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitignore +16 -0
  2. LICENSE +21 -0
  3. README.md +131 -8
  4. app.py +196 -0
  5. cache/cache.py +30 -0
  6. cache/oss.py +71 -0
  7. config.yaml +40 -0
  8. conversation.py +453 -0
  9. front_end/css.css +3 -0
  10. front_end/javascript.js +49 -0
  11. front_end/lambda.jpg +0 -0
  12. front_end/user.jpg +0 -0
  13. function_calling.py +697 -0
  14. inspector.py +44 -0
  15. kernel.py +337 -0
  16. knowledge_integration/N_correlation_matrix.py +510 -0
  17. knowledge_integration/knowledge/nn_networks/models.py +130 -0
  18. knowledge_integration/knowledge/nn_networks/train.py +52 -0
  19. knowledge_integration/knowledge/nn_networks/utils.py +76 -0
  20. knowledge_integration/knw.py +96 -0
  21. knowledge_integration/nearest_correlation_matrix.py +528 -0
  22. knowledge_integration/nn_network.py +261 -0
  23. knowledge_integration/pami.py +32 -0
  24. knowledge_integration/test.yaml +20 -0
  25. knw.py +96 -0
  26. knw_in.py +87 -0
  27. lambda_utils.py +320 -0
  28. myenv/Lib/site-packages/pip-23.2.1.dist-info/AUTHORS.txt +738 -0
  29. myenv/Lib/site-packages/pip-23.2.1.dist-info/INSTALLER +1 -0
  30. myenv/Lib/site-packages/pip-23.2.1.dist-info/LICENSE.txt +20 -0
  31. myenv/Lib/site-packages/pip-23.2.1.dist-info/METADATA +90 -0
  32. myenv/Lib/site-packages/pip-23.2.1.dist-info/RECORD +1003 -0
  33. myenv/Lib/site-packages/pip-23.2.1.dist-info/REQUESTED +0 -0
  34. myenv/Lib/site-packages/pip-23.2.1.dist-info/WHEEL +5 -0
  35. myenv/Lib/site-packages/pip-23.2.1.dist-info/entry_points.txt +4 -0
  36. myenv/Lib/site-packages/pip-23.2.1.dist-info/top_level.txt +1 -0
  37. myenv/Lib/site-packages/pip/__init__.py +13 -0
  38. myenv/Lib/site-packages/pip/__main__.py +24 -0
  39. myenv/Lib/site-packages/pip/__pip-runner__.py +50 -0
  40. myenv/Lib/site-packages/pip/_internal/__init__.py +19 -0
  41. myenv/Lib/site-packages/pip/_internal/build_env.py +311 -0
  42. myenv/Lib/site-packages/pip/_internal/cache.py +292 -0
  43. myenv/Lib/site-packages/pip/_internal/cli/__init__.py +4 -0
  44. myenv/Lib/site-packages/pip/_internal/cli/autocompletion.py +171 -0
  45. myenv/Lib/site-packages/pip/_internal/cli/base_command.py +236 -0
  46. myenv/Lib/site-packages/pip/_internal/cli/cmdoptions.py +1074 -0
  47. myenv/Lib/site-packages/pip/_internal/cli/command_context.py +27 -0
  48. myenv/Lib/site-packages/pip/_internal/cli/main.py +79 -0
  49. myenv/Lib/site-packages/pip/_internal/cli/main_parser.py +134 -0
  50. myenv/Lib/site-packages/pip/_internal/cli/parser.py +294 -0
.gitignore ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ .DS_Store
2
+ .idea
3
+ __pycache__/
4
+ application
5
+ back
6
+ cache/conv_cache
7
+ experiments/
8
+ exported/
9
+ function_calling/
10
+ MNIST/
11
+ utils/
12
+ others/
13
+ test/
14
+ example_config.yaml
15
+
16
+
LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2024 MJ.Sun
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
README.md CHANGED
@@ -1,12 +1,135 @@
1
  ---
2
- title: Project1
3
- emoji: ⚡
4
- colorFrom: purple
5
- colorTo: green
6
- sdk: gradio
7
- sdk_version: 5.9.1
8
  app_file: app.py
9
- pinned: false
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
  ---
11
 
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  ---
2
+ title: project1
 
 
 
 
 
3
  app_file: app.py
4
+ sdk: gradio
5
+ sdk_version: 4.43.0
6
+ ---
7
+ # [LAMBDA](https://www.polyu.edu.hk/ama/cmfai/lambda.html) - Multi-Agent Data Analysis System
8
+ <body>
9
+ <!-- <img src="https://github.com/user-attachments/assets/df454158-79e4-4da4-ae03-eb687fe02f16" style="width: 80%"> -->
10
+ <!-- <p align="center">
11
+ <img src="https://github.com/user-attachments/assets/6f6d49ef-40b7-46f2-88ae-b8f6d9719c3a" style="width: 600px;">
12
+ </p> -->
13
+
14
+ ![lambda_mix](https://github.com/user-attachments/assets/db5574aa-9441-4c9d-b44d-3b225d11e0cc)
15
+
16
+
17
+ We introduce **LAMBDA**, a novel open-source, code-free multi-agent data analysis system that harnesses the power of large models. LAMBDA is designed to address data analysis challenges in complex data-driven applications through the use of innovatively designed data agents that operate iteratively and generatively using natural language.
18
+
19
+ ## Key Features
20
+
21
+ - **Code-Free Data Analysis**: Perform complex data analysis tasks through human language instruction.
22
+ - **Multi-Agent System**: Utilizes two key agent roles, the programmer and the inspector, to generate and debug code seamlessly.
23
+ - **User Interface**: This includes a robust user interface that allows direct user intervention in the operational loop.
24
+ - **Model Integration**: Flexibly integrates external models and algorithms to cater to customized data analysis needs.
25
+ - **Automatic Report Generation**: Concentrate on high-value tasks, rather than spending time and resources on report writing and formatting.
26
+
27
+ ## Getting Started
28
+ ### Installation
29
+ First, clone the repository.
30
+
31
+ ```bash
32
+ git clone https://github.com/Stephen-SMJ/LAMBDA.git
33
+ cd LAMBDA
34
+ ```
35
+
36
+ Then, we recommend creating a [Conda](https://docs.conda.io/en/latest/) environment for this project and install the dependencies by following commands:
37
+ ```bash
38
+ conda create -n lambda python=3.10
39
+ conda activate lambda
40
+ ```
41
+
42
+ Next, you should install the Jupyter kernel to create a local Code Interpreter:
43
+ ```bash
44
+ ipython kernel install --name lambda --user
45
+ ```
46
+
47
+ ### Configuration
48
+ 1. To use the Large Language Model, you should have an API key from [OpenAI](https://platform.openai.com/docs/guides/authentication) or other companies. Also, you can call your local LLMs once deployed by frameworks such as [LLaMA-Factory](https://github.com/hiyouga/LLaMA-Factory).
49
+ 2. **We used Aliyun Cloud Server to store the caches (such as figures, models, and so on). Currently, you should buy an [OSS (Object Storage Service)](https://cn.aliyun.com/product/oss?from_alibabacloud=) from Aliyun to use it. But we will release a new version without the cloud server for easier use soon.**
50
+
51
+ 3. Set your API key, models, working path, and OSS-related items in the config.yaml:
52
+ ```bash
53
+ #================================================================================================
54
+ # Config of the LLMs
55
+ #================================================================================================
56
+ conv_model : "gpt-4o-mini" # the conversation model
57
+ programmer_model : "gpt-4o-mini"
58
+ inspector_model : "gpt-4o-mini"
59
+ api_key : ""  # put your own API key here. SECURITY: a live OpenAI secret key (sk-proj-…) was committed on this line — revoke/rotate that key immediately and purge it from the git history
60
+ base_url_conv_model : 'https://api.openai.com/v1'
61
+ base_url_programmer : 'https://api.openai.com/v1'
62
+ base_url_inspector : 'https://api.openai.com/v1'
63
+ max_token_conv_model: 4096 # the max token of the conversation model, this will determine the maximum length of the report.
64
+
65
+
66
+ #================================================================================================
67
+ # Config of the system
68
+ #================================================================================================
69
+ streaming : True
70
+
71
+ #cache_related
72
+ oss_endpoint: ""
73
+ oss_access_key_id: ""
74
+ oss_access_secret: ""
75
+ oss_bucket_name: ""
76
+ expired_time: 36000 # The expired time of the link in cache
77
+ cache_dir : "" # local cache dir
78
+ max_attempts : 5 # The max attempts of self-correcting
79
+ max_exe_time: 18000 # max time for the execution
80
+
81
+ #knowledge integration
82
+ retrieval : False # whether to start a knowledge retrieval. If you don't create your knowledge base, you should set it to False
83
+ mode : "full" # the mode of the #knowledge integration
84
+ ```
85
+
86
+ Finally, Run the following command to start the demo with GUI:
87
+ ```bash
88
+ python app.py
89
+ ```
90
+
91
+
92
+ ## Demonstration Videos
93
+
94
+ The performance of LAMBDA in solving data science problems is demonstrated in several case studies including:
95
+ - **[Data Analysis](https://www.polyu.edu.hk/ama/cmfai/files/lambda/lambda.mp4)**
96
+ - **[Integrating Human Intelligence](https://www.polyu.edu.hk/ama/cmfai/files/lambda/knw.mp4)**
97
+ - **[Education](https://www.polyu.edu.hk/ama/cmfai/files/lambda/LAMBDA_education.mp4)**
98
+
99
+ ## Planning works
100
+
101
+ - Code refactoring.
102
+ - Remove the cloud cache module for easier use.
103
+ - Add a Docker image for easier use.
104
+ - Documentation writing.
105
+ - Replace Gradio with other GUI frameworks.
106
+
107
+
108
+ ## License
109
+
110
+ This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
111
+
112
+
113
+
114
+ ## Acknowledgements
115
+
116
+ Thank the contributors and the communities for their support and feedback.
117
+
118
  ---
119
 
120
+ > If you find our work useful in your research, consider citing our paper by:
121
+
122
+
123
+
124
+ ```bash
125
+ @article{sun2024lambda,
126
+ title={LAMBDA: A Large Model Based Data Agent},
127
+ author={Sun, Maojun and Han, Ruijian and Jiang, Binyan and Qi, Houduo and Sun, Defeng and Yuan, Yancheng and Huang, Jian},
128
+ journal={arXiv preprint arXiv:2407.17535},
129
+ year={2024}
130
+ }
131
+ ```
132
+ ## Star History
133
+
134
+ [![Star History Chart](https://api.star-history.com/svg?repos=Stephen-SMJ/LAMBDA&type=Timeline)](https://star-history.com/#Stephen-SMJ/LAMBDA&Timeline)
135
+ </body>
app.py ADDED
@@ -0,0 +1,196 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os.path
2
+ import shutil
3
+ import sys
4
+ import traceback
5
+ import gradio as gr
6
+ import json
7
+ import random
8
+ import time
9
+ import uuid
10
+ import pandas as pd
11
+ from conversation import Conversation
12
+ from cache.oss import init_dir, init_oss, upload_oss_file, get_download_link
13
+ from cache.cache import data_cache
14
+ from prompt_engineering.prompts import *
15
+ from kernel import CodeKernel
16
+ import yaml
17
+ # from config import conv_model, streaming
18
+ import atexit
19
+ import signal
20
+
21
+ class app():
22
+ def __init__(self, config_path='config.yaml'):
23
+ print("Load config: ", config_path)
24
+ self.config = yaml.load(open(config_path, 'r'), Loader=yaml.FullLoader)
25
+ self.conv = Conversation(self.config)
26
+
27
+ self.conv.programmer.messages = [
28
+ {
29
+ "role": "system",
30
+ "content": SYSTEM_PROMPT
31
+ }
32
+ ]
33
+
34
+ def open_board(self):
35
+ return self.conv.show_data()
36
+
37
+ def cache_file(self,file):
38
+ try:
39
+ self.oss_file_dir, self.local_cache_dir = init_dir(self.config['cache_dir'])
40
+ init_oss(self.config['oss_endpoint'], self.config['oss_access_key_id'], self.config['oss_access_secret'], self.config['oss_bucket_name'], self.config['expired_time'])
41
+ self.conv.oss_dir, self.conv.local_cache_dir = self.oss_file_dir, self.local_cache_dir
42
+ file_info = upload_oss_file(self.oss_file_dir, file)
43
+ local_file_path = os.path.join(self.local_cache_dir, file_info['file_name'])
44
+ shutil.copy2(file, local_file_path)
45
+ self.conv.add_data(file)
46
+ gen_info = self.conv.my_data_cache.get_description()
47
+ self.conv.programmer.messages[0]["content"] = (PROGRAMMER_PROMPT.format(working_path=self.local_cache_dir) +
48
+ f"\nNow, user uploads the datasets in {local_file_path}\n, and here is the general information of the dataset:\n {gen_info}")
49
+ if self.conv.retrieval:
50
+ self.conv.programmer.messages[0]["content"] += KNOWLEDGE_INTEGRATION_SYSTEM
51
+ print(self.conv.programmer.messages[0]["content"])
52
+
53
+ # self.conv.run(function_lib)
54
+ # self.conv.messages.append({"role": "assistant", "content": response})
55
+ # chat_history.append((message, response))
56
+ except Exception as e:
57
+ traceback.print_exc()
58
+ print(f"Uploaded file error: {e}")
59
+
60
+ # def show_data(self):
61
+ # return self.conv.show_data()
62
+
63
+ def rendering_code(self):
64
+ return self.conv.rendering_code()
65
+
66
+ # def report_test(self):
67
+ # with open("/Users/stephensun/Desktop/pypro/dsagent_ci/cache/conv_cache/2c23cf4b-31a8-4aeb-8689-13b9fbdfb7fc-2024-04-29/programmer_msg.json", "r") as f:
68
+ # msg = json.load(f)
69
+ # print(msg)
70
+ # self.conv.programmer.messages = msg
71
+ # down_path = self.conv.document_generation()
72
+ # return [gr.Button(visible=False), gr.DownloadButton(label=f"Download Report", value=down_path, visible=True)]
73
+
74
+ def generate_report(self):
75
+ down_path = self.conv.document_generation()
76
+ return [gr.Button(visible=False), gr.DownloadButton(label=f"Download Report", value=down_path, visible=True)]
77
+
78
+ def export_code(self):
79
+ down_path = self.conv.export_code()
80
+ return [gr.Button(visible=False), gr.DownloadButton(label=f"Download Notebook", value=down_path, visible=True)]
81
+
82
+ def down_report(self):
83
+ return [gr.Button(visible=True), gr.DownloadButton(visible=False)]
84
+ def down_notebook(self):
85
+ return [gr.Button(visible=True), gr.DownloadButton(visible=False)]
86
+
87
+ # def human_loop(self, code):
88
+ # result = self.conv.run_workflow(code)
89
+ # self.conv.programmer.messages.append({"role": "assistant", "content": response})
90
+ # return response
91
+ def call_llm(self, message, chat_history, code=None):
92
+ '''
93
+ :param message: input of user
94
+ :param chat_history: the history of whole dialogue. But this variable was only used to show in the UI (not record in LLM)
95
+ :return: chat_history in the Chatbot component
96
+ '''
97
+ if not code:
98
+ self.conv.programmer.messages.append({"role": "user", "content": message})
99
+ else:
100
+ message = code
101
+ response = self.conv.run_workflow(function_lib=None,code=code)
102
+ chat_history.append((message, response))
103
+ self.conv.chat_history = chat_history
104
+ # chat_history = self.conv.messages
105
+ # print(chat_history)
106
+ return "", chat_history # return "" to let msg box be clear
107
+
108
+ # def load_chat_history(self):
109
+ # # filtered_messages = [msg for msg in self.conv.programmer.messages if isinstance(msg, dict) and msg.get("role") in ["user", "assistant"]]
110
+ # # # [{"role": "user", "content": "qqq"},{"role": "assistant", "content": "aaa"}]
111
+ # # chat_history = [(filtered_messages[i].get('content'), filtered_messages[i + 1].get('content')) for i in range(0, len(filtered_messages), 2)]
112
+ # return self.conv.chat_history
113
+
114
+ def save_dialogue(self, chat_history):
115
+ self.conv.save_conv()
116
+ with open(os.path.join(self.local_cache_dir,'system_dialogue.json'), 'w') as f:
117
+ json.dump(chat_history, f, indent=4)
118
+ f.close()
119
+ print(f"Dialogue saved in {os.path.join(self.local_cache_dir,'system_dialogue.json')}.")
120
+
121
+ def load_dialogue(self, json_file):
122
+ with open(json_file, 'r') as f:
123
+ chat_history = json.load(f)
124
+ f.close()
125
+ self.conv.chat_history = chat_history
126
+ return chat_history
127
+
128
+ def clear_all(self, message, chat_history):
129
+ self.conv.clear()
130
+ return "", []
131
+
132
+ my_app = app()
133
+
134
+ with gr.Blocks(theme=gr.themes.Soft(), css='front_end/css.css', js='front_end/javascript.js') as demo:
135
+ # get history for chatbot.
136
+ #history = my_app.load_dialogue("system_dialogue.json") # load history in previous conversation by indicate system_dialogue.json
137
+ chatbot = gr.Chatbot(value=my_app.conv.chat_history, height=600, label="LAMBDA", avatar_images=["front_end/user.jpg", "front_end/lambda.jpg"], show_copy_button=True)
138
+ with gr.Group():
139
+ with gr.Row():
140
+ upload_btn = gr.UploadButton(label="Upload Data", file_types=["csv", "xlsx"], scale=1)
141
+ # datasets = gr.File(file_types=["csv", "xlsx"], elem_id="file_upload",height=80)
142
+ msg = gr.Textbox(show_label=False, placeholder="Sent message to LLM", scale=6)
143
+ submit = gr.Button("Submit", scale=1)
144
+ with gr.Row():
145
+ board = gr.Button(value="Show/Update DataFrame", elem_id="df_btn", elem_classes="df_btn")
146
+ export_notebook = gr.Button(value="Notebook")
147
+ down_notebook = gr.DownloadButton("Download Notebook", visible=False)
148
+ generate_report = gr.Button(value="Generate Report")
149
+ down_report = gr.DownloadButton("Download Report",visible=False)
150
+
151
+ edit = gr.Button(value="Edit Code",elem_id="ed_btn", elem_classes="ed_btn")
152
+ save = gr.Button(value="Save Dialogue")
153
+ clear = gr.ClearButton(value="Clear All")
154
+
155
+ with gr.Group():
156
+ with gr.Row(visible=False, elem_id="ed",elem_classes="ed"):
157
+ code = gr.Code(label="Code",scale=6)
158
+ code_btn = gr.Button("Submit Code",scale=1)
159
+ code_btn.click(fn=my_app.call_llm, inputs=[msg, chatbot, code], outputs=[msg, chatbot])
160
+
161
+
162
+ df = gr.Dataframe(visible=False,elem_id="df",elem_classes="df")
163
+
164
+ upload_btn.upload(fn=my_app.cache_file, inputs=upload_btn)
165
+ if my_app.config['streaming']:
166
+ def chat_streaming(message, chat_history, code=None):
167
+ if not code:
168
+ my_app.conv.programmer.messages.append({"role": "user", "content": message})
169
+ else:
170
+ message = code
171
+ #my_app.conv.stream_workflow(function_lib=None, code=code)
172
+ return "", chat_history + [[message, None]]
173
+
174
+ msg.submit(chat_streaming, [msg, chatbot], [msg, chatbot], queue=False).then(
175
+ my_app.conv.stream_workflow, chatbot, chatbot
176
+ )
177
+ submit.click(chat_streaming, [msg, chatbot], [msg, chatbot], queue=False).then(
178
+ my_app.conv.stream_workflow, chatbot, chatbot
179
+ )
180
+ else:
181
+ msg.submit(my_app.call_llm, [msg, chatbot], [msg, chatbot])
182
+ submit.click(my_app.call_llm, [msg, chatbot], [msg, chatbot])
183
+ board.click(my_app.open_board, inputs=[], outputs=df)
184
+ edit.click(my_app.rendering_code, inputs=None, outputs=code)
185
+ export_notebook.click(my_app.export_code, inputs=None, outputs=[export_notebook, down_notebook])
186
+ down_notebook.click(my_app.down_notebook, inputs=None, outputs=[export_notebook, down_notebook])
187
+ generate_report.click(my_app.generate_report,inputs=None,outputs=[generate_report,down_report])
188
+ down_report.click(my_app.down_report,inputs=None,outputs=[generate_report,down_report])
189
+ save.click(my_app.save_dialogue, inputs=chatbot)
190
+ clear.click(fn=my_app.clear_all, inputs=[msg, chatbot], outputs=[msg, chatbot])
191
+ #demo.load(fn=my_app.load_chat_history, inputs=None, outputs=chatbot)
192
+
193
+ if __name__ == '__main__':
194
+ demo.launch(server_name="0.0.0.0",server_port=8000, debug=True, share=True)
195
+
196
+
cache/cache.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pandas as pd
2
+ import os
3
+ import shutil
4
+
5
+
6
+ class data_cache:
7
+
8
+ def __init__(self, file_path) -> None:
9
+ self.file_path = file_path
10
+ self.data_cache_path = os.path.dirname(file_path)
11
+ self.data = pd.read_csv(self.file_path, encoding='gbk')
12
+ self.general_info = {}
13
+
14
+ def get_description(self) -> dict:
15
+ # self.general_info["info"] = get_info(self.data)
16
+ general_info = get_general_info(self.data)
17
+ self.general_info["num_rows"], self.general_info["num_features"], self.general_info["features"], \
18
+ self.general_info["col_type"], self.general_info["missing_val"] = general_info["num_rows"], \
19
+ general_info["num_features"], general_info["features"], general_info["col_type"], general_info[
20
+ "missing_val"]
21
+
22
+ self.general_info["describe"] = self.data.describe()
23
+ # self.general_info["label_counts"] = self.data["label"].value_counts()
24
+ return self.general_info
25
+
26
+
27
+ def get_general_info(data: pd.DataFrame):
28
+ return {"num_rows": data.shape[0], "num_features": data.shape[1], "features": data.columns,
29
+ "col_type": data.dtypes, "missing_val": data.isnull().sum()}
30
+
cache/oss.py ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import oss2
2
+ import time
3
+ import uuid
4
+ import os
5
+ from oss2.credentials import EnvironmentVariableCredentialsProvider
6
+ import requests
7
+ from joblib import load
8
+
9
+ bucket = None
10
+ expired_time = 3600
11
+
12
+
13
+ def init_dir(cache_dir):
14
+ '''
15
+ initialize the folders for conversation
16
+ :return: the folder path of oss
17
+ '''
18
+
19
+ new_uuid = str(uuid.uuid4())
20
+ current_fold = time.strftime('%Y-%m-%d', time.localtime())
21
+ oss_file_dir = 'user_tmp/' + new_uuid + '-' + current_fold
22
+ # local_cache_dir = '/home/maojsun/proj/dsagent_ci/cache/conv_cache/' + new_uuid + '-' + current_fold
23
+ local_cache_dir = cache_dir + new_uuid + '-' + current_fold
24
+ os.makedirs(local_cache_dir)
25
+ return oss_file_dir, local_cache_dir
26
+
27
+
28
+ def init_oss(endpoint, access_key_id, access_key_secret, bucket_name, expired):
29
+ auth = oss2.Auth(access_key_id, access_key_secret)
30
+ global bucket
31
+ bucket = oss2.Bucket(auth, endpoint, bucket_name)
32
+ global expired_time
33
+ expired_time = expired
34
+
35
+
36
+ def upload_oss_file(oss_file_dir, local_file_path):
37
+ '''
38
+ upload a local file to oss
39
+ :param local_file_path: local file path
40
+ :return: file name and download link, which can directly download the file by clicking the link.
41
+ '''
42
+ file_name = os.path.basename(local_file_path)
43
+
44
+ object_name = os.path.join(oss_file_dir, file_name)
45
+
46
+ bucket.put_object_from_file(object_name, local_file_path)
47
+ download_link = get_download_link(object_name)
48
+ return {'file_name': file_name, 'download_link': download_link}
49
+
50
+
51
+ def get_download_link(object_name):
52
+ # auth = oss2.ProviderAuth(EnvironmentVariableCredentialsProvider())
53
+ # 填写Object完整路径,例如exampledir/exampleobject.txt。Object完整路径中不能包含Bucket名称。 object_name = 'exampleobject.txt'
54
+ # 生成下载文件的签名URL,有效时间为3600秒。 # 设置slash_safe为True,OSS不会对Object完整路径中的正斜线(/)进行转义,此时生成的签名URL可以直接使用。
55
+ url = bucket.sign_url('GET', object_name, expired_time, slash_safe=True)
56
+ return url
57
+
58
+
59
+ if __name__ == '__main__':
60
+ # local_file = "/Users/stephensun/Desktop/pypro/darob/models/LogisticRegression.pkl"
61
+ # model = load(local_file)
62
+ # print(model)
63
+ # upload_oss_file(local_file)
64
+ before_files = os.listdir(
65
+ '/Users/stephensun/Desktop/pypro/dsagent_ci/cache/conv_cache/6fa78267-4e0b-418e-ac47-c9d99b6bbe3b-2024-04-18')
66
+ # with open('/Users/stephensun/Desktop/pypro/dsagent_ci/cache/conv_cache/6fa78267-4e0b-418e-ac47-c9d99b6bbe3b-2024-04-18/test.txt', 'w') as f:
67
+ # f.write("test")
68
+
69
+ after_files = os.listdir(
70
+ '/Users/stephensun/Desktop/pypro/dsagent_ci/cache/conv_cache/6fa78267-4e0b-418e-ac47-c9d99b6bbe3b-2024-04-18')
71
+ # print(check_folder(before_files, after_files))
config.yaml ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #================================================================================================
2
+ # Config of the LLMs
3
+ #================================================================================================
4
+ conv_model : "gpt-4o-mini" # the conversation model
5
+ programmer_model : "gpt-4o-mini"
6
+ inspector_model : "gpt-4o-mini"
7
+ api_key : ""
8
+ base_url_conv_model : 'https://api.openai.com/v1'
9
+ base_url_programmer : 'https://api.openai.com/v1'
10
+ base_url_inspector : 'https://api.openai.com/v1'
11
+ max_token_conv_model: 4096 # the max token of the conversation model, this will determine the maximum length of the report.
12
+
13
+ # conv_model: "gemini-1.5-flash" # the conversation model for high-quality responses
14
+ # programmer_model: "gemini-1.5-flash" # model for code-related tasks
15
+ # inspector_model: "gemini-1.5-flash" # model for error-checking and debugging
16
+ # api_key: ""  # replace with your API key. SECURITY: a live Google API key (AIzaSy…) was committed on this line — revoke it immediately and purge it from the git history
17
+ # base_url_conv_model: 'https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash'
18
+ # base_url_programmer: 'https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash'
19
+ # base_url_inspector: 'https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash'
20
+ # max_token_conv_model: 4096 # the maximum token limit for generating comprehensive reports
21
+
22
+
23
+ #================================================================================================
24
+ # Config of the system
25
+ #================================================================================================
26
+ streaming : True
27
+
28
+ #cache_related
29
+ oss_endpoint: ""
30
+ oss_access_key_id: ""
31
+ oss_access_secret: ""
32
+ oss_bucket_name: ""
33
+ expired_time: 36000 # The expired time of the link in cache
34
+ cache_dir : "" # local cache dir
35
+ max_attempts : 5 # The max attempts of self-correcting
36
+ max_exe_time: 18000 # max time for the execution
37
+
38
+ #knowledge integration
39
+ retrieval : False
40
+ mode : "full"
conversation.py ADDED
@@ -0,0 +1,453 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+ import openai
4
+ import json
5
+ # sys.path.append(r"D:\python\Lib\site-packages")
6
+ import sys
7
+ if 'D:\\python\\Lib\\site-packages' in sys.path:
8
+ sys.path.remove('D:\\python\\Lib\\site-packages')
9
+
10
+
11
+ from programmer import Programmer
12
+ from inspector import Inspector
13
+ from function_calling import *
14
+ from cache.cache import *
15
+ from prompt_engineering.prompts import *
16
+ import warnings
17
+ import traceback
18
+ # import pdfkit
19
+ import zipfile
20
+ from kernel import *
21
+ from lambda_utils import *
22
+ from cache.oss import upload_oss_file
23
+ # from utilities import function_calling
24
+
25
+
26
+ warnings.filterwarnings("ignore")
27
+
28
+ class Conversation():
29
+
30
+ def __init__(self, config) -> None:
31
+ self.config = config
32
+ self.model = config['conv_model']
33
+ self.client = openai.OpenAI(api_key=config['api_key'], base_url=config['base_url_conv_model'])
34
+ self.programmer = Programmer(api_key=config['api_key'], model=config['programmer_model'], base_url=config['base_url_programmer'])
35
+ self.inspector = Inspector(api_key=config['api_key'], model=config['inspector_model'], base_url=config['base_url_inspector'])
36
+ self.messages = []
37
+ self.chat_history = []
38
+ self.retrieval = self.config['retrieval']
39
+ self.kernel = CodeKernel(config['max_exe_time'])
40
+ self.function_repository = {}
41
+ self.my_data_cache = None
42
+ self.max_attempts = config['max_attempts'] # maximum attempts of self-correcting
43
+ self.error_count = 0 # error count of self-correcting
44
+ self.repair_count = 0 # repair count of self-correcting
45
+ self.oss_dir = None
46
+ self.local_cache_dir = None
47
+ self.run_code(IMPORT)
48
+
49
+
50
+ def add_functions(self, function_lib: dict) -> None:
51
+ self.function_repository = function_lib
52
+
53
+ def add_data(self, data_path) -> None:
54
+ self.my_data_cache = data_cache(data_path)
55
+
56
+ def check_folder(self, before_files, after_files):
57
+ new_files = set(after_files) - set(before_files)
58
+ cloud_cache_info = []
59
+ if new_files:
60
+ for file in new_files:
61
+ cache_info = upload_oss_file(self.oss_dir, os.path.join(self.local_cache_dir, file))
62
+ cloud_cache_info.append(cache_info)
63
+ return cloud_cache_info
64
+
65
+ def save_conv(self):
66
+ with open(os.path.join(self.local_cache_dir,'programmer_msg.json'), 'w') as f:
67
+ json.dump(self.programmer.messages, f, indent=4)
68
+ f.close()
69
+ with open(os.path.join(self.local_cache_dir,'inspector_msg.json'), 'w') as f:
70
+ json.dump(self.inspector.messages, f, indent=4)
71
+ f.close()
72
+ print(f"Conversation saved in {os.path.join(self.local_cache_dir,'programmer_msg.json')}")
73
+ print(f"Conversation saved in {os.path.join(self.local_cache_dir, 'inspector_msg.json')}")
74
+
75
+ def add_programmer_msg(self, message: dict):
76
+ self.programmer.messages.append(message)
77
+
78
+ def add_programmer_repair_msg(self, bug_code:str, error_msg:str, fix_method:str, role="user"):
79
+ message = {"role": role, "content": CODE_FIX.format(bug_code=bug_code, error_message=error_msg, fix_method=fix_method)}
80
+ self.programmer.messages.append(message)
81
+ def add_inspector_msg(self, bug_code:str, error_msg:str, role="user"):
82
+ message = {"role": role, "content": CODE_INSPECT.format(bug_code=bug_code,error_message=error_msg)}
83
+ self.inspector.messages.append(message)
84
+
85
+ def run_code(self,code):
86
+ try:
87
+ res_type, res = execute(code, self.kernel) # error in code of kernel will result in res_type:None
88
+ except Exception as e:
89
+ print(f'Error when executing code in kernel: {e}')
90
+ res_type, res = 'error', str(e)
91
+ self.add_inspector_msg(code, str(e))
92
+ return res_type, res
93
+
94
def rendering_code(self):
    """Return the most recent python snippet the programmer produced, or None.

    Walks the history backwards; index 0 (the initial prompt) is skipped,
    matching the original `range(len - 1, 0, -1)` bounds.
    """
    for msg in reversed(self.programmer.messages[1:]):
        if msg["role"] != "assistant":
            continue
        is_python, code = extract_code(msg["content"])
        if is_python:
            return code
    return None
101
+
102
+ # def human_loop(self, code):
103
+ # self.programmer.messages.append({"role": "user", "content": HUMAN_LOOP.format(code=code)})
104
+ #
105
+ # result = self.run_code(code)
106
+ # exec_result = f"Execution result of user's code:\n{result}."
107
+ # self.programmer.messages.append({"role": "system", "content": exec_result})
108
+ # return response
109
+
110
def show_data(self) -> pd.DataFrame:
    """Expose the cached dataset held by the data cache."""
    cache = self.my_data_cache
    return cache.data
112
+
113
def document_generation(self):
    """Generate an academic-style report from the programmer conversation.

    Appends the report-writing instruction to a copy of the programmer
    transcript, asks the conversation model for the report, saves it as
    markdown in the local cache dir, and returns the markdown path.
    """
    print("Report generating...")
    # Reuse the programmer transcript plus the report instruction as context.
    self.messages = self.programmer.messages + [{"role": "user", "content": Academic_Report}]
    report = self.call_chat_model(max_tokens=self.config["max_token_conv_model"]).choices[0].message.content
    self.messages.append({"role": "assistant", "content": report})
    mkd_path = os.path.join(self.local_cache_dir, 'report.md')
    # `with` closes the file on exit; the old explicit f.close() was redundant.
    with open(mkd_path, "w") as f:
        f.write(report)
    return mkd_path
131
+
132
def export_code(self):
    """Export the kernel session as notebook.ipynb; return the target path.

    Export failures are reported but not raised, so the path is always
    returned (the file may not exist on failure).
    """
    print("Exporting notebook...")
    target = os.path.join(self.local_cache_dir, 'notebook.ipynb')
    try:
        self.kernel.export(target)
    except Exception as exc:
        print(f"An error occurred when exporting notebook: {exc}")
    return target
140
+
141
def call_chat_model(self, max_tokens, functions=None, include_functions=False) -> dict:
    """Call the chat-completion endpoint on self.messages.

    On any failure the history is truncated to the first and last message
    (a crude context-overflow fallback) and the call is retried once
    without `max_tokens` or `functions`.
    """
    params = {
        "model": self.model,
        "messages": self.messages,
        "max_tokens": max_tokens,  # `None if x is None else x` folded to `x`
    }
    if include_functions:
        params["functions"] = functions

    try:
        return self.client.chat.completions.create(**params)
    except Exception as exc:
        # Keep only the system prompt and the newest message, then retry.
        self.messages = [self.messages[0], self.messages[-1]]
        print(f"Occurs error of calling chat model: {exc}")
        return self.client.chat.completions.create(model=self.model, messages=self.messages)
162
+
163
def clear(self):
    """Reset the session: histories, kernel, and all cached data/paths."""
    self.messages = []
    self.programmer.clear()
    self.inspector.clear()
    # Recycle the jupyter kernel instead of reusing a possibly dirty one.
    self.kernel.shutdown()
    del self.kernel
    self.kernel = CodeKernel()
    for attr in ("my_data_cache", "my_data_description", "oss_dir", "local_cache_dir"):
        setattr(self, attr, None)
174
+
175
def stream_workflow(self, chat_history, function_lib: dict=None, code=None) -> object:
    """Streaming workflow: plan -> generate code -> execute -> self-correct.

    Yields `chat_history` after every streamed token / status update so the
    UI can render progress. `code`, when given, injects user-written code
    (human-in-the-loop) instead of asking the programmer agent.
    """
    try:
        chat_history[-1][1] = ""
        if not self.my_data_cache and code is None:  # no dataset yet: plain chat only
            for message in self.programmer._call_chat_model_streaming():
                chat_history[-1][1] += message
                yield chat_history
            yield chat_history
            final_response = chat_history[-1][1]
            self.add_programmer_msg({"role": "assistant", "content": final_response})

        else:
            if not code:
                # Ask the programmer agent for a solution (may contain a code block).
                prog_response1_content = ''
                for message in self.programmer._call_chat_model_streaming(retrieval=self.retrieval, kernel=self.kernel):
                    chat_history[-1][1] += message
                    prog_response1_content += message
                    yield chat_history

                self.add_programmer_msg({"role": "assistant", "content": prog_response1_content})
            else:
                # Human-in-the-loop: execute the user-supplied code instead.
                prog_response1_content = HUMAN_LOOP.format(code=code)
                self.add_programmer_msg({"role": "user", "content": prog_response1_content})
            is_python, code = extract_code(prog_response1_content)
            print("is_python:", is_python)

            if is_python:
                chat_history[-1][1] += '\n🖥️ Execute code...\n'
                yield chat_history
                before_files = os.listdir(self.local_cache_dir)
                res_type, res = self.run_code(code)
                if res_type and res_type != 'error' or not res:  # executed cleanly (res may be None)
                    after_files = os.listdir(self.local_cache_dir)
                    cloud_cache_info = self.check_folder(before_files, after_files)
                    # Markdown links for files the code created (inline images for pictures).
                    link_info = '\n'.join([f"![{info['file_name']}]({info['download_link']})" if info['file_name'].endswith(
                        ('.jpg', '.jpeg', '.png', '.gif')) else f"[{info['file_name']}]({info['download_link']})" for info in
                        cloud_cache_info])
                    print("res:", res)

                    chat_history[-1][1] += f"\n✅ Execution result:\n{res}\n\n"
                    yield chat_history
                    self.add_programmer_msg({"role": "user", "content": RESULT_PROMPT.format(res)})

                    prog_response2 = ''
                    for message in self.programmer._call_chat_model_streaming():
                        chat_history[-1][1] += message
                        prog_response2 += message
                        yield chat_history

                    self.add_programmer_msg({"role": "assistant", "content": prog_response2})

                    chat_history[-1][1] += f"\n{link_info}"
                    yield chat_history

                # self-correcting
                else:
                    self.error_count += 1
                    round = 0
                    while res and round < self.max_attempts:  # bounded repair loop
                        chat_history[-1][1] = f'⭕ Execution error, try to repair the code, attempts: {round+1}....\n'
                        yield chat_history
                        self.add_inspector_msg(code, res)
                        if round == 3:  # late in the loop: force a strategy change
                            insp_response1_content = "Try other packages or methods."
                        else:
                            insp_response1 = self.inspector._call_chat_model()
                            insp_response1_content = insp_response1.choices[0].message.content
                        self.inspector.messages.append({"role": "assistant", "content": insp_response1_content})

                        self.add_programmer_repair_msg(code, res, insp_response1_content)

                        prog_response1_content = ''
                        for message in self.programmer._call_chat_model_streaming():
                            chat_history[-1][1] += message
                            prog_response1_content += message
                            yield chat_history
                        chat_history[-1][1] += '\n🖥️ Execute code...\n'
                        yield chat_history

                        self.add_programmer_msg({"role": "assistant", "content": prog_response1_content})
                        is_python, code = extract_code(prog_response1_content)
                        if is_python:
                            before_files = os.listdir(self.local_cache_dir)
                            res_type, res = self.run_code(code)
                            if res_type and res_type != 'error' or not res:  # execute successfully
                                self.repair_count += 1
                                break
                        round += 1
                    if round == self.max_attempts:  # maximum retry reached: hand back to the user
                        return prog_response1_content + "\nSorry, I can't fix the code, can you help me to modified it or give some suggestions?"

                    # revision successful
                    after_files = os.listdir(self.local_cache_dir)
                    cloud_cache_info = self.check_folder(before_files, after_files)
                    link_info = '\n'.join([f"![{info['file_name']}]({info['download_link']})" if info['file_name'].endswith(
                        ('.jpg', '.jpeg', '.png', '.gif')) else f"[{info['file_name']}]({info['download_link']})" for info in
                        cloud_cache_info])
                    print("res:", res)
                    chat_history[-1][1] += f"\n✅ Execution result:\n{res}\n\n"
                    yield chat_history

                    self.add_programmer_msg({"role": "user", "content": RESULT_PROMPT.format(res)})

                    prog_response2 = ''
                    for message in self.programmer._call_chat_model_streaming():
                        chat_history[-1][1] += message
                        prog_response2 += message
                        yield chat_history
                    self.add_programmer_msg({"role": "assistant", "content": prog_response2})
                    chat_history[-1][1] += f"\n{link_info}"
                    yield chat_history

            else:
                chat_history[-1][1] += "\nNo code detected or code is not python code."
                yield chat_history
                final_response = prog_response1_content + "\nNo code detected or code is not python code."
                if self.programmer.messages[-1]["role"] == "assistant":
                    self.programmer.messages[-1]["content"] = final_response

    except Exception as e:
        # Last-resort guard: surface the failure in the UI and keep the
        # message history consistent (assistant reply after a user turn).
        chat_history[-1][1] += "\nSorry, there is an error in the program, please try again."
        print(f"An error occurred: {e}")
        traceback.print_exc()
        if self.programmer.messages[-1]["role"] == "user":
            self.programmer.messages.append({"role": "assistant", "content": f"An error occurred in program: {e}"})
324
+
325
+
326
+ # a previous code without streaming
327
# a previous code without streaming
def run_workflow(self, function_lib: dict=None, code=None) -> object:
    """Non-streaming workflow: plan -> generate code -> execute -> self-correct.

    Returns the final assembled response string. `code`, when given, injects
    user-written code (human-in-the-loop) instead of asking the programmer.
    """
    try:
        if not self.my_data_cache and code is None:  # no dataset yet: plain chat only
            final_response = self.programmer._call_chat_model().choices[0].message.content
            self.add_programmer_msg({"role": "assistant", "content": final_response})
        else:
            if not code:
                prog_response1 = self.programmer._call_chat_model()
                prog_response1_content = prog_response1.choices[0].message.content
                self.add_programmer_msg({"role": "assistant", "content": prog_response1_content})
            else:
                # Human-in-the-loop: execute user-supplied code.
                prog_response1_content = HUMAN_LOOP.format(code=code)
                self.add_programmer_msg({"role": "user", "content": prog_response1_content})
                self.add_programmer_msg({"role": "assistant", "content": "I got the code, let me execute it."})
            is_python, code = extract_code(prog_response1_content)
            print("is_python:", is_python)

            if is_python:
                before_files = os.listdir(self.local_cache_dir)
                res_type, res = self.run_code(code)
                if res_type and res_type != 'error' or not res:  # executed cleanly (res may be None)
                    after_files = os.listdir(self.local_cache_dir)
                    cloud_cache_info = self.check_folder(before_files, after_files)
                    # Markdown links for files the code created (inline images for pictures).
                    link_info = '\n'.join([f"![{info['file_name']}]({info['download_link']})" if info['file_name'].endswith(
                        ('.jpg', '.jpeg', '.png', '.gif')) else f"[{info['file_name']}]({info['download_link']})" for info in
                        cloud_cache_info])
                    print("res:", res)

                    self.add_programmer_msg({"role": "user", "content": RESULT_PROMPT.format(res)})

                    prog_response2 = self.programmer._call_chat_model().choices[0].message.content

                    final_response = prog_response1_content + "\n" + f"Execution result:\n{res}\n\n{prog_response2}"
                    final_response += f"\n{link_info}"

                else:
                    # Self-correcting loop.
                    self.error_count += 1
                    round = 0
                    while res and round < self.max_attempts:  # bounded repair loop
                        self.add_inspector_msg(code, res)
                        if round == 3:  # late in the loop: force a strategy change
                            insp_response1_content = "Try other packages or methods."
                        else:
                            insp_response1 = self.inspector._call_chat_model()
                            insp_response1_content = insp_response1.choices[0].message.content
                        self.inspector.messages.append({"role": "assistant", "content": insp_response1_content})

                        self.add_programmer_repair_msg(code, res, insp_response1_content)
                        prog_response1 = self.programmer._call_chat_model()
                        prog_response1_content = prog_response1.choices[0].message.content
                        self.add_programmer_msg({"role": "assistant", "content": prog_response1_content})
                        is_python, code = extract_code(prog_response1_content)
                        if is_python:
                            before_files = os.listdir(self.local_cache_dir)
                            res_type, res = self.run_code(code)
                            # NOTE(review): unlike stream_workflow, this treats
                            # res_type == 'error' as success — confirm intended.
                            if res_type or not res:  # execute successfully
                                self.repair_count += 1
                                break
                        round += 1
                    if round == self.max_attempts:  # maximum retry reached: hand back to the user
                        return prog_response1_content + "\nSorry, I can't fix the code, can you help me to modified it or give some suggestions?"

                    # revision successful
                    after_files = os.listdir(self.local_cache_dir)
                    cloud_cache_info = self.check_folder(before_files, after_files)
                    link_info = '\n'.join([f"![{info['file_name']}]({info['download_link']})" if info['file_name'].endswith(
                        ('.jpg', '.jpeg', '.png', '.gif')) else f"[{info['file_name']}]({info['download_link']})" for info in
                        cloud_cache_info])
                    print("res:", res)
                    if 'head()' in code:
                        # Re-render dataframe previews as markdown via the conv model.
                        self.messages.append({"role": "user", "content": MKD_PROMPT.format(res)})
                        mkd_res = self.call_chat_model(max_tokens=512).choices[0].message.content
                        res = mkd_res
                        self.messages.pop()

                    self.add_programmer_msg({"role": "user", "content": RESULT_PROMPT.format(res)})

                    prog_response2 = self.programmer._call_chat_model().choices[0].message.content
                    self.add_programmer_msg({"role": "assistant", "content": prog_response2})
                    final_response = prog_response1_content + "\n" + f"Execution result:\n{res}\n\n{prog_response2}"
                    final_response += f"\n{link_info}"

            else:
                final_response = prog_response1_content + "\n" + "No code detected or code is not python code."
                if self.programmer.messages[-1]["role"] == "assistant":
                    self.programmer.messages[-1]["content"] = final_response

    except Exception as e:
        # Last-resort guard: log, keep history consistent, return a canned reply.
        print(f"An error occurred: {e}")
        traceback.print_exc()
        if self.programmer.messages[-1]["role"] == "user":
            self.programmer.messages.append({"role": "assistant", "content": f"An error occurred in program: {e}"})
        return "Sorry, there is an error in the program, please try again."

    return final_response
445
+
446
+ # function_lib = {
447
+ #
448
+ # }
449
+
450
+
451
+
452
+
453
+
front_end/css.css ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
h1 {margin: 20px auto; text-align: center;}
/* The stray ';' before '!important' made the max-width declaration invalid. */
img {max-height: 400px !important; max-width: 50vw !important;}
/*footer {display: none !important}*/
front_end/javascript.js ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
function createGradioAnimation() {
    // Banner container for the animated welcome headline.
    var container = document.createElement('div');
    container.id = 'gradio-animation';
    container.style.fontSize = '2em';
    container.style.fontWeight = 'bold';
    container.style.textAlign = 'center';
    container.style.marginBottom = '20px';

    var text = 'Welcome to LAMBDA! Easy Data Analytics!';
    // Reveal one letter every 250ms; each letter fades in over 0.5s.
    Array.prototype.forEach.call(text, function (ch, idx) {
        setTimeout(function () {
            var letter = document.createElement('span');
            letter.style.opacity = '0';
            letter.style.transition = 'opacity 0.5s';
            letter.innerText = ch;

            container.appendChild(letter);

            setTimeout(function () {
                letter.style.opacity = '1';
            }, 50);
        }, idx * 250);
    });

    var gradioContainer = document.querySelector('.gradio-container');
    gradioContainer.insertBefore(container, gradioContainer.firstChild);

    // Reveal the editor panel when its button is clicked.
    var edButton = document.querySelector('.ed_btn');
    var edPanel = document.querySelector('.ed');
    edButton.addEventListener('click', function () {
        edPanel.style.display = 'block';
    });

    // Reveal the dataframe panel when its button is clicked.
    var dfButton = document.querySelector('.df_btn');
    var dfPanel = document.querySelector('.df');
    dfButton.addEventListener('click', function () {
        dfPanel.style.display = 'block';
    });

    return 'Animation created';
}
front_end/lambda.jpg ADDED
front_end/user.jpg ADDED
function_calling.py ADDED
@@ -0,0 +1,697 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Methods for creating function specs in the style of OpenAI Functions"""
2
+
3
+ from __future__ import annotations
4
+
5
+ import collections
6
+ import inspect
7
+ import logging
8
+ import types
9
+ import typing
10
+ import uuid
11
+ from typing import (
12
+ TYPE_CHECKING,
13
+ Annotated,
14
+ Any,
15
+ Callable,
16
+ Literal,
17
+ Optional,
18
+ Union,
19
+ cast,
20
+ )
21
+
22
+ from pydantic import BaseModel
23
+ from typing_extensions import TypedDict, get_args, get_origin, is_typeddict
24
+
25
+ from langchain_core._api import beta, deprecated
26
+ from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, ToolMessage
27
+ from langchain_core.utils.json_schema import dereference_refs
28
+ from langchain_core.utils.pydantic import is_basemodel_subclass
29
+
30
+ if TYPE_CHECKING:
31
+ from langchain_core.tools import BaseTool
32
+
33
+ logger = logging.getLogger(__name__)
34
+
35
+ PYTHON_TO_JSON_TYPES = {
36
+ "str": "string",
37
+ "int": "integer",
38
+ "float": "number",
39
+ "bool": "boolean",
40
+ }
41
+
42
+
43
# Shape of one entry sent as a "function" spec to the model API.
class FunctionDescription(TypedDict):
    """Representation of a callable function to send to an LLM."""

    name: str
    """The name of the function."""
    description: str
    """A description of the function."""
    parameters: dict
    """The parameters of the function."""
52
+
53
+
54
# Wrapper shape used by the OpenAI "tools" request field.
class ToolDescription(TypedDict):
    """Representation of a callable function to the OpenAI API."""

    type: Literal["function"]
    """The type of the tool."""
    function: FunctionDescription
    """The function description."""
61
+
62
+
63
+ def _rm_titles(kv: dict, prev_key: str = "") -> dict:
64
+ new_kv = {}
65
+ for k, v in kv.items():
66
+ if k == "title":
67
+ if isinstance(v, dict) and prev_key == "properties" and "title" in v:
68
+ new_kv[k] = _rm_titles(v, k)
69
+ else:
70
+ continue
71
+ elif isinstance(v, dict):
72
+ new_kv[k] = _rm_titles(v, k)
73
+ else:
74
+ new_kv[k] = v
75
+ return new_kv
76
+
77
+
78
@deprecated(
    "0.1.16",
    alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
    removal="1.0",
)
def convert_pydantic_to_openai_function(
    model: type,
    *,
    name: Optional[str] = None,
    description: Optional[str] = None,
    rm_titles: bool = True,
) -> FunctionDescription:
    """Converts a Pydantic model to a function description for the OpenAI API.

    Args:
        model: The Pydantic model to convert.
        name: The name of the function. If not provided, the title of the schema will be
            used.
        description: The description of the function. If not provided, the description
            of the schema will be used.
        rm_titles: Whether to remove titles from the schema. Defaults to True.

    Returns:
        The function description.

    Raises:
        TypeError: If ``model`` is not a Pydantic model (v1 or v2).
    """
    if hasattr(model, "model_json_schema"):
        schema = model.model_json_schema()  # Pydantic 2
    elif hasattr(model, "schema"):
        schema = model.schema()  # Pydantic 1
    else:
        msg = "Model must be a Pydantic model."
        raise TypeError(msg)
    # Inline $ref targets so the schema is self-contained, then drop the
    # now-unused definition tables of either pydantic generation.
    schema = dereference_refs(schema)
    if "definitions" in schema:  # pydantic 1
        schema.pop("definitions", None)
    if "$defs" in schema:  # pydantic 2
        schema.pop("$defs", None)
    title = schema.pop("title", "")
    default_description = schema.pop("description", "")
    return {
        "name": name or title,
        "description": description or default_description,
        "parameters": _rm_titles(schema) if rm_titles else schema,
    }
122
+
123
+
124
@deprecated(
    "0.1.16",
    alternative="langchain_core.utils.function_calling.convert_to_openai_tool()",
    removal="1.0",
)
def convert_pydantic_to_openai_tool(
    model: type[BaseModel],
    *,
    name: Optional[str] = None,
    description: Optional[str] = None,
) -> ToolDescription:
    """Converts a Pydantic model to a function description for the OpenAI API.

    Args:
        model: The Pydantic model to convert.
        name: The name of the function. If not provided, the title of the schema will be
            used.
        description: The description of the function. If not provided, the description
            of the schema will be used.

    Returns:
        The tool description ({"type": "function", "function": ...}).
    """
    fn_description = convert_pydantic_to_openai_function(
        model, name=name, description=description
    )
    return {"type": "function", "function": fn_description}
151
+
152
+
153
+ def _get_python_function_name(function: Callable) -> str:
154
+ """Get the name of a Python function."""
155
+ return function.__name__
156
+
157
+
158
@deprecated(
    "0.1.16",
    alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
    removal="1.0",
)
def convert_python_function_to_openai_function(
    function: Callable,
) -> FunctionDescription:
    """Convert a Python function to an OpenAI function-calling API compatible dict.

    Assumes the Python function has type hints and a docstring with a description. If
    the docstring has Google Python style argument descriptions, these will be
    included as well.

    Args:
        function: The Python function to convert.

    Returns:
        The OpenAI function description.
    """
    # Local import to avoid a circular dependency with langchain_core.tools.
    from langchain_core.tools.base import create_schema_from_function

    fn_name = _get_python_function_name(function)
    schema_model = create_schema_from_function(
        fn_name,
        function,
        filter_args=(),
        parse_docstring=True,
        error_on_invalid_docstring=False,
        include_injected=False,
    )
    return convert_pydantic_to_openai_function(
        schema_model,
        name=fn_name,
        description=schema_model.__doc__,
    )
194
+
195
+
196
def _convert_typed_dict_to_openai_function(typed_dict: type) -> FunctionDescription:
    """Convert a TypedDict class via a synthesized pydantic-v1 model."""
    from pydantic.v1 import BaseModel

    seen: dict = {}
    model = cast(
        type[BaseModel],
        _convert_any_typed_dicts_to_pydantic(typed_dict, visited=seen),
    )
    return convert_pydantic_to_openai_function(model)  # type: ignore
205
+
206
+
207
# Safety cap on nested TypedDict conversion depth.
_MAX_TYPED_DICT_RECURSION = 25


def _convert_any_typed_dicts_to_pydantic(
    type_: type,
    *,
    visited: dict,
    depth: int = 0,
) -> type:
    """Recursively replace TypedDict classes inside ``type_`` with pydantic-v1 models.

    ``visited`` memoizes already-converted TypedDicts (and breaks cycles);
    recursion stops at ``_MAX_TYPED_DICT_RECURSION`` levels, returning the
    type unchanged beyond that.
    """
    from pydantic.v1 import Field as Field_v1
    from pydantic.v1 import create_model as create_model_v1

    if type_ in visited:
        return visited[type_]
    elif depth >= _MAX_TYPED_DICT_RECURSION:
        return type_
    elif is_typeddict(type_):
        typed_dict = type_
        docstring = inspect.getdoc(typed_dict)
        annotations_ = typed_dict.__annotations__
        # Pull per-field descriptions out of a Google-style docstring, if any.
        description, arg_descriptions = _parse_google_docstring(
            docstring, list(annotations_)
        )
        fields: dict = {}
        for arg, arg_type in annotations_.items():
            if get_origin(arg_type) is Annotated:
                annotated_args = get_args(arg_type)
                new_arg_type = _convert_any_typed_dicts_to_pydantic(
                    annotated_args[0], depth=depth + 1, visited=visited
                )
                # Annotated[T, default, "description"] -> Field kwargs.
                field_kwargs = dict(zip(("default", "description"), annotated_args[1:]))
                if (field_desc := field_kwargs.get("description")) and not isinstance(
                    field_desc, str
                ):
                    msg = (
                        f"Invalid annotation for field {arg}. Third argument to "
                        f"Annotated must be a string description, received value of "
                        f"type {type(field_desc)}."
                    )
                    raise ValueError(msg)
                elif arg_desc := arg_descriptions.get(arg):
                    # Fall back to the docstring's description for this field.
                    field_kwargs["description"] = arg_desc
                else:
                    pass
                fields[arg] = (new_arg_type, Field_v1(**field_kwargs))
            else:
                new_arg_type = _convert_any_typed_dicts_to_pydantic(
                    arg_type, depth=depth + 1, visited=visited
                )
                field_kwargs = {"default": ...}  # Ellipsis marks the field required
                if arg_desc := arg_descriptions.get(arg):
                    field_kwargs["description"] = arg_desc
                fields[arg] = (new_arg_type, Field_v1(**field_kwargs))
        model = create_model_v1(typed_dict.__name__, **fields)
        model.__doc__ = description
        visited[typed_dict] = model
        return model
    elif (origin := get_origin(type_)) and (type_args := get_args(type_)):
        # Generic container (list[...], dict[...], Union[...]): convert its args.
        subscriptable_origin = _py_38_safe_origin(origin)
        type_args = tuple(
            _convert_any_typed_dicts_to_pydantic(arg, depth=depth + 1, visited=visited)
            for arg in type_args  # type: ignore[index]
        )
        return subscriptable_origin[type_args]  # type: ignore[index]
    else:
        return type_
273
+
274
+
275
@deprecated(
    "0.1.16",
    alternative="langchain_core.utils.function_calling.convert_to_openai_function()",
    removal="1.0",
)
def format_tool_to_openai_function(tool: BaseTool) -> FunctionDescription:
    """Format tool into the OpenAI function API.

    Args:
        tool: The tool to format.

    Returns:
        The function description.
    """
    from langchain_core.tools import simple

    # Legacy string-in/string-out tools expose no args schema at all.
    is_simple_oai_tool = isinstance(tool, simple.Tool) and not tool.args_schema
    if tool.tool_call_schema and not is_simple_oai_tool:
        return convert_pydantic_to_openai_function(
            tool.tool_call_schema, name=tool.name, description=tool.description
        )

    # Fallback for tools without an args_schema that expect one string
    # argument: OpenAI requires an object-typed parameter schema, so wrap
    # the value in a synthetic "__arg1" property.
    fallback_parameters = {
        "properties": {
            "__arg1": {"title": "__arg1", "type": "string"},
        },
        "required": ["__arg1"],
        "type": "object",
    }
    return {
        "name": tool.name,
        "description": tool.description,
        "parameters": fallback_parameters,
    }
313
+
314
+
315
@deprecated(
    "0.1.16",
    alternative="langchain_core.utils.function_calling.convert_to_openai_tool()",
    removal="1.0",
)
def format_tool_to_openai_tool(tool: BaseTool) -> ToolDescription:
    """Format tool into the OpenAI function API.

    Args:
        tool: The tool to format.

    Returns:
        The tool description ({"type": "function", "function": ...}).
    """
    return {"type": "function", "function": format_tool_to_openai_function(tool)}
331
+
332
+
333
+ def convert_to_openai_function(
334
+ function: Union[dict[str, Any], type, Callable, BaseTool],
335
+ *,
336
+ strict: Optional[bool] = None,
337
+ ) -> dict[str, Any]:
338
+ """Convert a raw function/class to an OpenAI function.
339
+ Args:
340
+ function:
341
+ A dictionary, Pydantic BaseModel class, TypedDict class, a LangChain
342
+ Tool object, or a Python function. If a dictionary is passed in, it is
343
+ assumed to already be a valid OpenAI function, a JSON schema with
344
+ top-level 'title' key specified, an Anthropic format
345
+ tool, or an Amazon Bedrock Converse format tool.
346
+ strict:
347
+ If True, model output is guaranteed to exactly match the JSON Schema
348
+ provided in the function definition. If None, ``strict`` argument will not
349
+ be included in function definition.
350
+
351
+ Returns:
352
+ A dict version of the passed in function which is compatible with the OpenAI
353
+ function-calling API.
354
+
355
+ Raises:
356
+ ValueError: If function is not in a supported format.
357
+
358
+ .. versionchanged:: 0.2.29
359
+
360
+ ``strict`` arg added.
361
+
362
+ .. versionchanged:: 0.3.13
363
+
364
+ Support for Anthropic format tools added.
365
+
366
+ .. versionchanged:: 0.3.14
367
+
368
+ Support for Amazon Bedrock Converse format tools added.
369
+
370
+ .. versionchanged:: 0.3.16
371
+
372
+ 'description' and 'parameters' keys are now optional. Only 'name' is
373
+ required and guaranteed to be part of the output.
374
+ """
375
+ from langchain_core.tools import BaseTool
376
+
377
+ # an Anthropic format tool
378
+ if isinstance(function, dict) and all(
379
+ k in function for k in ("name", "input_schema")
380
+ ):
381
+ oai_function = {
382
+ "name": function["name"],
383
+ "parameters": function["input_schema"],
384
+ }
385
+ if "description" in function:
386
+ oai_function["description"] = function["description"]
387
+ # an Amazon Bedrock Converse format tool
388
+ elif isinstance(function, dict) and "toolSpec" in function:
389
+ oai_function = {
390
+ "name": function["toolSpec"]["name"],
391
+ "parameters": function["toolSpec"]["inputSchema"]["json"],
392
+ }
393
+ if "description" in function["toolSpec"]:
394
+ oai_function["description"] = function["toolSpec"]["description"]
395
+ # already in OpenAI function format
396
+ elif isinstance(function, dict) and "name" in function:
397
+ oai_function = {
398
+ k: v
399
+ for k, v in function.items()
400
+ if k in ("name", "description", "parameters", "strict")
401
+ }
402
+ # a JSON schema with title and description
403
+ elif isinstance(function, dict) and "title" in function:
404
+ function_copy = function.copy()
405
+ oai_function = {"name": function_copy.pop("title")}
406
+ if "description" in function_copy:
407
+ oai_function["description"] = function_copy.pop("description")
408
+ if function_copy and "properties" in function_copy:
409
+ oai_function["parameters"] = function_copy
410
+ elif isinstance(function, type) and is_basemodel_subclass(function):
411
+ oai_function = cast(dict, convert_pydantic_to_openai_function(function))
412
+ elif is_typeddict(function):
413
+ oai_function = cast(
414
+ dict, _convert_typed_dict_to_openai_function(cast(type, function))
415
+ )
416
+ elif isinstance(function, BaseTool):
417
+ oai_function = cast(dict, format_tool_to_openai_function(function))
418
+ elif callable(function):
419
+ oai_function = cast(dict, convert_python_function_to_openai_function(function))
420
+ else:
421
+ msg = (
422
+ f"Unsupported function\n\n{function}\n\nFunctions must be passed in"
423
+ " as Dict, pydantic.BaseModel, or Callable. If they're a dict they must"
424
+ " either be in OpenAI function format or valid JSON schema with top-level"
425
+ " 'title' and 'description' keys."
426
+ )
427
+ raise ValueError(msg)
428
+
429
+ if strict is not None:
430
+ if "strict" in oai_function and oai_function["strict"] != strict:
431
+ msg = (
432
+ f"Tool/function already has a 'strict' key wth value "
433
+ f"{oai_function['strict']} which is different from the explicit "
434
+ f"`strict` arg received {strict=}."
435
+ )
436
+ raise ValueError(msg)
437
+ oai_function["strict"] = strict
438
+ if strict:
439
+ # As of 08/06/24, OpenAI requires that additionalProperties be supplied and
440
+ # set to False if strict is True.
441
+ # All properties layer needs 'additionalProperties=False'
442
+ oai_function["parameters"] = _recursive_set_additional_properties_false(
443
+ oai_function["parameters"]
444
+ )
445
+ return oai_function
446
+
447
+
448
def convert_to_openai_tool(
    tool: Union[dict[str, Any], type[BaseModel], Callable, BaseTool],
    *,
    strict: Optional[bool] = None,
) -> dict[str, Any]:
    """Convert a tool-like object to an OpenAI tool schema.

    OpenAI tool schema reference:
    https://platform.openai.com/docs/api-reference/chat/create#chat-create-tools

    Args:
        tool:
            A dictionary, pydantic.BaseModel class, Python function, or
            BaseTool. Dictionaries are assumed to already be in a supported
            format: a valid OpenAI function, a JSON schema with a top-level
            'title' key, an Anthropic format tool, or an Amazon Bedrock
            Converse format tool.
        strict:
            If True, model output is guaranteed to exactly match the JSON
            Schema provided in the function definition. If None, ``strict``
            is not included in the tool definition.

    Returns:
        A dict version of the passed in tool which is compatible with the
        OpenAI tool-calling API.

    .. versionchanged:: 0.2.29

        ``strict`` arg added.

    .. versionchanged:: 0.3.13

        Support for Anthropic format tools added.

    .. versionchanged:: 0.3.14

        Support for Amazon Bedrock Converse format tools added.

    .. versionchanged:: 0.3.16

        'description' and 'parameters' keys are now optional. Only 'name' is
        required and guaranteed to be part of the output.
    """
    # Pass through dicts that are already wrapped in the OpenAI tool envelope.
    already_wrapped = (
        isinstance(tool, dict)
        and tool.get("type") == "function"
        and "function" in tool
    )
    if already_wrapped:
        return tool
    oai_function = convert_to_openai_function(tool, strict=strict)
    return {"type": "function", "function": oai_function}
495
+
496
+
497
@beta()
def tool_example_to_messages(
    input: str,
    tool_calls: list[BaseModel],
    tool_outputs: Optional[list[str]] = None,
    *,
    ai_response: Optional[str] = None,
) -> list[BaseMessage]:
    """Convert an example into a list of messages that can be fed into an LLM.

    This adapter turns a single (input, tool_calls) example into the message
    sequence a chat model expects:

    1) HumanMessage: the content from which information should be extracted.
    2) AIMessage: the extracted information from the model, as tool calls.
    3) ToolMessage: confirmation to the model that the tool was called
       correctly (required because some chat models are hyper-optimized for
       agents rather than extraction).

    If ``ai_response`` is given, a final AIMessage with that content is added.

    Arguments:
        input: string, the user input.
        tool_calls: list of tool calls represented as Pydantic BaseModels.
        tool_outputs: optional list of tool call outputs; when omitted a
            placeholder confirmation is inserted for each call.
        ai_response: optional content for a final AIMessage.

    Returns:
        A list of messages.

    Examples:

        .. code-block:: python

            from typing import List, Optional
            from pydantic import BaseModel, Field
            from langchain_openai import ChatOpenAI

            class Person(BaseModel):
                '''Information about a person.'''
                name: Optional[str] = Field(..., description="The name of the person")
                hair_color: Optional[str] = Field(
                    ..., description="The color of the person's hair if known"
                )
                height_in_meters: Optional[str] = Field(
                    ..., description="Height in METERs"
                )

            examples = [
                (
                    "The ocean is vast and blue. It's more than 20,000 feet deep.",
                    Person(name=None, height_in_meters=None, hair_color=None),
                ),
                (
                    "Fiona traveled far from France to Spain.",
                    Person(name="Fiona", height_in_meters=None, hair_color=None),
                ),
            ]


            messages = []

            for txt, tool_call in examples:
                messages.extend(
                    tool_example_to_messages(txt, [tool_call])
                )
    """
    openai_tool_calls = [
        {
            "id": str(uuid.uuid4()),
            "type": "function",
            "function": {
                # The function name currently mirrors the pydantic model's
                # class name; this is implicit in the API and may change.
                "name": call.__class__.__name__,
                "arguments": call.model_dump_json(),
            },
        }
        for call in tool_calls
    ]
    messages: list[BaseMessage] = [
        HumanMessage(content=input),
        AIMessage(content="", additional_kwargs={"tool_calls": openai_tool_calls}),
    ]
    # Falsy tool_outputs (None or empty) get placeholder confirmations.
    tool_outputs = tool_outputs or ["You have correctly called this tool."] * len(
        openai_tool_calls
    )
    for output, call_dict in zip(tool_outputs, openai_tool_calls):
        messages.append(ToolMessage(content=output, tool_call_id=call_dict["id"]))  # type: ignore

    if ai_response:
        messages.append(AIMessage(content=ai_response))
    return messages
599
+
600
+
601
def _parse_google_docstring(
    docstring: Optional[str],
    args: list[str],
    *,
    error_on_invalid_docstring: bool = False,
) -> tuple[str, dict]:
    """Parse the function and argument descriptions from the docstring of a function.

    Assumes the function docstring follows Google Python style guide.
    """
    if not docstring:
        if error_on_invalid_docstring:
            raise ValueError("Found invalid Google-Style docstring.")
        description = ""
        args_block = None
    else:
        blocks = docstring.split("\n\n")
        if error_on_invalid_docstring:
            # Only these pseudo-args are allowed to be undocumented.
            annotated = {
                a for a in args if a not in ("run_manager", "callbacks", "return")
            }
            has_args_section = len(blocks) >= 2 and any(
                b.startswith("Args:") for b in blocks[1:]
            )
            if annotated and not has_args_section:
                raise ValueError("Found invalid Google-Style docstring.")
        summary_parts: list[str] = []
        args_block = None
        seen_section = False
        for block in blocks:
            if block.startswith("Args:"):
                args_block = block
                break
            if block.startswith(("Returns:", "Example:")):
                # Don't break in case Args come after
                seen_section = True
            elif not seen_section:
                summary_parts.append(block)
        description = " ".join(summary_parts)
    arg_descriptions: dict = {}
    if args_block:
        current = None
        for line in args_block.split("\n")[1:]:
            if ":" in line:
                name_part, _, desc = line.partition(":")
                current = name_part.strip()
                # Strip a parenthesized type annotation, e.g. "x (int):".
                base, _, annotation = current.partition(" ")
                if annotation.startswith("(") and annotation.endswith(")"):
                    current = base
                arg_descriptions[current] = desc.strip()
            elif current:
                # Continuation line of the previous argument's description.
                arg_descriptions[current] += " " + line.strip()
    return description, arg_descriptions
658
+
659
+
660
def _py_38_safe_origin(origin: type) -> type:
    """Map a runtime generic origin to its Python-3.8-safe typing equivalent.

    Builtin containers pass through; collections.abc ABCs are translated to
    their typing aliases; unknown origins are returned unchanged.
    """
    translation: dict[type, Any] = {
        dict: dict,
        list: list,
        tuple: tuple,
        set: set,
        collections.abc.Iterable: typing.Iterable,
        collections.abc.Mapping: typing.Mapping,
        collections.abc.Sequence: typing.Sequence,
        collections.abc.MutableMapping: typing.MutableMapping,
    }
    # types.UnionType (the `X | Y` origin) only exists on Python >= 3.10.
    if hasattr(types, "UnionType"):
        translation[types.UnionType] = Union
    return cast(type, translation.get(origin, origin))
677
+
678
+
679
def _recursive_set_additional_properties_false(
    schema: dict[str, Any],
) -> dict[str, Any]:
    """Set ``additionalProperties: False`` on every object level of a schema.

    OpenAI strict mode (as of 08/06/24) requires additionalProperties to be
    present and False on each properties layer. Mutates ``schema`` in place
    and returns it.
    """
    if not isinstance(schema, dict):
        return schema
    # An object level is recognized by a 'required' key, or by an explicitly
    # empty 'properties' mapping (which still needs the flag).
    empty_properties = "properties" in schema and not schema["properties"]
    if "required" in schema or empty_properties:
        schema["additionalProperties"] = False

    # Descend into nested object and array schemas.
    for child in schema.get("properties", {}).values():
        _recursive_set_additional_properties_false(child)
    if "items" in schema:
        _recursive_set_additional_properties_false(schema["items"])

    return schema
inspector.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import openai
2
+
3
+
4
class Inspector:
    """Thin wrapper around the OpenAI chat-completions client.

    Keeps a running message list and an optional function repository so a
    caller can drive multi-turn, function-calling conversations.
    """

    def __init__(self, api_key , model="gpt-3.5-turbo-1106", base_url=''):
        # base_url allows pointing the SDK at an OpenAI-compatible endpoint.
        self.client = openai.OpenAI(api_key=api_key, base_url=base_url)
        self.model = model
        # Conversation history in OpenAI message-dict format.
        self.messages = []
        # Mapping of function name -> callable for function-calling turns.
        self.function_repository = {}

    def add_functions(self, function_lib: dict) -> None:
        """Replace the function repository with ``function_lib``."""
        self.function_repository = function_lib

    def _call_chat_model(self, functions=None, include_functions=False):
        """Issue one chat-completion request; return None on API error."""
        params = {
            "model": self.model,
            "messages": self.messages,
        }

        if include_functions:
            # Uses the legacy 'functions'/'function_call' API (pre-'tools').
            params['functions'] = functions
            params['function_call'] = "auto"

        try:
            return self.client.chat.completions.create(**params)
        except Exception as e:
            # Best-effort: log and signal failure with None.
            print(f"Error calling chat model: {e}")
            return None

    def run(self, function_lib=None):
        """Run one conversation turn and return the assistant's text.

        NOTE(review): when ``function_lib`` is not None, no branch executes
        and the method silently returns None — this looks unfinished;
        confirm the intended function-calling path.
        """
        try:
            if function_lib is None:
                response = self._call_chat_model()
                final_response = response.choices[0].message.content
                return final_response

        except Exception as e:
            print(f"An error occurred: {e}")
            return None

    def clear(self):
        """Reset the conversation history and the function repository."""
        self.messages = []
        self.function_repository = {}
kernel.py ADDED
@@ -0,0 +1,337 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import queue
2
+ # import streamlit as st
3
+ import base64
4
+ from io import BytesIO
5
+ import re
6
+ import os
7
+ import jupyter_client
8
+ from PIL import Image
9
+ from subprocess import PIPE
10
+ from pprint import pprint
11
+ import nbformat as nbf
12
+
13
+ IPYKERNEL = os.environ.get('IPYKERNEL', 'dsagent')
14
+
15
+ # client = get_client()
16
+
17
+
18
class CodeKernel(object):
    """Wrapper around a Jupyter kernel used to execute generated code.

    Starts a backend kernel (spec taken from the module-level IPYKERNEL env
    value) via jupyter_client, keeps a blocking client to it, and exposes
    execute / interrupt / restart / shutdown helpers plus notebook export.
    """

    def __init__(self,
                 kernel_name='kernel',
                 kernel_id=None,
                 kernel_config_path="",
                 python_path=None,
                 ipython_path=None,
                 init_file_path="./startup.py",
                 verbose=1,
                 max_exe_time=6000):
        # kernel_name/kernel_id are stored as-is; the kernel spec actually
        # launched is IPYKERNEL. kernel_config_path, when non-empty, attaches
        # to an existing connection file instead of creating a new one.
        # max_exe_time is the per-message timeout passed to get_*_msg calls.

        self.kernel_name = kernel_name
        self.kernel_id = kernel_id
        self.kernel_config_path = kernel_config_path
        self.python_path = python_path
        self.ipython_path = ipython_path
        self.init_file_path = init_file_path
        # Cells accumulated for notebook export (see export()).
        self.executed_cells = []
        self.verbose = verbose
        self.interrupt_signal = False
        self.max_exe_time = max_exe_time


        if python_path is None and ipython_path is None:
            env = None
        else:
            # Prepend the requested interpreter to the kernel's environment.
            env = {"PATH": self.python_path + ":$PATH", "PYTHONPATH": self.python_path}

        # Initialize the backend kernel
        self.kernel_manager = jupyter_client.KernelManager(kernel_name=IPYKERNEL,
                                                           connection_file=self.kernel_config_path,
                                                           exec_files=[self.init_file_path],
                                                           env=env)
        if self.kernel_config_path:
            self.kernel_manager.load_connection_file()
            self.kernel_manager.start_kernel(stdout=PIPE, stderr=PIPE)
            print("Backend kernel started with the configuration: {}".format(
                self.kernel_config_path))
        else:
            self.kernel_manager.start_kernel(stdout=PIPE, stderr=PIPE)
            print("Backend kernel started with the configuration: {}".format(
                self.kernel_manager.connection_file))

        if verbose:
            pprint(self.kernel_manager.get_connection_info())

        # Initialize the code kernel
        self.kernel = self.kernel_manager.blocking_client()
        # self.kernel.load_connection_file()
        self.kernel.start_channels()
        print("Code kernel started.")

    def execute(self, code):
        """Execute ``code`` and collect interesting IOPub messages.

        Returns (shell_reply, list_of_iopub_content_dicts) on success, or
        None if anything raised (e.g. a timeout).
        NOTE(review): callers unpack a 2-tuple, so the None failure path will
        raise TypeError at the call site — confirm intended.
        """
        self.kernel.execute(code)
        try:
            shell_msg = self.kernel.get_shell_msg(timeout=self.max_exe_time)
            io_msg_content = self.kernel.get_iopub_msg(timeout=self.max_exe_time)['content']
            msg_out_list = []
            while True:
                msg_out = io_msg_content
                # Keep non-stderr stream messages and PNG display payloads.
                if 'name' in msg_out and msg_out['name'] != 'stderr':
                    msg_out_list.append(msg_out)
                elif 'data' in msg_out and 'image/png' in msg_out['data']:
                    msg_out_list.append(msg_out)
                ### Poll the message
                try:
                    io_msg_content = self.kernel.get_iopub_msg(timeout=self.max_exe_time)['content']
                    if 'execution_state' in io_msg_content and io_msg_content['execution_state'] == 'idle':
                        # NOTE(review): appends the previous message again on
                        # idle, which may duplicate the last output — confirm.
                        msg_out_list.append(msg_out)
                        break
                except queue.Empty:
                    break
            # msg_out = '\n'.join(msg_out_list)
            return shell_msg, msg_out_list
        except Exception as e:
            print(e)
            return None

    def execute_code_(self, code):
        """Execute ``code`` and return a list of (kind, payload) outputs.

        Drains IOPub until the kernel reports 'idle', then classifies each
        message into stdout / execute_result_* / display_* / error entries.
        """
        msg_id = self.kernel.execute(code)

        # Get the output of the code
        msg_list = []
        while True:
            try:
                iopub_msg = self.kernel.get_iopub_msg(timeout=self.max_exe_time)
                msg_list.append(iopub_msg)
                if iopub_msg['msg_type'] == 'status' and iopub_msg['content'].get('execution_state') == 'idle':
                    break
            except:
                # NOTE(review): bare except also hides programming errors;
                # intended to catch queue.Empty timeouts so an interrupt
                # request can be serviced between polls.
                if self.interrupt_signal:
                    self.kernel_manager.interrupt_kernel()
                    self.interrupt_signal = False
                continue

        all_output = []
        for iopub_msg in msg_list:
            if iopub_msg['msg_type'] == 'stream':
                if iopub_msg['content'].get('name') == 'stdout':
                    output = iopub_msg['content']['text']
                    all_output.append(('stdout', output))
            elif iopub_msg['msg_type'] == 'execute_result':
                if 'data' in iopub_msg['content']:
                    if 'text/plain' in iopub_msg['content']['data']:
                        output = iopub_msg['content']['data']['text/plain']
                        all_output.append(('execute_result_text', output))
                    if 'text/html' in iopub_msg['content']['data']:
                        output = iopub_msg['content']['data']['text/html']
                        all_output.append(('execute_result_html', output))
                    if 'image/png' in iopub_msg['content']['data']:
                        output = iopub_msg['content']['data']['image/png']
                        all_output.append(('execute_result_png', output))
                    if 'image/jpeg' in iopub_msg['content']['data']:
                        output = iopub_msg['content']['data']['image/jpeg']
                        all_output.append(('execute_result_jpeg', output))
            elif iopub_msg['msg_type'] == 'display_data':
                if 'data' in iopub_msg['content']:
                    if 'text/plain' in iopub_msg['content']['data']:
                        output = iopub_msg['content']['data']['text/plain']
                        all_output.append(('display_text', output))
                    if 'text/html' in iopub_msg['content']['data']:
                        output = iopub_msg['content']['data']['text/html']
                        all_output.append(('display_html', output))
                    if 'image/png' in iopub_msg['content']['data']:
                        output = iopub_msg['content']['data']['image/png']
                        all_output.append(('display_png', output))
                    if 'image/jpeg' in iopub_msg['content']['data']:
                        output = iopub_msg['content']['data']['image/jpeg']
                        all_output.append(('display_jpeg', output))
            elif iopub_msg['msg_type'] == 'error':
                if 'traceback' in iopub_msg['content']:
                    output = '\n'.join(iopub_msg['content']['traceback'])
                    all_output.append(('error', output))

        return all_output


    def export(self, file_path):
        """Write the accumulated executed cells out as a .ipynb notebook."""
        nb = nbf.v4.new_notebook()
        nb.cells = self.executed_cells
        print("cell: ",nb.cells)
        with open(file_path, 'w', encoding='utf-8') as f:
            nbf.write(nb, f)
        print(f"Notebook exported to {file_path}")

    def execute_interactive(self, code, verbose=False):
        """Execute ``code`` interactively and return the shell reply.

        NOTE(review): ``shell_msg is queue.Empty`` compares against the class
        object and is always False; a timeout would raise instead — confirm.
        """
        shell_msg = self.kernel.execute_interactive(code)
        if shell_msg is queue.Empty:
            if verbose:
                print("Timeout waiting for shell message.")
        self.check_msg(shell_msg, verbose=verbose)

        return shell_msg

    def inspect(self, code, verbose=False):
        """Send an inspect request for ``code`` and return the shell reply."""
        msg_id = self.kernel.inspect(code)
        shell_msg = self.kernel.get_shell_msg(timeout=30)
        # NOTE(review): same always-False identity check as above.
        if shell_msg is queue.Empty:
            if verbose:
                print("Timeout waiting for shell message.")
        self.check_msg(shell_msg, verbose=verbose)

        return shell_msg

    def get_error_msg(self, msg, verbose=False) -> str | None:
        """Extract the traceback from an error reply; None if status is ok.

        NOTE(review): the annotated return type is str | None, but the first
        branch returns the raw traceback *list* — callers join it themselves;
        confirm which form is intended.
        """
        if msg['content']['status'] == 'error':
            try:
                error_msg = msg['content']['traceback']
            except:
                try:
                    error_msg = msg['content']['traceback'][-1].strip()
                except:
                    error_msg = "Traceback Error"
            if verbose:
                print("Error: ", error_msg)
            return error_msg
        return None

    def check_msg(self, msg, verbose=False):
        """Print the outcome of a shell reply when verbose is set."""
        status = msg['content']['status']
        if status == 'ok':
            if verbose:
                print("Execution succeeded.")
        elif status == 'error':
            for line in msg['content']['traceback']:
                if verbose:
                    print(line)

    def shutdown(self):
        """Shut down both the kernel manager and the client channels."""
        # Shutdown the backend kernel
        self.kernel_manager.shutdown_kernel(now=True)
        print("Backend kernel shutdown.")
        # Shutdown the code kernel
        self.kernel.shutdown()
        print("Code kernel shutdown.")

    def restart(self):
        """Restart the backend kernel (client channels are kept)."""
        # Restart the backend kernel
        self.kernel_manager.restart_kernel()
        print("Backend kernel restarted.")

    def start(self):
        """(Re)create the blocking client and start its channels."""
        # Initialize the code kernel
        self.kernel = self.kernel_manager.blocking_client()
        # self.kernel.load_connection_file()
        self.kernel.start_channels()
        print("Code kernel started.")

    def interrupt(self):
        """Send an interrupt signal to the backend kernel."""
        # Interrupt the backend kernel
        self.kernel_manager.interrupt_kernel()
        print("Backend kernel interrupted.")

    def is_alive(self):
        """Return True while the kernel client reports the kernel alive."""
        return self.kernel.is_alive()
233
+
234
+
235
def b64_2_img(data):
    """Decode a base64-encoded image payload into a PIL Image."""
    raw = base64.b64decode(data)
    return Image.open(BytesIO(raw))
238
+
239
+
240
def clean_ansi_codes(input_string):
    """Strip ANSI terminal escape sequences (colors, cursor moves) from text."""
    return re.sub(
        r'(\x9B|\x1B\[|\u001b\[)[0-?]*[ -/]*[@-~]',
        '',
        input_string,
    )
243
+
244
+
245
def execute(code, kernel: CodeKernel) -> tuple[str, str | Image.Image]:
    """Run ``code`` in ``kernel`` and normalize the result.

    Returns a (res_type, res) pair where res_type is "text", "image", or None
    (when nothing classifiable was produced), and res is the joined text
    output, a PIL Image, or an error string.

    NOTE(review): kernel.execute() returns None on internal failure, which
    makes the tuple unpack below raise TypeError; an empty output_list would
    hit an IndexError in the else branch — confirm both paths are unreachable.
    """
    res = ""
    res_type = None
    # code = code.replace("<|observation|>", "")
    # code = code.replace("<|assistant|>interpreter", "")
    # code = code.replace("<|assistant|>", "")
    # code = code.replace("<|user|>", "")
    # code = code.replace("<|system|>", "")
    msg, output_list = kernel.execute(code)

    if msg['metadata']['status'] == "timeout":
        return res_type, 'Timed out'
    elif msg['metadata']['status'] == 'error':
        # NOTE(review): get_error_msg may return the plain string
        # "Traceback Error" on its fallback path, in which case this join
        # inserts a newline between every character — confirm.
        return res_type, clean_ansi_codes('\n'.join(kernel.get_error_msg(msg, verbose=True))) #todo: change the res_type to error

    if len(output_list) > 1:
        res_list = []
        for output in output_list:
            # Each output is an IOPub 'content' dict; streams carry 'text',
            # rich results carry a 'data' mimebundle.
            if 'text' in output:
                res_type = "text"
                res = output['text']
            elif 'data' in output:
                for key in output['data']:
                    if 'text/plain' in key:
                        res_type = "text"
                        res = output['data'][key]
                    elif 'image/png' in key:
                        res_type = "image"
                        res = output['data'][key]
                        break

            if res_type == "image":
                #return res_type, b64_2_img(res)
                res_list.append(b64_2_img(res))
            elif res_type == "text" or res_type == "traceback": #traceback is error?
                #res = res
                res_list.append(res)
                # Drop immediate duplicates (the idle-poll in CodeKernel.execute
                # can append the last message twice).
                if len(res_list) > 1 and res_list[-1] == res_list[-2]:
                    res_list.pop()

        # NOTE(review): if the list ends with an Image, joining with '\n'
        # will fail on the non-string element — confirm mixed outputs.
        res = '\n'.join(res_list)
        return res_type, res
    else:
        output = output_list[0]
        if 'text' in output: #output is a dict, what situation is 'text' in output?
            res_type = "text"
            res = output['text']
        elif 'data' in output:
            for key in output['data']:
                if 'text/plain' in key: #key = 'text/plain'
                    res_type = "text"
                    res = output['data'][key]
                elif 'image/png' in key: # image will be present as html file?
                    res_type = "image"
                    res = output['data'][key]
                    break

        if res_type == "image":
            return res_type, b64_2_img(res)
        elif res_type == "text" or res_type == "traceback": # traceback is error?
            res = res
        return res_type, res
307
+
308
+
309
# @st.cache_resource
def get_kernel():
    """Create and return a freshly started CodeKernel instance."""
    return CodeKernel()
313
+
314
+
315
if __name__ == '__main__':
    # Manual smoke test: start a kernel and run a plotting snippet end-to-end.
    kernel = CodeKernel()
    # NOTE(review): hard-coded absolute path from the author's machine; the
    # table snippet is unused below — confirm before relying on it.
    table_code = """
import pandas as pd
data = pd.read_csv('/Users/stephensun/Desktop/pypro/LAMBDA/cache/conv_cache/c1829f26-f6b3-4cbc-96a7-88aef2c33df2-2024-08-13/wine.csv')
data.head()
"""
    img_code = """
import matplotlib.pyplot as plt
x = [1, 2, 3, 4, 5]
y = [2, 3, 5, 7, 6]

plt.plot(x, y)

plt.title('Simple Line Plot')
plt.xlabel('X-axis')
plt.ylabel('Y-axis')
plt.show()
"""
    res_type, res = execute(img_code, kernel)
    print(res_type,res)
    # kernel.export("my_notebook.ipynb")
337
+
knowledge_integration/N_correlation_matrix.py ADDED
@@ -0,0 +1,510 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ################# This Python code is designed by Yancheng Yuan at National University of Singapore to solve ##
2
+ ################# min 0.5*<X-Sigma, X-Sigma>
3
+ ################# s.t. X_ii =b_i, i=1,2,...,n
4
+ ################# X>=tau*I (symmetric and positive semi-definite) ########
5
+ #################
6
+ ################# based on the algorithm in #################
7
+ ################# ``A Quadratically Convergent Newton Method fo r#################
8
+ ################# Computing the Nearest Correlation Matrix #################
9
+ ################# By Houduo Qi and Defeng Sun #################
10
+ ################# SIAM J. Matrix Anal. Appl. 28 (2006) 360--385.
11
+ #################
12
+ ################# Last modified date: March 19, 2016 #################
13
+ ################# #################
14
+ ################# The input arguments Sigma, b>0, tau>=0, and tol (tolerance error) #################
15
+ ################# #################
16
+ ################# #################
17
+ ################# For the correlation matrix problem, set b = np.ones((n,1)) #################
18
+ ################# #################
19
+ ################# For a positive definite matrix #################
20
+ ################# set tau = 1.0e-5 for example #################
21
+ ################# set tol = 1.0e-6 or lower if no very high accuracy required #################
22
+ ################# If the algorithm terminates with the given tol, then it means ##################
23
+ ################# the relative gap between the objective function value of the obtained solution ###
24
+ ################# and the objective function value of the global solution is no more than tol or ###
25
+ ################# the violation of the feasibility is smaller than tol*(1+np.linalg.norm(b)) ############
26
+ #################
27
+ ################# The outputs include the optimal primal solution, the dual solution and others ########
28
+ ################# Diagonal Preconditioner is used #################
29
+ #################
30
+ ################# Send your comments to [email protected] or [email protected] #################
31
+ #################
32
+ ################# Warning: Though the code works extremely well, it is your call to use it or not. #################
33
+
34
+ import numpy as np
35
+ import scipy.io as sio
36
+ import scipy
37
+ import sys
38
+ import time
39
+
40
+
41
+ # Generate F(y) Compute the gradient
42
+
43
+
44
def my_gradient(y_input, lamb, p_input, b_0, n):
    """Evaluate the dual objective f(y) and the vector F(y) = diag(PSD part).

    Given the eigendecomposition (lamb, p_input) of Sigma + diag(y), computes
    the dual objective value and F(y), the diagonal of the projection of
    Sigma + diag(y) onto the PSD cone, as used by the Qi-Sun Newton method.

    Args:
        y_input: (n, 1) current dual variable y.
        lamb: eigenvalues of Sigma + diag(y) (length n, descending order).
        p_input: (n, n) matrix whose columns are the matching eigenvectors.
        b_0: (n, 1) target diagonal (all ones for a correlation matrix).
        n: problem dimension.

    Returns:
        (f, Fy): f is the (1, 1) dual objective value
        0.5 * sum(max(lamb, 0)^2) - b_0' y, and Fy is the (n, 1) gradient
        vector with Fy[i] = [P diag(max(lamb,0)) P']_{ii}.
    """
    # Vectorized replacement of the original per-row Python loops: scale
    # eigenvector i (row i of P') by sqrt(max(lambda_i, 0)); column sums of
    # squares then give the diagonal of the projected matrix.
    lamb_plus = np.maximum(np.asarray(lamb, dtype=float).reshape(-1), 0.0)
    scaled = np.sqrt(lamb_plus).reshape(-1, 1) * p_input.transpose()
    Fy = np.sum(scaled * scaled, axis=0).reshape(n, 1)
    f = 0.5 * np.sum(lamb_plus ** 2) - np.dot(b_0.transpose(), y_input)
    return f, Fy
60
+
61
+
62
+ # use PCA to generate a primal feasible solution checked
63
+
64
def my_pca(x_input, lamb, p_input, b_0, n):
    """Project onto the PSD cone, then rescale so the diagonal equals b_0.

    Produces a primal-feasible starting point ("PCA" step) for the nearest
    correlation matrix Newton method.

    Args:
        x_input: (n, n) symmetric matrix to project.
        lamb: its eigenvalues, assumed sorted in descending order.
        p_input: (n, n) matrix of matching eigenvectors (columns).
        b_0: (n, 1) target diagonal, strictly positive.
        n: problem dimension.

    Returns:
        (n, n) PSD matrix whose diagonal equals b_0.
    """
    x_pca = x_input
    lamb = np.asarray(lamb)
    lp = lamb > 0
    r = lamb[lp].size  # number of positive eigenvalues
    if r == 0:
        # No positive spectrum: the projection is the zero matrix.
        x_pca = np.zeros((n, n))
    elif r == n:
        # Already PSD: nothing to cut off.
        x_pca = x_input
    elif r < (n / 2.0):
        # Few positive eigenvalues: rebuild from the positive part,
        # X_+ = P1 diag(lamb_+) P1'.
        lamb1 = lamb[lp].copy()
        lamb1 = lamb1.transpose()
        lamb1 = np.sqrt(lamb1.astype(float))
        P1 = p_input[:, 0:r].copy()
        if r > 1:
            P1 = np.dot(P1, np.diagflat(lamb1))
            x_pca = np.dot(P1, P1.transpose())
        else:
            x_pca = np.dot(np.dot(np.square(lamb1), P1), P1.transpose())

    else:
        # Many positive eigenvalues: cheaper to remove the negative part.
        # lamb2 = sqrt(-lamb_neg), so adding p_2 p_2' cancels P2 lamb_neg P2'.
        lamb2 = -lamb[r:n].copy()
        lamb2 = np.sqrt(lamb2.astype(float))
        p_2 = p_input[:, r:n]
        p_2 = np.dot(p_2, np.diagflat(lamb2))
        x_pca = x_pca + np.dot(p_2, p_2.transpose())

    # To make x_pca positive semidefinite with diagonal elements exactly b0
    d = x_pca.diagonal()
    d = d.reshape((d.size, 1))
    # Guard against tiny/zero diagonal entries before the inverse sqrt below.
    d = np.maximum(d, b_0.reshape(d.shape))
    x_pca = x_pca - np.diagflat(x_pca.diagonal()) + np.diagflat(d)
    d = d.astype(float) ** (-0.5)
    d = d * ((np.sqrt(b_0.astype(float))).reshape(d.shape))
    # Symmetric D X D rescaling preserves positive semidefiniteness.
    x_pca = x_pca * (np.dot(d, d.reshape(1, d.size)))

    return x_pca
101
+
102
+
103
+ # end of PCA
104
+
105
+ # To generate the first order difference of lambda
106
+ # To generate the first order essential part of d
107
+
108
+
109
def my_omega_mat(p_input, lamb, n):
    """Build the (r x s) Omega_12 block of first-order spectral information.

    With eigenvalues ``lamb`` sorted descending (r positive, s = n - r
    non-positive), entry (i, j) is lamb_i / (|lamb_i| + |lamb_{r+j}|).
    Returns an empty array when no eigenvalue is positive, and an all-ones
    (n, n) matrix when all are positive. ``p_input`` is accepted for
    signature compatibility but not used.
    """
    positive_idx = np.where(lamb > 0)[0]
    n = lamb.size
    r = positive_idx.size
    if r == 0:
        return np.array([])
    if r == n:
        return np.ones((n, n))

    s = n - r
    dp = lamb[0:r].copy().reshape(-1, 1)   # positive eigenvalues, column
    dn = lamb[r:n].copy().reshape(-1, 1)   # non-positive eigenvalues, column
    numerator = np.dot(dp, np.ones((1, s)))
    denominator = np.dot(np.abs(dp), np.ones((1, s))) + np.dot(
        np.ones((r, 1)), np.abs(dn.transpose())
    )
    return (numerator / denominator).reshape((r, s))
132
+
133
+
134
+ # End of my_omega_mat
135
+
136
+
137
+ # To generate Jacobian
138
+
139
+
140
def my_jacobian_matrix(x, omega_12, p_input, n):
    """Apply the generalized Jacobian of F to the vector x (Newton operator).

    Computes (V x) for the semismooth Newton system, choosing between two
    algebraically equivalent formulas depending on whether the positive
    eigenspace dimension r is small (work with P1) or large (work with P2).
    The 1.0e-10 * x terms add a tiny Tikhonov-style perturbation so the
    operator stays positive definite for CG.

    Args:
        x: (n, 1) vector to which the Jacobian is applied.
        omega_12: (r, s) block from my_omega_mat.
            NOTE(review): when r == 0 my_omega_mat returns a 1-D empty array,
            whose shape cannot unpack into [r, s] below — confirm callers
            never reach this with an empty block.
        p_input: (n, n) eigenvector matrix of the current iterate.
        n: problem dimension.

    Returns:
        (n, 1) result vector V x.
    """
    x_result = np.zeros((n, 1))
    [r, s] = omega_12.shape
    if r > 0:
        hmat_1 = p_input[:, 0:r].copy()
        if r < n / 2.0:
            # Small positive eigenspace: scale rows of P1 by x and assemble
            # the product through the r-dimensional subspace.
            i = 0
            while i < n:
                hmat_1[i, :] = x[i] * hmat_1[i, :]
                i = i + 1

            omega_12 = omega_12 * (np.dot(hmat_1.transpose(), p_input[:, r:n]))
            hmat = np.dot(hmat_1.transpose(), np.dot(p_input[:, 0:r], p_input[:, 0:r].transpose()))
            hmat = hmat + np.dot(omega_12, p_input[:, r:n].transpose())
            hmat = np.vstack((hmat, np.dot(omega_12.transpose(), p_input[:, 0:r].transpose())))
            i = 0
            while i < n:
                x_result[i] = np.dot(p_input[i, :], hmat[:, i])
                # Tiny perturbation keeps the CG operator positive definite.
                x_result[i] = x_result[i] + 1.0e-10 * x[i]
                i = i + 1

        else:
            if r == n:
                # Fully positive spectrum: V is (almost) the identity.
                x_result = 1.0e-10 * x
            else:
                # Large positive eigenspace: work through the complementary
                # s-dimensional subspace P2 instead (cheaper).
                hmat_2 = p_input[:, r:n].copy()
                i = 0
                while i < n:
                    hmat_2[i, :] = x[i] * hmat_2[i, :]
                    i = i + 1

                omega_12 = np.ones((r, s)) - omega_12
                omega_12 = omega_12 * (np.dot(p_input[:, 0:r].transpose(), hmat_2))
                hmat = np.dot(p_input[:, r:n].transpose(), hmat_2)
                hmat = np.dot(hmat, p_input[:, r:n].transpose())
                hmat = hmat + np.dot(omega_12.transpose(), p_input[:, 0:r].transpose())
                hmat = np.vstack((np.dot(omega_12, p_input[:, r:n].transpose()), hmat))
                i = 0
                while i < n:
                    x_result[i] = np.dot(-p_input[i, :], hmat[:, i])
                    x_result[i] = x[i] + x_result[i] + 1.0e-10 * x[i]
                    i = i + 1

    return x_result
184
+
185
+
186
+ # end of Jacobian
187
+ # PCG Method
188
+
189
+
190
def my_pre_cg(b, tol, maxit, c, omega_12, p_input, n):
    """Preconditioned conjugate gradient for the Newton system V d = b.

    The matrix-vector product is supplied by my_jacobian_matrix and the
    diagonal preconditioner by the vector c (from my_precond_matrix).

    Returns:
        p      : approximate solution (n, 1)
        flag   : 0 if converged to relative tolerance `tol`, 1 otherwise
        relres : last relative residual ||r|| / ||b||
        iterk  : number of CG iterations performed
    """
    # Initializations
    r = b.copy()
    r = r.reshape(r.size, 1)
    c = c.reshape(c.size, 1)
    n2b = np.linalg.norm(b)
    tolb = tol * n2b  # absolute residual target from the relative tolerance
    p = np.zeros((n, 1))
    flag = 1
    iterk = 0
    relres = 1000
    # Precondition: z = M^{-1} r with M = diag(c)
    z = r / c
    rz_1 = np.dot(r.transpose(), z)
    rz_2 = 1
    d = z.copy()
    # CG Iteration
    for k in range(0, maxit):
        if k > 0:
            # Standard Fletcher-Reeves-style update of the search direction.
            beta = rz_1 / rz_2
            d = z + beta * d

        w = my_jacobian_matrix(d, omega_12, p_input, n)
        denom = np.dot(d.transpose(), w)
        iterk = k + 1
        relres = np.linalg.norm(r) / n2b
        if denom <= 0:
            # Negative-curvature safeguard: return the normalized direction
            # instead of stepping along it (see Qi & Sun's CorNewton code).
            ss = 0  # unused placeholder kept from the reference implementation
            p = d / np.linalg.norm(d)
            break
        else:
            alpha = rz_1 / denom
            p = p + alpha * d
            r = r - alpha * w

        z = r / c
        if np.linalg.norm(r) <= tolb:  # exit if hmat p = b solved in relative error tolerance
            iterk = k + 1
            relres = np.linalg.norm(r) / n2b
            flag = 0
            break

        rz_2 = rz_1
        rz_1 = np.dot(r.transpose(), z)

    return p, flag, relres, iterk
237
+
238
+
239
+ # end of pre_cg
240
+
241
+ # to generate the diagonal preconditioner
242
+
243
+
244
def my_precond_matrix(omega_12, p_input, n):
    """Build the diagonal preconditioner for the Newton-CG linear system.

    Returns an (n, 1) vector c approximating the diagonal of the generalized
    Jacobian, floored at 1.0e-8 so division by c is always safe.  As in
    my_jacobian_matrix, the computation picks the cheaper eigenspace block
    (r positive eigenvalues vs s = n - r nonpositive ones).
    """
    [r, s] = omega_12.shape
    c = np.ones((n, 1))
    if r > 0:
        if r < n / 2.0:
            # hmat holds the elementwise squares of P', so column sums give
            # squared eigenvector components.
            hmat = (p_input.copy()).transpose()
            hmat = hmat * hmat
            hmat_12 = np.dot(hmat[0:r, :].transpose(), omega_12)
            d = np.ones((r, 1))
            for i in range(0, n):
                c_temp = np.dot(d.transpose(), hmat[0:r, i])
                c_temp = c_temp * hmat[0:r, i]
                c[i] = np.sum(c_temp)
                c[i] = c[i] + 2.0 * np.dot(hmat_12[i, :], hmat[r:n, i])
                # Floor to keep the preconditioner positive definite.
                if c[i] < 1.0e-8:
                    c[i] = 1.0e-8

        else:
            if r < n:
                # Mirror computation through the complement weights (1 - Omega).
                hmat = (p_input.copy()).transpose()
                hmat = hmat * hmat
                omega_12 = np.ones((r, s)) - omega_12
                hmat_12 = np.dot(omega_12, hmat[r:n, :])
                d = np.ones((s, 1))
                dd = np.ones((n, 1))

                for i in range(0, n):
                    c_temp = np.dot(d.transpose(), hmat[r:n, i])
                    c[i] = np.sum(c_temp * hmat[r:n, i])
                    c[i] = c[i] + 2.0 * np.dot(hmat[0:r, i].transpose(), hmat_12[:, i])
                    alpha = np.sum(hmat[:, i])
                    c[i] = alpha * np.dot(hmat[:, i].transpose(), dd) - c[i]
                    if c[i] < 1.0e-8:
                        c[i] = 1.0e-8
    # r == n (all-ones c) and r == 0 fall through with the identity
    # preconditioner, matching the reference MATLAB code.
    return c
280
+
281
+
282
+ # end of precond_matrix
283
+
284
+
285
+ # my_issorted()
286
+
287
def my_issorted(x_input, flag):
    """Check whether a 1-D array is monotonically sorted.

    flag == 1  : True if x_input is in nondecreasing (ascending) order.
    flag == -1 : True if x_input is in nonincreasing (descending) order.
    Any other flag yields False; arrays with fewer than two elements are
    trivially sorted either way.
    """
    count = x_input.size
    if count < 2:
        return True
    if flag == 1:
        return all(x_input[j] <= x_input[j + 1] for j in range(count - 1))
    if flag == -1:
        return all(x_input[j] <= x_input[j - 1] for j in range(count - 1, 0, -1))
    return False
320
+
321
+
322
+ # end of my_issorted()
323
+
324
+
325
def my_mexeig(x_input):
    """Eigendecompose a symmetric matrix with eigenvalues sorted descending.

    Args:
        x_input: symmetric (n, n) array.

    Returns:
        (p_x, lamb): p_x is the (n, n) eigenvector matrix (columns paired with
        eigenvalues), lamb the eigenvalues as an (n, 1) column, largest first.

    FIX: the original returned early when the spectrum was already descending,
    skipping the reshape of lamb to (n, 1) — so callers received a (n,) vector
    in that branch and an (n, 1) column otherwise.  All paths now reshape.
    """
    n = x_input.shape[0]
    [lamb, p_x] = np.linalg.eigh(x_input)
    # eigh on a real symmetric input returns real results; .real is a no-op
    # safeguard kept from the original.
    p_x = p_x.real
    lamb = lamb.real
    if my_issorted(lamb, 1):
        # Ascending: flipping preserves the eigenpair matching and is cheaper
        # than a full argsort.
        lamb = lamb[::-1]
        p_x = np.fliplr(p_x)
    elif not my_issorted(lamb, -1):
        # General case: reorder eigenpairs by decreasing eigenvalue.
        idx = np.argsort(-lamb)
        lamb = lamb[idx]
        p_x = p_x[:, idx]
    # Already descending: nothing to reorder.

    lamb = lamb.reshape((n, 1))
    p_x = p_x.reshape((n, n))

    return p_x, lamb
347
+
348
+
349
+ # end of my_mymexeig()
350
+
351
+
352
+ # begin of the main function
353
+
354
+
355
def my_correlationmatrix(g_input, b_input=None, tau=None, tol=None):
    """Nearest correlation matrix via the semismooth Newton-CG method.

    Solves  min 0.5*||X - G||_F^2  s.t.  X_ii = b_i,  X >= tau*I
    following Qi & Sun, SIAM J. Matrix Anal. Appl. 28 (2006) 360-385.

    Args:
        g_input: (n, n) input matrix G (symmetrized internally).
        b_input: (n, 1) target diagonal; defaults to all ones (correlation case).
        tau:     lower bound on the eigenvalues (X >= tau*I); defaults to 0.
        tol:     relative stopping tolerance; defaults to 1.0e-6.

    Returns:
        (x_result, y): the primal solution X and the dual variable y.

    Side effects: prints progress/diagnostic lines throughout.
    """
    print('-- Semismooth Newton-CG method starts -- \n')
    [n, m] = g_input.shape
    g_input = g_input.copy()
    t0 = time.time()  # time start
    g_input = (g_input + g_input.transpose()) / 2.0
    b_g = np.ones((n, 1))
    error_tol = 1.0e-6
    # Emulate MATLAB-style optional arguments: the tau*I shift is folded into
    # G and b so the core solver only ever handles the tau = 0 problem.
    if b_input is None:
        tau = 0
    elif tau is None:
        b_g = b_input.copy()
        tau = 0
    elif tol is None:
        b_g = b_input.copy() - tau * np.ones((n, 1))
        g_input = g_input - tau * np.eye(n, n)
    else:
        b_g = b_input.copy() - tau * np.ones((n, 1))
        g_input = g_input - tau * np.eye(n, n)
        error_tol = np.maximum(1.0e-12, tol)

    res_b = np.zeros((300, 1))  # per-iteration gradient-norm history
    norm_b0 = np.linalg.norm(b_g)
    y = np.zeros((n, 1))        # dual variable
    f_y = np.zeros((n, 1))
    k = 0
    f_eval = 0
    iter_whole = 200
    iter_inner = 20  # maximum number of line search in Newton method
    maxit = 200  # maximum number of iterations in PCG
    iterk = 0
    inner = 0
    tol_cg = 1.0e-2  # relative accuracy for CGs
    sigma_1 = 1.0e-4  # Armijo line-search parameter
    x0 = y.copy()
    prec_time = 0
    pcg_time = 0
    eig_time = 0
    c = np.ones((n, 1))
    d = np.zeros((n, 1))
    val_g = np.sum((g_input.astype(float)) * (g_input.astype(float)))
    val_g = val_g * 0.5  # constant term 0.5*||G||_F^2 of the objective
    # Initial dual point: X(y) = G + diag(y), symmetrized for safety.
    x_result = g_input + np.diagflat(y)
    x_result = (x_result + x_result.transpose()) / 2.0
    eig_time0 = time.time()
    [p_x, lamb] = my_mexeig(x_result)
    eig_time = eig_time + (time.time() - eig_time0)
    [f_0, f_y] = my_gradient(y, lamb, p_x, b_g, n)
    initial_f = val_g - f_0
    x_result = my_pca(x_result, lamb, p_x, b_g, n)
    val_obj = np.sum(((x_result - g_input) * (x_result - g_input))) / 2.0
    gap = (val_obj - initial_f) / (1.0 + np.abs(initial_f) + np.abs(val_obj))
    f = f_0.copy()
    f_eval = f_eval + 1
    b_input = b_g - f_y  # current (negative) gradient of the dual
    norm_b = np.linalg.norm(b_input)
    time_used = time.time() - t0

    print('Newton-CG: Initial Dual objective function value: %s \n' % initial_f)
    print('Newton-CG: Initial Primal objective function value: %s \n' % val_obj)
    print('Newton-CG: Norm of Gradient: %s \n' % norm_b)
    print('Newton-CG: computing time used so far: %s \n' % time_used)

    omega_12 = my_omega_mat(p_x, lamb, n)
    x0 = y.copy()

    # Outer Newton loop: stop on small duality gap, small relative gradient,
    # or iteration budget.
    while np.abs(gap) > error_tol and norm_b / (1 + norm_b0) > error_tol and k < iter_whole:
        prec_time0 = time.time()
        c = my_precond_matrix(omega_12, p_x, n)
        prec_time = prec_time + (time.time() - prec_time0)

        # Solve the Newton system V d = b by preconditioned CG.
        pcg_time0 = time.time()
        [d, flag, relres, iterk] = my_pre_cg(b_input, tol_cg, maxit, c, omega_12, p_x, n)
        pcg_time = pcg_time + (time.time() - pcg_time0)
        print('Newton-CG: Number of CG Iterations=== %s \n' % iterk)
        if flag == 1:
            print('=== Not a completed Newton-CG step === \n')

        slope = np.dot((f_y - b_g).transpose(), d)

        # Trial full Newton step, then re-evaluate the dual objective.
        y = (x0 + d).copy()
        x_result = g_input + np.diagflat(y)
        x_result = (x_result + x_result.transpose()) / 2.0
        eig_time0 = time.time()
        [p_x, lamb] = my_mexeig(x_result)
        eig_time = eig_time + (time.time() - eig_time0)
        [f, f_y] = my_gradient(y, lamb, p_x, b_g, n)

        # Armijo backtracking: halve the step until sufficient decrease.
        k_inner = 0
        while k_inner <= iter_inner and f > f_0 + sigma_1 * (np.power(0.5, k_inner)) * slope + 1.0e-6:
            k_inner = k_inner + 1
            y = x0 + (np.power(0.5, k_inner)) * d
            x_result = g_input + np.diagflat(y)
            x_result = (x_result + x_result.transpose()) / 2.0
            eig_time0 = time.time()
            [p_x, lamb] = my_mexeig(x_result)
            eig_time = eig_time + (time.time() - eig_time0)
            [f, f_y] = my_gradient(y, lamb, p_x, b_g, n)

        f_eval = f_eval + k_inner + 1
        x0 = y.copy()
        f_0 = f.copy()
        val_dual = val_g - f_0
        x_result = my_pca(x_result, lamb, p_x, b_g, n)
        val_obj = np.sum((x_result - g_input) * (x_result - g_input)) / 2.0
        gap = (val_obj - val_dual) / (1 + np.abs(val_dual) + np.abs(val_obj))
        print('Newton-CG: The relative duality gap: %s \n' % gap)
        print('Newton-CG: The Dual objective function value: %s \n' % val_dual)
        print('Newton-CG: The Primal objective function value: %s \n' % val_obj)

        b_input = b_g - f_y
        norm_b = np.linalg.norm(b_input)
        time_used = time.time() - t0
        rel_norm_b = norm_b / (1 + norm_b0)
        print('Newton-CG: Norm of Gradient: %s \n' % norm_b)
        print('Newton-CG: Norm of Relative Gradient: %s \n' % rel_norm_b)
        print('Newton-CG: Computing time used so for %s \n' % time_used)
        res_b[k] = norm_b
        k = k + 1
        omega_12 = my_omega_mat(p_x, lamb, n)

    # Rank of the positive part, then undo the tau*I shift applied up front.
    position_rank = np.maximum(lamb, 0) > 0
    rank_x = (np.maximum(lamb, 0)[position_rank]).size
    final_f = val_g - f
    x_result = x_result + tau * (np.eye(n))
    time_used = time.time() - t0
    print('\n')

    print('Newton-CG: Number of iterations: %s \n' % k)
    print('Newton-CG: Number of Funtion Evaluation: =========== %s\n' % f_eval)
    print('Newton-CG: Final Dual Objective Function value: ========= %s\n' % final_f)
    print('Newton-CG: Final Primal Objective Function value: ======= %s \n' % val_obj)
    print('Newton-CG: The final relative duality gap: %s \n' % gap)
    print('Newton-CG: The rank of the Optimal Solution - tau*I: %s \n' % rank_x)
    print('Newton-CG: computing time for computing preconditioners: %s \n' % prec_time)
    print('Newton-CG: computing time for linear system solving (cgs time): %s \n' % pcg_time)
    print('Newton-CG: computing time for eigenvalue decompositions: =============== %s \n' % eig_time)
    print('Newton-CG: computing time used for equal weight calibration ============ %s \n' % time_used)

    return x_result, y
495
+
496
+
497
+ # end of the main function
498
+
499
+
500
# Smoke test: random symmetric matrix with unit diagonal, projected to the
# nearest correlation matrix.
n = 3000
# FIX: `scipy.randn` was a deprecated alias of numpy.random.randn and has been
# removed from SciPy; call NumPy's generator directly.
data_g_test = np.random.randn(n, n)
data_g_test = (data_g_test + data_g_test.transpose()) / 2.0
data_g_test = data_g_test - np.diag(np.diag(data_g_test)) + np.eye(n)
b = np.ones((n, 1))
tau = 0
tol = 1.0e-6
[x_test_result, y_test_result] = my_correlationmatrix(data_g_test, b, tau, tol)
print(x_test_result)
print(y_test_result)
knowledge_integration/knowledge/nn_networks/models.py ADDED
@@ -0,0 +1,130 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import math
2
+ from torch import nn
3
+ from torch.nn.utils.parametrizations import spectral_norm
4
+
5
+
6
def initialize_weights(tensor):
    """Fill *tensor* in place with U[0, 1) noise, then return a copy scaled by
    sqrt(0.25 / (rows + cols)) — a nonnegative, fan-scaled initialization."""
    fan_total = tensor.shape[0] + tensor.shape[1]
    scale = math.sqrt(0.25 / fan_total)
    return tensor.uniform_() * scale
8
+
9
+
10
class _RRAutoencoder(nn.Module):
    """Base 784 -> 200 -> 784 linear autoencoder with unconstrained weights.

    Subclasses override clamp() to project parameters onto their feasible
    set after each optimizer step.
    """

    def __init__(self):
        super().__init__()
        self.linear_1 = nn.Linear(784, 200)
        self.linear_2 = nn.Linear(200, 784)
        # Without extra activations the layers themselves act as the halves.
        self.encoder = self.linear_1
        self.decoder = self.linear_2

    def forward(self, x):
        return self.decoder(self.encoder(x))

    def clamp(self):
        """No parameter constraints for the real/real (RR) variant."""
        pass
26
+
27
+
28
class _NNAutoencoder(_RRAutoencoder):
    """Autoencoder whose weights and biases are constrained to be nonnegative."""

    def __init__(self):
        super().__init__()
        # Start from zero biases and nonnegative scaled-uniform weights, so the
        # initial point already satisfies the nonnegativity constraint.
        self.linear_1.bias.data.zero_()
        self.linear_2.bias.data.zero_()
        self.linear_1.weight = nn.Parameter(
            initialize_weights(self.linear_1.weight.data)
        )
        self.linear_2.weight = nn.Parameter(
            initialize_weights(self.linear_2.weight.data)
        )

    def clamp(self):
        # Project every weight and bias back onto the nonnegative orthant
        # (called after each optimizer step; see epoch() in utils).
        self.linear_1.weight.data.clamp_(min=0)
        self.linear_2.weight.data.clamp_(min=0)
        self.linear_1.bias.data.clamp_(min=0)
        self.linear_2.bias.data.clamp_(min=0)
45
+
46
+
47
class _PNAutoencoder(_NNAutoencoder):
    """Strictly-positive variant: weights floored at 1e-3, biases at 0."""

    def clamp(self):
        for layer in (self.linear_1, self.linear_2):
            layer.weight.data.clamp_(min=1e-3)
            layer.bias.data.clamp_(min=0)
53
+
54
+
55
class _NRAutoencoder(_NNAutoencoder):
    """Nonnegative weights but unconstrained (real) biases."""

    def clamp(self):
        for layer in (self.linear_1, self.linear_2):
            layer.weight.data.clamp_(min=0)
59
+
60
+
61
class SigmoidNNAutoencoder(_NNAutoencoder):
    """Nonnegative-weight autoencoder with sigmoid on both halves."""

    def __init__(self):
        super().__init__()
        enc_act, dec_act = nn.Sigmoid(), nn.Sigmoid()
        self.encoder = nn.Sequential(self.linear_1, enc_act)
        self.decoder = nn.Sequential(self.linear_2, dec_act)
66
+
67
+
68
class TanhNNAutoencoder(_NNAutoencoder):
    """Nonnegative-weight autoencoder with tanh on both halves."""

    def __init__(self):
        super().__init__()
        enc_act, dec_act = nn.Tanh(), nn.Tanh()
        self.encoder = nn.Sequential(self.linear_1, enc_act)
        self.decoder = nn.Sequential(self.linear_2, dec_act)
73
+
74
+
75
class TanhPNAutoencoder(_PNAutoencoder):
    """Strictly-positive-weight autoencoder with tanh on both halves."""

    def __init__(self):
        super().__init__()
        enc_act, dec_act = nn.Tanh(), nn.Tanh()
        self.encoder = nn.Sequential(self.linear_1, enc_act)
        self.decoder = nn.Sequential(self.linear_2, dec_act)
80
+
81
+
82
class ReLUNNAutoencoder(_NNAutoencoder):
    """Nonnegative autoencoder with ReLU activations and spectral-norm layers."""

    def __init__(self):
        super().__init__()
        # spectral_norm replaces .weight with a parametrization; the raw
        # trainable tensor now lives at linear_*.parametrizations.weight.original.
        self.linear_1 = spectral_norm(self.linear_1)
        self.linear_2 = spectral_norm(self.linear_2)
        self.encoder = nn.Sequential(self.linear_1, nn.ReLU())
        self.decoder = nn.Sequential(self.linear_2, nn.ReLU())

    def clamp(self):
        # Clamp the underlying (pre-normalization) weight tensor, not the
        # normalized view exposed as .weight.
        self.linear_1.parametrizations.weight.original.data.clamp_(min=0)
        self.linear_2.parametrizations.weight.original.data.clamp_(min=0)
        self.linear_1.bias.data.clamp_(min=0)
        self.linear_2.bias.data.clamp_(min=0)
95
+
96
+
97
class ReLUPNAutoencoder(_PNAutoencoder):
    """Strictly-positive autoencoder with ReLU activations and spectral-norm layers."""

    def __init__(self):
        super().__init__()
        # spectral_norm re-registers the weight; the raw tensor is reached via
        # linear_*.parametrizations.weight.original (see clamp below).
        self.linear_1 = spectral_norm(self.linear_1)
        self.linear_2 = spectral_norm(self.linear_2)
        self.encoder = nn.Sequential(self.linear_1, nn.ReLU())
        self.decoder = nn.Sequential(self.linear_2, nn.ReLU())

    def clamp(self):
        # Strictly positive weights (floor 1e-3), nonnegative biases — applied
        # to the pre-normalization weight tensor.
        self.linear_1.parametrizations.weight.original.data.clamp_(min=1e-3)
        self.linear_2.parametrizations.weight.original.data.clamp_(min=1e-3)
        self.linear_1.bias.data.clamp_(min=0)
        self.linear_2.bias.data.clamp_(min=0)
110
+
111
+
112
class TanhSwishNNAutoencoder(_NNAutoencoder):
    """Nonnegative autoencoder: tanh encoder, SiLU (swish) decoder."""

    def __init__(self):
        super().__init__()
        enc_act, dec_act = nn.Tanh(), nn.SiLU()
        self.encoder = nn.Sequential(self.linear_1, enc_act)
        self.decoder = nn.Sequential(self.linear_2, dec_act)
117
+
118
+
119
class ReLUSigmoidNRAutoencoder(_NRAutoencoder):
    """Nonnegative-weight/real-bias autoencoder: ReLU encoder, sigmoid decoder."""

    def __init__(self):
        super().__init__()
        enc_act, dec_act = nn.ReLU(), nn.Sigmoid()
        self.encoder = nn.Sequential(self.linear_1, enc_act)
        self.decoder = nn.Sequential(self.linear_2, dec_act)
124
+
125
+
126
class ReLUSigmoidRRAutoencoder(_RRAutoencoder):
    """Unconstrained autoencoder: ReLU encoder, sigmoid decoder."""

    def __init__(self):
        super().__init__()
        enc_act, dec_act = nn.ReLU(), nn.Sigmoid()
        self.encoder = nn.Sequential(self.linear_1, enc_act)
        self.decoder = nn.Sequential(self.linear_2, dec_act)
knowledge_integration/knowledge/nn_networks/train.py ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+ from pathlib import Path
3
+
4
+ import torch
5
+ from torch import nn
6
+ from torch import optim
7
+ from torch.utils.data import DataLoader
8
+ from torchvision import transforms
9
+ from torchvision import datasets
10
+ from utils import get_network, epoch
11
+
12
+ torch.manual_seed(0)
13
+
14
+
15
def train_nn_network(args):
    """Train an MNIST autoencoder variant and checkpoint the best model.

    Args:
        args: namespace with fields ``net`` (model name accepted by
            get_network), ``b`` (batch size), ``lr`` (learning rate),
            ``wd`` (weight decay), and ``epochs`` (training epochs).

    Side effects: downloads MNIST into the working directory, writes the
    weights with the lowest test loss to ``<this dir>/weights/<net>.pth``,
    and prints a per-epoch loss summary.
    """

    # Checkpoints live next to this file, under ./weights.
    p = Path(__file__)
    weights_path = f"{p.parent}/weights"
    Path(weights_path).mkdir(parents=True, exist_ok=True)

    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    model = get_network(args.net)
    model.to(device)
    mnist_train = datasets.MNIST(
        ".", train=True, download=True, transform=transforms.ToTensor()
    )
    mnist_test = datasets.MNIST(
        ".", train=False, download=True, transform=transforms.ToTensor()
    )
    train_loader = DataLoader(
        mnist_train, batch_size=args.b, shuffle=True, num_workers=4, pin_memory=True
    )
    test_loader = DataLoader(
        mnist_test, batch_size=args.b, shuffle=False, num_workers=4, pin_memory=True
    )
    opt = optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.wd)
    criterion = nn.MSELoss()  # reconstruction loss

    best_loss = None  # lowest test loss seen so far

    for i in range(1, args.epochs + 1):
        train_loss = epoch(train_loader, model, device, criterion, opt)
        # No optimizer -> epoch() runs in evaluation mode.
        test_loss = epoch(test_loader, model, device, criterion)
        # Keep only the checkpoint with the best (lowest) test loss.
        if best_loss is None or best_loss > test_loss:
            best_loss = test_loss
            torch.save(model.state_dict(), f"{weights_path}/{args.net}.pth")

        print(f"Epoch: {i} | Train Loss: {train_loss:.4f} | Test Loss: {test_loss:.4f}")
49
+
50
+
51
if __name__ == "__main__":
    # BUG FIX: train_nn_network(args) requires a namespace carrying .net, .b,
    # .lr, .wd and .epochs; the previous bare call raised TypeError. Build the
    # namespace with argparse (already imported at the top of this file).
    parser = argparse.ArgumentParser(description="Train an MNIST autoencoder.")
    parser.add_argument("--net", default="nn_sigmoid", help="autoencoder variant (see get_network)")
    parser.add_argument("--b", type=int, default=128, help="batch size")
    parser.add_argument("--lr", type=float, default=1e-3, help="learning rate")
    parser.add_argument("--wd", type=float, default=0.0, help="weight decay")
    parser.add_argument("--epochs", type=int, default=20, help="number of training epochs")
    train_nn_network(parser.parse_args())
knowledge_integration/knowledge/nn_networks/utils.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from models import (
2
+ SigmoidNNAutoencoder,
3
+ TanhNNAutoencoder,
4
+ TanhPNAutoencoder,
5
+ ReLUNNAutoencoder,
6
+ ReLUPNAutoencoder,
7
+ TanhSwishNNAutoencoder,
8
+ ReLUSigmoidNRAutoencoder,
9
+ ReLUSigmoidRRAutoencoder,
10
+ )
11
+ from tqdm import tqdm
12
+
13
+
14
def get_network(name):
    """Instantiate the autoencoder variant registered under *name*.

    Raises:
        NotImplementedError: if *name* is not a known variant.
    """
    if name == "nn_sigmoid":
        return SigmoidNNAutoencoder()
    if name == "nn_tanh":
        return TanhNNAutoencoder()
    if name == "pn_tanh":
        return TanhPNAutoencoder()
    if name == "nn_relu":
        return ReLUNNAutoencoder()
    if name == "pn_relu":
        return ReLUPNAutoencoder()
    if name == "nn_tanh_swish":
        return TanhSwishNNAutoencoder()
    if name == "nr_relu_sigmoid":
        return ReLUSigmoidNRAutoencoder()
    if name == "rr_relu_sigmoid":
        return ReLUSigmoidRRAutoencoder()
    raise NotImplementedError(
        f"Autoencoder of name '{name}' currently is not supported"
    )
36
+
37
+
38
class AverageMeter(object):
    """Track the latest value plus its running sum, count and mean."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Zero out all statistics."""
        self.val = 0
        self.sum = 0
        self.count = 0
        self.avg = 0

    def update(self, val, n=1):
        """Record *val* observed *n* times and refresh the running mean."""
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count
55
+
56
+
57
def epoch(loader, model, device, criterion, opt=None):
    """Run one pass over *loader*; trains when *opt* is given, else evaluates.

    Images are flattened to 784-vectors and the model reconstructs its own
    input (autoencoder). Returns the loss averaged over all samples.

    NOTE(review): the evaluation path does not wrap the forward pass in
    torch.no_grad(); gradients are computed but unused — confirm intended.
    """
    losses = AverageMeter()

    if opt is None:
        model.eval()
    else:
        model.train()
    for inputs, _ in tqdm(loader, leave=False):
        inputs = inputs.view(-1, 28 * 28).to(device)
        outputs = model(inputs)
        loss = criterion(outputs, inputs)
        if opt:
            opt.zero_grad(set_to_none=True)
            loss.backward()
            opt.step()
            # Project parameters back onto the model's feasible set
            # (e.g. nonnegative weights) after every optimizer step.
            model.clamp()

        losses.update(loss.item(), inputs.size(0))

    return losses.avg
knowledge_integration/knw.py ADDED
@@ -0,0 +1,96 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import inspect
import textwrap
import warnings
3
+
4
+
5
+ KNW_INJECTION = {}
6
+
7
class knw:
    """Base class for knowledge-integration units exposed to the LLM.

    A subclass stores the *names* of its core / runnable / test methods in
    the attributes set in __init__; the getters below retrieve those methods
    (or their source code via ``inspect``) for injection into prompts.
    """

    def __init__(self):
        self.name = 'knowledge_integration'
        self.description = 'Integrate knowledge into the LLM.'
        # Names of the methods a subclass provides for each role.
        self.core_function = 'core_function'
        self.test_case = 'test_case'
        self.runnable_function = 'runnable_function'
        self.mode = 'full'
        # Cache of {method_name: source_code}, filled by get_all_function().
        self.method_code = {}

    def get_core_function(self):
        """Return the dedented output of the core-function method."""
        function_name = self.core_function
        core_function = getattr(self, function_name, None)
        return textwrap.dedent(core_function())

    def get_runnable_function(self):
        """Return the dedented output of the runnable-function method."""
        function_name = self.runnable_function
        runnable_function = getattr(self, function_name, None)
        return textwrap.dedent(runnable_function())

    def get_all_code(self):
        """Return (core_code, runnable_code) as a tuple."""
        return self.get_core_function(), self.get_runnable_function()

    def get_test_case(self):
        """Return the cached source of the test-case method."""
        return self.method_code[self.test_case]

    def get_internal_function(self):
        """Concatenate the cached source of every non-core, non-test method."""
        internal_code = ""
        for name, code in self.method_code.items():
            if name not in [self.core_function, self.test_case]:
                internal_code += f"{code}\n"
        return internal_code

    def get_function_code(self, function_name):
        """Return a method's source with `self` references stripped.

        Returns None (after emitting a warning) when the method is missing.
        BUG FIX: the original branch called an undefined ``logger``, so a
        missing method crashed with NameError instead of warning.
        """
        function = getattr(self, function_name, None)
        if function is None:
            warnings.warn("Method not found.")
            return None
        inspect_function = inspect.getsource(function)
        return inspect_function.replace('self,', '').replace('self.', '').replace('self', '')

    def get_all_function_code(self):
        """Render every cached method source as one ```python fenced block."""
        all_code = "```python"
        for name, code in self.method_code.items():
            all_code += f"\n{code}\n"
        return all_code + "```"

    def get_all_function(self):
        """Populate and return the {name: source} cache for subclass methods."""
        methods = inspect.getmembers(self, predicate=inspect.ismethod)
        self.method_code = {name: self.get_function_code(name) for name, _ in methods if name not in ['__init__', 'get_all_function', 'get_all_function_code', 'get_core_function', 'get_function_code', 'get_test_case', 'get_internal_function', 'get_fixed_function']}
        return self.method_code

    def get_fix_function(self):
        """Return the cached method-source mapping as-is."""
        return self.method_code
78
+
79
def remove_outer_indentation(code_str):
    """Strip the common leading indentation shared by all non-blank lines.

    Unlike textwrap.dedent, whitespace-only lines are sliced (not normalized)
    and the margin is computed from non-blank lines only.
    """
    lines = code_str.splitlines()
    indents = [len(ln) - len(ln.lstrip()) for ln in lines if ln.strip()]
    if not indents:
        # Nothing but blank/whitespace lines: leave the input untouched.
        return code_str
    margin = min(indents)
    return '\n'.join(ln[margin:] for ln in lines)
90
+
91
+
92
if __name__ == '__main__':
    # Manual smoke test: dump all registered method sources as a fenced block.
    instance = knw()
    print(instance.get_all_function_code())
knowledge_integration/nearest_correlation_matrix.py ADDED
@@ -0,0 +1,528 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ ################# This Python code is designed by Yancheng Yuan at National University of Singapore to solve ##
3
+ ################# min 0.5*<X-Sigma, X-Sigma>
4
+ ################# s.t. X_ii =b_i, i=1,2,...,n
5
+ ################# X>=tau*I (symmetric and positive semi-definite) ########
6
+ #################
7
+ ################# based on the algorithm in #################
8
+ ################# ``A Quadratically Convergent Newton Method fo r#################
9
+ ################# Computing the Nearest Correlation Matrix #################
10
+ ################# By Houduo Qi and Defeng Sun #################
11
+ ################# SIAM J. Matrix Anal. Appl. 28 (2006) 360--385.
12
+ #################
13
+ ################# Last modified date: March 19, 2016 #################
14
+ ################# #################
15
+ ################# The input arguments Sigma, b>0, tau>=0, and tol (tolerance error) #################
16
+ ################# #################
17
+ ################# #################
18
+ ################# For the correlation matrix problem, set b = np.ones((n,1)) #################
19
+ ################# #################
20
+ ################# For a positive definite matrix #################
21
+ ################# set tau = 1.0e-5 for example #################
22
+ ################# set tol = 1.0e-6 or lower if no very high accuracy required #################
23
+ ################# If the algorithm terminates with the given tol, then it means ##################
24
+ ################# the relative gap between the objective function value of the obtained solution ###
25
+ ################# and the objective function value of the global solution is no more than tol or ###
26
+ ################# the violation of the feasibility is smaller than tol*(1+np.linalg.norm(b)) ############
27
+ #################
28
+ ################# The outputs include the optimal primal solution, the dual solution and others ########
29
+ ################# Diagonal Preconditioner is used #################
30
+ #################
31
+ ################# Send your comments to [email protected] or [email protected] #################
32
+ #################
33
+ ################# Warning: Though the code works extremely well, it is your call to use it or not. #################
34
+
35
+
36
+ # from knw import knw, knowledge_injection # app use
37
+ # import sys
38
+ # sys.path.append('/Users/stephensun/Desktop/pypro/LAMBDA/knowledge_integration')
39
+ from knw import knw # debug use
40
+ import numpy as np
41
+ import scipy.io as sio
42
+ import scipy
43
+ import sys
44
+ import time
45
+
46
+
47
+ # @knowledge_injection('nearest_correlation_matrix')
48
+ class nearest_correlation_matrix(knw):
49
+ # name = 'nearest_correlation_matrix'
50
+ # description = 'Calculate the nearest correlation matrix.'
51
+ # entrance_function = 'NearestCorrelationMatrix'
52
+ # test_case = 'test_ncm'
53
+
54
    def __init__(self):
        """Register metadata the knw framework uses to locate this unit."""
        super().__init__()
        self.name = 'nearest_correlation_matrix'
        # Description shown to the LLM when selecting knowledge units.
        self.description = 'The function calculates the nearest correlation matrix using the quadratically convergent newton method. Acceptable parameters: Sigma, b>0, tau>=0, and tol (tolerance error) For the correlation matrix problem, set b = np.ones((n,1)).'
        # Name of the method that serves as the entry point.
        self.entrance_function = 'NearestCorrelationMatrix'
        # Name of the method holding the usage example / test case.
        self.test_case = 'test_ncm'
60
+
61
    def NearestCorrelationMatrix(self, g_input, b_input=None, tau=None, tol=None):
        """Nearest correlation matrix via the semismooth Newton-CG method.

        Solves  min 0.5*||X - G||_F^2  s.t.  X_ii = b_i,  X >= tau*I
        (Qi & Sun, SIAM J. Matrix Anal. Appl. 28 (2006) 360-385).

        Args:
            g_input: (n, n) input matrix G (symmetrized internally).
            b_input: (n, 1) target diagonal; defaults to all ones.
            tau:     eigenvalue lower bound (X >= tau*I); defaults to 0.
            tol:     relative stopping tolerance; defaults to 1.0e-6.

        Returns:
            (x_result, y): primal solution X and dual variable y.

        Side effects: prints progress/diagnostic lines throughout.
        """
        print('-- Semismooth Newton-CG method starts -- \n')
        [n, m] = g_input.shape
        g_input = g_input.copy()
        t0 = time.time()  # time start
        g_input = (g_input + g_input.transpose()) / 2.0
        b_g = np.ones((n, 1))
        error_tol = 1.0e-6
        # MATLAB-style optional arguments: the tau*I shift is folded into G
        # and b so the core loop only handles the tau = 0 problem.
        if b_input is None:
            tau = 0
        elif tau is None:
            b_g = b_input.copy()
            tau = 0
        elif tol is None:
            b_g = b_input.copy() - tau * np.ones((n, 1))
            g_input = g_input - tau * np.eye(n, n)
        else:
            b_g = b_input.copy() - tau * np.ones((n, 1))
            g_input = g_input - tau * np.eye(n, n)
            error_tol = np.maximum(1.0e-12, tol)

        res_b = np.zeros((300, 1))  # per-iteration gradient-norm history
        norm_b0 = np.linalg.norm(b_g)
        y = np.zeros((n, 1))        # dual variable
        f_y = np.zeros((n, 1))
        k = 0
        f_eval = 0
        iter_whole = 200
        iter_inner = 20  # maximum number of line search in Newton method
        maxit = 200  # maximum number of iterations in PCG
        iterk = 0
        inner = 0
        tol_cg = 1.0e-2  # relative accuracy for CGs
        sigma_1 = 1.0e-4  # Armijo line-search parameter
        x0 = y.copy()
        prec_time = 0
        pcg_time = 0
        eig_time = 0
        c = np.ones((n, 1))
        d = np.zeros((n, 1))
        val_g = np.sum((g_input.astype(float)) * (g_input.astype(float)))
        val_g = val_g * 0.5  # constant term 0.5*||G||_F^2
        # Initial dual point: X(y) = G + diag(y), symmetrized for safety.
        x_result = g_input + np.diagflat(y)
        x_result = (x_result + x_result.transpose()) / 2.0
        eig_time0 = time.time()
        [p_x, lamb] = self.my_mexeig(x_result)
        eig_time = eig_time + (time.time() - eig_time0)
        [f_0, f_y] = self.my_gradient(y, lamb, p_x, b_g, n)
        initial_f = val_g - f_0
        x_result = self.my_pca(x_result, lamb, p_x, b_g, n)
        val_obj = np.sum(((x_result - g_input) * (x_result - g_input))) / 2.0
        gap = (val_obj - initial_f) / (1.0 + np.abs(initial_f) + np.abs(val_obj))
        f = f_0.copy()
        f_eval = f_eval + 1
        b_input = b_g - f_y  # current (negative) gradient of the dual
        norm_b = np.linalg.norm(b_input)
        time_used = time.time() - t0

        print('Newton-CG: Initial Dual objective function value: %s \n' % initial_f)
        print('Newton-CG: Initial Primal objective function value: %s \n' % val_obj)
        print('Newton-CG: Norm of Gradient: %s \n' % norm_b)
        print('Newton-CG: computing time used so far: %s \n' % time_used)

        omega_12 = self.my_omega_mat(p_x, lamb, n)
        x0 = y.copy()

        # Outer Newton loop: stop on small duality gap, small relative
        # gradient, or iteration budget.
        while np.abs(gap) > error_tol and norm_b / (1 + norm_b0) > error_tol and k < iter_whole:
            prec_time0 = time.time()
            c = self.my_precond_matrix(omega_12, p_x, n)
            prec_time = prec_time + (time.time() - prec_time0)

            # Solve the Newton system V d = b by preconditioned CG.
            pcg_time0 = time.time()
            [d, flag, relres, iterk] = self.my_pre_cg(b_input, tol_cg, maxit, c, omega_12, p_x, n)
            pcg_time = pcg_time + (time.time() - pcg_time0)
            print('Newton-CG: Number of CG Iterations=== %s \n' % iterk)
            if flag == 1:
                print('=== Not a completed Newton-CG step === \n')

            slope = np.dot((f_y - b_g).transpose(), d)

            # Trial full Newton step, then re-evaluate the dual objective.
            y = (x0 + d).copy()
            x_result = g_input + np.diagflat(y)
            x_result = (x_result + x_result.transpose()) / 2.0
            eig_time0 = time.time()
            [p_x, lamb] = self.my_mexeig(x_result)
            eig_time = eig_time + (time.time() - eig_time0)
            [f, f_y] = self.my_gradient(y, lamb, p_x, b_g, n)

            # Armijo backtracking: halve the step until sufficient decrease.
            k_inner = 0
            while k_inner <= iter_inner and f > f_0 + sigma_1 * (np.power(0.5, k_inner)) * slope + 1.0e-6:
                k_inner = k_inner + 1
                y = x0 + (np.power(0.5, k_inner)) * d
                x_result = g_input + np.diagflat(y)
                x_result = (x_result + x_result.transpose()) / 2.0
                eig_time0 = time.time()
                [p_x, lamb] = self.my_mexeig(x_result)
                eig_time = eig_time + (time.time() - eig_time0)
                [f, f_y] = self.my_gradient(y, lamb, p_x, b_g, n)

            f_eval = f_eval + k_inner + 1
            x0 = y.copy()
            f_0 = f.copy()
            val_dual = val_g - f_0
            x_result = self.my_pca(x_result, lamb, p_x, b_g, n)
            val_obj = np.sum((x_result - g_input) * (x_result - g_input)) / 2.0
            gap = (val_obj - val_dual) / (1 + np.abs(val_dual) + np.abs(val_obj))
            print('Newton-CG: The relative duality gap: %s \n' % gap)
            print('Newton-CG: The Dual objective function value: %s \n' % val_dual)
            print('Newton-CG: The Primal objective function value: %s \n' % val_obj)

            b_input = b_g - f_y
            norm_b = np.linalg.norm(b_input)
            time_used = time.time() - t0
            rel_norm_b = norm_b / (1 + norm_b0)
            print('Newton-CG: Norm of Gradient: %s \n' % norm_b)
            print('Newton-CG: Norm of Relative Gradient: %s \n' % rel_norm_b)
            print('Newton-CG: Computing time used so for %s \n' % time_used)
            res_b[k] = norm_b
            k = k + 1
            omega_12 = self.my_omega_mat(p_x, lamb, n)

        # Rank of the positive part, then undo the tau*I shift applied above.
        position_rank = np.maximum(lamb, 0) > 0
        rank_x = (np.maximum(lamb, 0)[position_rank]).size
        final_f = val_g - f
        x_result = x_result + tau * (np.eye(n))
        time_used = time.time() - t0
        print('\n')

        print('Newton-CG: Number of iterations: %s \n' % k)
        print('Newton-CG: Number of Funtion Evaluation: =========== %s\n' % f_eval)
        print('Newton-CG: Final Dual Objective Function value: ========= %s\n' % final_f)
        print('Newton-CG: Final Primal Objective Function value: ======= %s \n' % val_obj)
        print('Newton-CG: The final relative duality gap: %s \n' % gap)
        print('Newton-CG: The rank of the Optimal Solution - tau*I: %s \n' % rank_x)
        print('Newton-CG: computing time for computing preconditioners: %s \n' % prec_time)
        print('Newton-CG: computing time for linear system solving (cgs time): %s \n' % pcg_time)
        print('Newton-CG: computing time for eigenvalue decompositions: =============== %s \n' % eig_time)
        print('Newton-CG: computing time used for equal weight calibration ============ %s \n' % time_used)

        return x_result, y
201
+
202
+ def my_gradient(self, y_input, lamb, p_input, b_0, n):
203
+ f = 0.0
204
+ Fy = np.zeros((n, 1))
205
+ p_input_copy = (p_input.copy()).transpose()
206
+ for i in range(0, n):
207
+ p_input_copy[i, :] = ((np.maximum(lamb[i], 0).astype(float)) ** 0.5) * p_input_copy[i, :]
208
+
209
+ for i in range(0, n):
210
+ Fy[i] = np.sum(p_input_copy[:, i] * p_input_copy[:, i])
211
+
212
+ for i in range(0, n):
213
+ f = f + np.square((np.maximum(lamb[i], 0)))
214
+
215
+ f = 0.5 * f - np.dot(b_0.transpose(), y_input)
216
+
217
+ return f, Fy
218
+
219
+
220
+ # use PCA to generate a primal feasible solution checked
221
+
222
    def my_pca(self, x_input, lamb, p_input, b_0, n):
        """Project onto the PSD cone and rescale the diagonal to b_0.

        Builds the positive part P diag(max(lamb, 0)) P^T of x_input from its
        eigen-decomposition (lamb sorted descending, as produced by my_mexeig),
        then forces the diagonal to equal b_0, yielding a primal feasible
        correlation-like matrix.

        :param x_input: (n, n) symmetric matrix.
        :param lamb: eigenvalues of x_input, descending order.
        :param p_input: (n, n) matrix whose columns are the eigenvectors.
        :param b_0: (n, 1) target diagonal (all ones for a correlation matrix).
        :param n: problem dimension.
        :return: (n, n) PSD matrix with diagonal b_0.
        """
        x_pca = x_input
        lamb = np.asarray(lamb)
        lp = lamb > 0
        r = lamb[lp].size  # number of positive eigenvalues
        if r == 0:
            # No positive spectrum: the PSD part is the zero matrix.
            x_pca = np.zeros((n, n))
        elif r == n:
            # Already PSD: nothing to cut off.
            x_pca = x_input
        elif r < (n / 2.0):
            # Few positive eigenvalues: assemble the low-rank positive part directly.
            lamb1 = lamb[lp].copy()
            lamb1 = lamb1.transpose()
            lamb1 = np.sqrt(lamb1.astype(float))
            P1 = p_input[:, 0:r].copy()
            if r > 1:
                P1 = np.dot(P1, np.diagflat(lamb1))
                x_pca = np.dot(P1, P1.transpose())
            else:
                x_pca = np.dot(np.dot(np.square(lamb1), P1), P1.transpose())

        else:
            # Many positive eigenvalues: cheaper to add back the negative part,
            # i.e. X_+ = X + P_- diag(-lamb_-) P_-^T.
            lamb2 = -lamb[r:n].copy()
            lamb2 = np.sqrt(lamb2.astype(float))
            p_2 = p_input[:, r:n]
            p_2 = np.dot(p_2, np.diagflat(lamb2))
            x_pca = x_pca + np.dot(p_2, p_2.transpose())

        # To make x_pca positive semidefinite with diagonal elements exactly b0:
        # raise the diagonal to at least b_0, then rescale symmetrically.
        d = x_pca.diagonal()
        d = d.reshape((d.size, 1))
        d = np.maximum(d, b_0.reshape(d.shape))
        x_pca = x_pca - np.diagflat(x_pca.diagonal()) + np.diagflat(d)
        d = d.astype(float) ** (-0.5)
        d = d * ((np.sqrt(b_0.astype(float))).reshape(d.shape))
        x_pca = x_pca * (np.dot(d, d.reshape(1, d.size)))

        return x_pca
259
+
260
+
261
+ # end of PCA
262
+
263
+ # To generate the first order difference of lambda
264
+ # To generate the first order essential part of d
265
+
266
+
267
    def my_omega_mat(self, p_input, lamb, n):
        """Build the Omega_12 block of the generalized Jacobian.

        With eigenvalues sorted descending, r positive and s = n - r
        non-positive, Omega_12[i, j] = lamb_i / (|lamb_i| + |lamb_j|) couples
        each positive eigenvalue with each non-positive one.

        :param p_input: eigenvector matrix (unused here, kept for signature parity).
        :param lamb: eigenvalues, descending order.
        :param n: problem dimension.
        :return: (r, s) array; all-ones (n, n) when every eigenvalue is
            positive; empty array when none is.
        """
        idx_idp = np.where(lamb > 0)
        idx_idp = idx_idp[0]
        idx_idm = np.setdiff1d(range(0, n), idx_idp)  # non-positive indices (not used below)
        n = lamb.size
        r = idx_idp.size
        if r > 0:
            if r == n:
                omega_12 = np.ones((n, n))
            else:
                s = n - r
                # lamb is sorted descending, so the first r entries are the positive ones.
                dp = lamb[0:r].copy()
                dp = dp.reshape(dp.size, 1)
                dn = lamb[r:n].copy()
                dn = dn.reshape((dn.size, 1))
                omega_12 = np.dot(dp, np.ones((1, s)))
                omega_12 = omega_12 / (np.dot(np.abs(dp), np.ones((1, s))) + np.dot(np.ones((r, 1)), abs(dn.transpose())))
                omega_12 = omega_12.reshape((r, s))

        else:
            omega_12 = np.array([])

        return omega_12
290
+
291
+
292
+ # End of my_omega_mat
293
+
294
+
295
+ # To generate Jacobian
296
+
297
+
298
    def my_jacobian_matrix(self, x, omega_12, p_input, n):
        """Apply the generalized Jacobian (V-operator) of the projection to x.

        Computes the diagonal of P [Omega ∘ (P^T diag(x) P)] P^T plus a small
        1.0e-10 * x regularization term that keeps the operator positive
        definite for the CG solver. Two symmetric code paths exploit whichever
        of the positive (r) or non-positive (n - r) eigenspace is smaller.

        :param x: (n,) or (n, 1) vector the operator is applied to.
        :param omega_12: (r, s) block from my_omega_mat.
        :param p_input: (n, n) eigenvector matrix.
        :param n: problem dimension.
        :return: (n, 1) result vector.
        """
        x_result = np.zeros((n, 1))
        [r, s] = omega_12.shape
        if r > 0:
            hmat_1 = p_input[:, 0:r].copy()
            if r < n / 2.0:
                # Work through the (smaller) positive eigenspace.
                i = 0
                while i < n:
                    hmat_1[i, :] = x[i] * hmat_1[i, :]
                    i = i + 1

                omega_12 = omega_12 * (np.dot(hmat_1.transpose(), p_input[:, r:n]))
                hmat = np.dot(hmat_1.transpose(), np.dot(p_input[:, 0:r], p_input[:, 0:r].transpose()))
                hmat = hmat + np.dot(omega_12, p_input[:, r:n].transpose())
                hmat = np.vstack((hmat, np.dot(omega_12.transpose(), p_input[:, 0:r].transpose())))
                i = 0
                while i < n:
                    x_result[i] = np.dot(p_input[i, :], hmat[:, i])
                    x_result[i] = x_result[i] + 1.0e-10 * x[i]  # positive-definiteness perturbation
                    i = i + 1

            else:
                if r == n:
                    # NOTE(review): for r == n the projection is the identity, so
                    # x + 1.0e-10 * x would be expected; only the perturbation
                    # term is returned here — confirm against the reference
                    # Newton-CG implementation.
                    x_result = 1.0e-10 * x
                else:
                    # Work through the (smaller) non-positive eigenspace via the complement.
                    hmat_2 = p_input[:, r:n].copy()
                    i = 0
                    while i < n:
                        hmat_2[i, :] = x[i] * hmat_2[i, :]
                        i = i + 1

                    omega_12 = np.ones((r, s)) - omega_12
                    omega_12 = omega_12 * (np.dot(p_input[:, 0:r].transpose(), hmat_2))
                    hmat = np.dot(p_input[:, r:n].transpose(), hmat_2)
                    hmat = np.dot(hmat, p_input[:, r:n].transpose())
                    hmat = hmat + np.dot(omega_12.transpose(), p_input[:, 0:r].transpose())
                    hmat = np.vstack((np.dot(omega_12, p_input[:, r:n].transpose()), hmat))
                    i = 0
                    while i < n:
                        x_result[i] = np.dot(-p_input[i, :], hmat[:, i])
                        x_result[i] = x[i] + x_result[i] + 1.0e-10 * x[i]
                        i = i + 1

        return x_result
342
+
343
+
344
+ # end of Jacobian
345
+ # PCG Method
346
+
347
+
348
    def my_pre_cg(self, b, tol, maxit, c, omega_12, p_input, n):
        """Diagonally preconditioned conjugate-gradient solve of V p = b.

        The coefficient operator V is applied implicitly through
        my_jacobian_matrix; c is the diagonal preconditioner built by
        my_precond_matrix.

        :param b: (n, 1) right-hand side (current gradient residual).
        :param tol: relative residual tolerance.
        :param maxit: maximum number of CG iterations.
        :param c: (n, 1) positive diagonal preconditioner.
        :param omega_12: (r, s) block from my_omega_mat.
        :param p_input: (n, n) eigenvector matrix.
        :param n: problem dimension.
        :return: (p, flag, relres, iterk) — approximate solution, convergence
            flag (0 = converged, 1 = not), relative residual, iterations used.
        """
        # Initializations
        r = b.copy()  # residual; equals b since the initial iterate is zero
        r = r.reshape(r.size, 1)
        c = c.reshape(c.size, 1)
        n2b = np.linalg.norm(b)
        tolb = tol * n2b  # absolute stopping threshold
        p = np.zeros((n, 1))
        flag = 1
        iterk = 0
        relres = 1000
        # Precondition
        z = r / c
        rz_1 = np.dot(r.transpose(), z)
        rz_2 = 1
        d = z.copy()
        # d = d.reshape(z.shape)
        # CG Iteration
        for k in range(0, maxit):
            if k > 0:
                beta = rz_1 / rz_2
                d = z + beta * d

            w = self.my_jacobian_matrix(d, omega_12, p_input, n)
            denom = np.dot(d.transpose(), w)
            iterk = k + 1
            relres = np.linalg.norm(r) / n2b
            if denom <= 0:
                # Non-positive curvature: fall back to the normalized direction.
                ss = 0  # don't know the usage, check the paper
                p = d / np.linalg.norm(d)
                break
            else:
                alpha = rz_1 / denom
                p = p + alpha * d
                r = r - alpha * w

            z = r / c
            if np.linalg.norm(r) <= tolb:  # exit if hmat p = b solved in relative error tolerance
                iterk = k + 1
                relres = np.linalg.norm(r) / n2b
                flag = 0
                break

            rz_2 = rz_1
            rz_1 = np.dot(r.transpose(), z)

        return p, flag, relres, iterk
395
+
396
+
397
+ # end of pre_cg
398
+
399
+ # to generate the diagonal preconditioner
400
+
401
+
402
    def my_precond_matrix(self, omega_12, p_input, n):
        """Build the diagonal preconditioner c for the CG solve.

        c[i] approximates the i-th diagonal entry of the generalized Jacobian
        V; entries are floored at 1.0e-8 so the preconditioner stays safely
        positive. The two code paths mirror those of my_jacobian_matrix.

        :param omega_12: (r, s) block from my_omega_mat.
        :param p_input: (n, n) eigenvector matrix.
        :param n: problem dimension.
        :return: (n, 1) positive diagonal preconditioner.
        """
        [r, s] = omega_12.shape
        c = np.ones((n, 1))
        if r > 0:
            if r < n / 2.0:
                # Small positive eigenspace: accumulate its contribution directly.
                hmat = (p_input.copy()).transpose()
                hmat = hmat * hmat  # elementwise squares of eigenvector components
                hmat_12 = np.dot(hmat[0:r, :].transpose(), omega_12)
                d = np.ones((r, 1))
                for i in range(0, n):
                    c_temp = np.dot(d.transpose(), hmat[0:r, i])
                    c_temp = c_temp * hmat[0:r, i]
                    c[i] = np.sum(c_temp)
                    c[i] = c[i] + 2.0 * np.dot(hmat_12[i, :], hmat[r:n, i])
                    if c[i] < 1.0e-8:
                        c[i] = 1.0e-8

            else:
                if r < n:
                    # Large positive eigenspace: compute through the complement.
                    hmat = (p_input.copy()).transpose()
                    hmat = hmat * hmat
                    omega_12 = np.ones((r, s)) - omega_12
                    hmat_12 = np.dot(omega_12, hmat[r:n, :])
                    d = np.ones((s, 1))
                    dd = np.ones((n, 1))

                    for i in range(0, n):
                        c_temp = np.dot(d.transpose(), hmat[r:n, i])
                        c[i] = np.sum(c_temp * hmat[r:n, i])
                        c[i] = c[i] + 2.0 * np.dot(hmat[0:r, i].transpose(), hmat_12[:, i])
                        alpha = np.sum(hmat[:, i])
                        c[i] = alpha * np.dot(hmat[:, i].transpose(), dd) - c[i]
                        if c[i] < 1.0e-8:
                            c[i] = 1.0e-8

        return c
438
+
439
+
440
+ # end of precond_matrix
441
+
442
+
443
+ # my_issorted()
444
+
445
+ def my_issorted(self, x_input, flag):
446
+ n = x_input.size
447
+ tf_value = False
448
+ if n < 2:
449
+ tf_value = True
450
+ else:
451
+ if flag == 1:
452
+ i = 0
453
+ while i < n - 1:
454
+ if x_input[i] <= x_input[i + 1]:
455
+ i = i + 1
456
+ else:
457
+ break
458
+
459
+ if i == n - 1:
460
+ tf_value = True
461
+ elif i < n - 1:
462
+ tf_value = False
463
+
464
+ elif flag == -1:
465
+ i = n - 1
466
+ while i > 0:
467
+ if x_input[i] <= x_input[i - 1]:
468
+ i = i - 1
469
+ else:
470
+ break
471
+
472
+ if i == 0:
473
+ tf_value = True
474
+ elif i > 0:
475
+ tf_value = False
476
+
477
+ return tf_value
478
+
479
+
480
+ # end of my_issorted()
481
+
482
+
483
+ def my_mexeig(self, x_input):
484
+ [n, m] = x_input.shape
485
+ [lamb, p_x] = np.linalg.eigh(x_input)
486
+ # lamb = lamb.reshape((lamb.size, 1))
487
+ p_x = p_x.real
488
+ lamb = lamb.real
489
+ if self.my_issorted(lamb, 1):
490
+ lamb = lamb[::-1]
491
+ p_x = np.fliplr(p_x)
492
+ elif self.my_issorted(lamb, -1):
493
+ return p_x, lamb
494
+ else:
495
+ idx = np.argsort(-lamb)
496
+ # lamb_old = lamb # add for debug
497
+ lamb = lamb[idx]
498
+ # p_x_old = p_x add for debug
499
+ p_x = p_x[:, idx]
500
+
501
+ lamb = lamb.reshape((n, 1))
502
+ p_x = p_x.reshape((n, n))
503
+
504
+ return p_x, lamb
505
+
506
+
507
+ def test_ncm(self):
508
+ # test
509
+ n = 3000
510
+ data_g_test = scipy.randn(n, n)
511
+ data_g_test = (data_g_test + data_g_test.transpose()) / 2.0 #对称
512
+ data_g_test = data_g_test - np.diag(np.diag(data_g_test)) + np.eye(n) #对角元素都是1
513
+ b = np.ones((n, 1))
514
+ tau = 0
515
+ tol = 1.0e-6
516
+ [x_test_result, y_test_result] = self.NearestCorrelationMatrix(data_g_test, b, tau, tol)
517
+ print("The x_test_result: \n", x_test_result)
518
+ print()
519
+ print("The y_test_result: \n", y_test_result)
520
+
521
+
522
+
523
if __name__ == '__main__':
    # Manual inspection of the knowledge-entry introspection API for the
    # nearest-correlation-matrix class.
    ncm = nearest_correlation_matrix()
    print(ncm.get_all_function())
    print(ncm.get_core_function())
    print(ncm.get_test_case())
    print(ncm.get_internal_function())
knowledge_integration/nn_network.py ADDED
@@ -0,0 +1,261 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from knw import knw
2
+ import textwrap
3
+
4
class nn_networks(knw):
    """Knowledge entry: 'fixed points of nonnegative neural networks'.

    Exposes a short core snippet (argument setup + training entry call) and a
    large runnable snippet (autoencoder variants + MNIST training loop).
    ``mode == 'core'`` means the runnable snippet is executed in the back-end
    first, then only the core snippet is shown to the LLM.
    """

    def __init__(self):
        super().__init__()
        # Name and description are embedded for similarity-based retrieval.
        self.name = 'Fixed_points_of_nonnegative_neural_networks'
        self.description = 'This is fixed_points_of_nonnegative_neural_networks which used fixed point theory to analyze nonnegative neural networks, which we define as neural networks that map nonnegative vectors to nonnegative vectors. Variables: networks: nn_sigmoid, learning rate: 5e-3, epochs: 30, wd: 0, b: 64 '
        # Method names exposing the code artifacts (see base class knw).
        self.core_function = 'core'
        self.runnable_function = 'runnable'
        self.test_case = 'case_nn_networks'
        self.mode = 'core'

    def core(self):
        """Return the snippet that configures args and launches training."""
        case = """
        args = argparse.ArgumentParser()
        args.net = 'nn_sigmoid'
        args.lr = 5e-3
        args.epochs = 30
        args.wd = 0
        args.b = 64
        train_nn_network(args)
        """
        return case

    def runnable(self):
        """Return the self-contained model/training source executed in the back-end."""
        code = """
        import numpy as np
        import scipy.io as sio
        import scipy
        import sys
        import time
        import argparse
        import torch
        import math
        from torch import nn
        from torch.nn.utils.parametrizations import spectral_norm
        from pathlib import Path
        from torch import optim
        from torch.utils.data import DataLoader
        from torchvision import transforms
        from torchvision import datasets
        from tqdm import tqdm

        def initialize_weights(tensor):
            return tensor.uniform_() * math.sqrt(0.25 / (tensor.shape[0] + tensor.shape[1]))

        class _RRAutoencoder(nn.Module):
            def __init__(self):
                super().__init__()
                self.linear_1 = nn.Linear(784, 200)
                self.linear_2 = nn.Linear(200, 784)
                self.encoder = self.linear_1
                self.decoder = self.linear_2

            def forward(self, x):
                x = self.encoder(x)
                x = self.decoder(x)

                return x

            def clamp(self):
                pass

        class _NNAutoencoder(_RRAutoencoder):
            def __init__(self):
                super().__init__()
                self.linear_1.bias.data.zero_()
                self.linear_2.bias.data.zero_()
                self.linear_1.weight = nn.Parameter(
                    initialize_weights(self.linear_1.weight.data)
                )
                self.linear_2.weight = nn.Parameter(
                    initialize_weights(self.linear_2.weight.data)
                )

            def clamp(self):
                self.linear_1.weight.data.clamp_(min=0)
                self.linear_2.weight.data.clamp_(min=0)
                self.linear_1.bias.data.clamp_(min=0)
                self.linear_2.bias.data.clamp_(min=0)

        class _PNAutoencoder(_NNAutoencoder):
            def clamp(self):
                self.linear_1.weight.data.clamp_(min=1e-3)
                self.linear_2.weight.data.clamp_(min=1e-3)
                self.linear_1.bias.data.clamp_(min=0)
                self.linear_2.bias.data.clamp_(min=0)

        class _NRAutoencoder(_NNAutoencoder):
            def clamp(self):
                self.linear_1.weight.data.clamp_(min=0)
                self.linear_2.weight.data.clamp_(min=0)

        class SigmoidNNAutoencoder(_NNAutoencoder):
            def __init__(self):
                super().__init__()
                self.encoder = nn.Sequential(self.linear_1, nn.Sigmoid())
                self.decoder = nn.Sequential(self.linear_2, nn.Sigmoid())

        class TanhNNAutoencoder(_NNAutoencoder):
            def __init__(self):
                super().__init__()
                self.encoder = nn.Sequential(self.linear_1, nn.Tanh())
                self.decoder = nn.Sequential(self.linear_2, nn.Tanh())

        class TanhPNAutoencoder(_PNAutoencoder):
            def __init__(self):
                super().__init__()
                self.encoder = nn.Sequential(self.linear_1, nn.Tanh())
                self.decoder = nn.Sequential(self.linear_2, nn.Tanh())

        class ReLUNNAutoencoder(_NNAutoencoder):
            def __init__(self):
                super().__init__()
                self.linear_1 = spectral_norm(self.linear_1)
                self.linear_2 = spectral_norm(self.linear_2)
                self.encoder = nn.Sequential(self.linear_1, nn.ReLU())
                self.decoder = nn.Sequential(self.linear_2, nn.ReLU())

            def clamp(self):
                self.linear_1.parametrizations.weight.original.data.clamp_(min=0)
                self.linear_2.parametrizations.weight.original.data.clamp_(min=0)
                self.linear_1.bias.data.clamp_(min=0)
                self.linear_2.bias.data.clamp_(min=0)

        class ReLUPNAutoencoder(_PNAutoencoder):
            def __init__(self):
                super().__init__()
                self.linear_1 = spectral_norm(self.linear_1)
                self.linear_2 = spectral_norm(self.linear_2)
                self.encoder = nn.Sequential(self.linear_1, nn.ReLU())
                self.decoder = nn.Sequential(self.linear_2, nn.ReLU())

            def clamp(self):
                self.linear_1.parametrizations.weight.original.data.clamp_(min=1e-3)
                self.linear_2.parametrizations.weight.original.data.clamp_(min=1e-3)
                self.linear_1.bias.data.clamp_(min=0)
                self.linear_2.bias.data.clamp_(min=0)


        class TanhSwishNNAutoencoder(_NNAutoencoder):
            def __init__(self):
                super().__init__()
                self.encoder = nn.Sequential(self.linear_1, nn.Tanh())
                self.decoder = nn.Sequential(self.linear_2, nn.SiLU())

        class ReLUSigmoidNRAutoencoder(_NRAutoencoder):
            def __init__(self):
                super().__init__()
                self.encoder = nn.Sequential(self.linear_1, nn.ReLU())
                self.decoder = nn.Sequential(self.linear_2, nn.Sigmoid())

        class ReLUSigmoidRRAutoencoder(_RRAutoencoder):
            def __init__(self):
                super().__init__()
                self.encoder = nn.Sequential(self.linear_1, nn.ReLU())
                self.decoder = nn.Sequential(self.linear_2, nn.Sigmoid())

        def get_network(name):
            match name:
                case "nn_sigmoid":
                    return SigmoidNNAutoencoder()
                case "nn_tanh":
                    return TanhNNAutoencoder()
                case "pn_tanh":
                    return TanhPNAutoencoder()
                case "nn_relu":
                    return ReLUNNAutoencoder()
                case "pn_relu":
                    return ReLUPNAutoencoder()
                case "nn_tanh_swish":
                    return TanhSwishNNAutoencoder()
                case "nr_relu_sigmoid":
                    return ReLUSigmoidNRAutoencoder()
                case "rr_relu_sigmoid":
                    return ReLUSigmoidRRAutoencoder()
                case _:
                    raise NotImplementedError(
                        f"Autoencoder of name '{name}' currently is not supported"
                    )

        class AverageMeter(object):

            def __init__(self):
                self.reset()

            def reset(self):
                self.val = 0
                self.avg = 0
                self.sum = 0
                self.count = 0

            def update(self, val, n=1):
                self.val = val
                self.sum += val * n
                self.count += n
                self.avg = self.sum / self.count

        def epoch(loader, model, device, criterion, opt=None):
            losses = AverageMeter()

            if opt is None:
                model.eval()
            else:
                model.train()
            for inputs, _ in tqdm(loader, leave=False):
                inputs = inputs.view(-1, 28 * 28).to(device)
                outputs = model(inputs)
                loss = criterion(outputs, inputs)
                if opt:
                    opt.zero_grad(set_to_none=True)
                    loss.backward()
                    opt.step()
                    model.clamp()

                losses.update(loss.item(), inputs.size(0))

            return losses.avg

        def train_nn_network(args):
            # p = Path(__file__)
            # weights_path = f"{p.parent}/weights"
            # Path(weights_path).mkdir(parents=True, exist_ok=True)

            device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
            model = get_network(args.net)
            model.to(device)
            mnist_train = datasets.MNIST(
                ".", train=True, download=True, transform=transforms.ToTensor()
            )
            mnist_test = datasets.MNIST(
                ".", train=False, download=True, transform=transforms.ToTensor()
            )
            train_loader = DataLoader(
                mnist_train, batch_size=args.b, shuffle=True, num_workers=4, pin_memory=True
            )
            test_loader = DataLoader(
                mnist_test, batch_size=args.b, shuffle=False, num_workers=4, pin_memory=True
            )
            opt = optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.wd)
            criterion = nn.MSELoss()

            best_loss = None

            for i in range(1, args.epochs + 1):
                train_loss = epoch(train_loader, model, device, criterion, opt)
                test_loss = epoch(test_loader, model, device, criterion)
                if best_loss is None or best_loss > test_loss:
                    best_loss = test_loss
                    # torch.save(model.state_dict(), f"{weights_path}/{args.net}.pth")

                print(f"Epoch: {i} | Train Loss: {train_loss:.4f} | Test Loss: {test_loss:.4f}")

        """
        return code
257
+
258
if __name__ == '__main__':
    # Manual smoke check of the knowledge entry's exposed code artifacts.
    nnn = nn_networks()
    print(nnn.get_core_function())
    print(nnn.runnable())
knowledge_integration/pami.py ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from knw import knw
2
+
3
+
4
class pattern_mining(knw):
    """Knowledge entry wrapping the PAMI pattern-mining library (FP-Growth example)."""

    def __init__(self):
        super().__init__()
        # Name and description are embedded for similarity-based retrieval.
        self.name = "pami"
        self.description = "The pami library is a Python library for pattern mining. User can choose many algorithms like FP-growth algorithm."
        self.core_function = "pami"
        self.mode = 'full'  # 'full': the snippet below is handed to the LLM verbatim

    def pami(self):
        """Return a self-contained FP-Growth usage example as source text."""
        return """
        !pip install PAMI
        from PAMI.frequentPattern.basic import FPGrowth as alg
        fileURL = "https://u-aizu.ac.jp/~udayrage/datasets/transactionalDatabases/Transactional_T10I4D100K.csv"
        minSup = 300
        obj = alg.FPGrowth(iFile=fileURL, minSup=minSup, sep='\t') #here is sep='tab'
        # obj.startMine() #deprecated
        obj.mine()
        obj.save('frequentPatternsAtMinSupCount300.txt')
        frequentPatternsDF = obj.getPatternsAsDataFrame()
        print('Total No of patterns: ' + str(len(frequentPatternsDF))) # print the total number of patterns
        print('Runtime: ' + str(obj.getRuntime())) # measure the runtime
        print('Memory (RSS): ' + str(obj.getMemoryRSS()))
        print('Memory (USS): ' + str(obj.getMemoryUSS()))
        """
28
+
29
+
30
+ # if __name__ == '__main__':
31
+ # pami = pami()
32
+ # print(pami.get_core_function())
knowledge_integration/test.yaml ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ case: "def epoch(loader, model, device, criterion, opt=None):
2
+ losses = AverageMeter()
3
+
4
+ if opt is None:
5
+ model.eval()
6
+ else:
7
+ model.train()
8
+ for inputs, _ in tqdm(loader, leave=False):
9
+ inputs = inputs.view(-1, 28 * 28).to(device)
10
+ outputs = model(inputs)
11
+ loss = criterion(outputs, inputs)
12
+ if opt:
13
+ opt.zero_grad(set_to_none=True)
14
+ loss.backward()
15
+ opt.step()
16
+ model.clamp()
17
+
18
+ losses.update(loss.item(), inputs.size(0))
19
+
20
+ return losses.avg"
knw.py ADDED
@@ -0,0 +1,96 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import inspect
2
+ import textwrap
3
+
4
+
5
+ KNW_INJECTION = {}
6
+
7
class knw:
    """Base class for knowledge-integration entries.

    Subclasses set ``name``/``description`` (used as retrieval keys) and point
    ``core_function`` / ``runnable_function`` / ``test_case`` at method names
    whose bodies hold the code artifacts. Fix in this revision:
    ``get_function_code`` referenced an undefined name ``logger`` and raised
    NameError whenever a method was missing; it now logs a warning through the
    stdlib ``logging`` module and returns None.
    """

    def __init__(self):
        # Retrieval key components.
        self.name = 'knowledge_integration'
        self.description = 'Integrate knowledge into the LLM.'
        # Names of the methods holding the different code artifacts.
        self.core_function = 'core_function'
        self.test_case = 'test_case'
        self.runnable_function = 'runnable_function'
        self.mode = 'full'
        # name -> source cache filled by get_all_function().
        self.method_code = {}

    def get_core_function(self):
        """Return the dedented output of the core-function method."""
        function_name = self.core_function
        core_function = getattr(self, function_name, None)
        return textwrap.dedent(core_function())

    def get_runnable_function(self):
        """Return the dedented output of the runnable-function method."""
        function_name = self.runnable_function
        runnable_function = getattr(self, function_name, None)
        return textwrap.dedent(runnable_function())

    def get_all_code(self):
        """Return (core, runnable) code as a tuple."""
        return self.get_core_function(), self.get_runnable_function()

    def get_test_case(self):
        """Return the recorded source of the test-case method.

        Requires get_all_function() to have populated method_code first.
        """
        return self.method_code[self.test_case]

    def get_internal_function(self):
        """Return the source of every recorded method except core and test-case."""
        internal_code = ""
        for name, code in self.method_code.items():
            if name not in [self.core_function, self.test_case]:
                internal_code += f"{code}\n"
        return internal_code

    def get_function_code(self, function_name):
        """Return the (self-stripped) source of one method, or None if absent.

        :param function_name: attribute name of the method to fetch.
        """
        function = getattr(self, function_name, None)
        if function is None:
            # Fix: the original referenced an undefined `logger`, raising
            # NameError here instead of warning.
            import logging
            logging.getLogger(__name__).warning("Method not found.")
            return None
        inspect_function = inspect.getsource(function)
        # Crude textual removal of the `self` parameter/references so the
        # snippet reads like a free function (behavior kept from original).
        return inspect_function.replace('self,', '').replace('self.', '').replace('self', '')

    def get_all_function_code(self):
        """Return all recorded method sources wrapped in one python code fence."""
        all_code = "```python"
        for name, code in self.method_code.items():
            all_code += f"\n{code}\n"
        return all_code + "```"

    def get_all_function(self):
        """Cache and return the source of every public method of the instance.

        NOTE(review): the exclusion list names 'get_fixed_function' while the
        actual method is get_fix_function — presumably a typo; preserved to
        keep behavior identical.
        """
        methods = inspect.getmembers(self, predicate=inspect.ismethod)
        self.method_code = {name: self.get_function_code(name) for name, _ in methods if name not in ['__init__', 'get_all_function', 'get_all_function_code', 'get_core_function', 'get_function_code', 'get_test_case', 'get_internal_function', 'get_fixed_function']}
        return self.method_code

    def get_fix_function(self):
        """Return the cached method-source dictionary."""
        return self.method_code
78
+
79
def remove_outer_indentation(code_str):
    """Strip the common leading indentation shared by all non-empty lines.

    Whitespace-only input (or an empty string) is returned unchanged; blank
    lines are preserved in the output.
    """
    lines = code_str.splitlines()
    content_lines = [ln for ln in lines if ln.strip()]
    if not content_lines:
        return code_str
    # Smallest indent across lines that actually contain text.
    common = min(len(ln) - len(ln.lstrip()) for ln in content_lines)
    return '\n'.join(ln[common:] for ln in lines)
+ return '\n'.join(aligned_lines)
90
+
91
+
92
if __name__ == '__main__':
    # Manual smoke check: dump all recorded method sources of the base class.
    kwn = knw()
    # print(kwn.get_entrance_function()) #todo : problem in indent
    print(kwn.get_all_function_code())
    # print(instantiate_subclasses(knw))
knw_in.py ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ import sys
3
+ sys.path.append('/Users/stephensun/Desktop/pypro/LAMBDA/knowledge_integration')
4
+ from sentence_transformers import SentenceTransformer, util
5
+ import numpy as np
6
+ # from knw import KNW_INJECTION, knowledge_injection
7
+ from prompt_engineering.prompts import PMT_KNW_IN_CORE, PMT_KNW_IN_FULL
8
+ from knowledge_integration.nearest_correlation_matrix import nearest_correlation_matrix
9
+ from knowledge_integration.nn_network import nn_networks
10
+ from knowledge_integration.pami import pattern_mining
11
+ from kernel import execute
12
+
13
+
14
+ KNW_INJECTION = {}
15
+
16
def knowledge_register():
    """Instantiate each built-in knowledge class and register it in KNW_INJECTION.

    Keys are name+description — the text later embedded for similarity search.
    """
    ncm = nearest_correlation_matrix()
    ncm_key = ncm.name+ncm.description
    KNW_INJECTION[ncm_key] = ncm
    nnn = nn_networks()
    nnn_key = nnn.name+nnn.description
    KNW_INJECTION[nnn_key] = nnn
    pami = pattern_mining()
    pami_key = pami.name+pami.description
    KNW_INJECTION[pami_key] = pami
26
+
27
+
28
+
29
# Initialize the sentence-embedding model used for knowledge retrieval.
model = SentenceTransformer('all-MiniLM-L6-v2')
31
+
32
def search_knowledge(user_input, knowledge_embeddings, knowledge_keys):
    """Return the (key, knowledge object) whose description best matches user_input.

    Cosine similarity of the query embedding against the pre-computed key
    embeddings; a best score not exceeding 0.3 counts as "no match" and
    (False, None) is returned.
    """
    input_embedding = model.encode(user_input, convert_to_tensor=True)  # embed the query
    # similarity against every registered knowledge key
    similarities_list = util.pytorch_cos_sim(input_embedding, knowledge_embeddings)
    if torch.max(similarities_list) > 0.3:
        best_match_idx = np.argmax(similarities_list.cpu())
        best_match_key = knowledge_keys[best_match_idx]
    else:
        best_match_key = False
    return (best_match_key, KNW_INJECTION[best_match_key]) if best_match_key else (False, None)
44
+
45
+
46
def format_code_snaps(knw, kernel):
    """Render a retrieved knowledge object into a prompt snippet.

    'full' mode embeds only the core code; 'core' mode first executes the
    runnable definitions on the kernel, then embeds both parts.

    :param knw: matched knw-subclass instance (shadows the class name; kept).
    :param kernel: execution kernel handed to execute().
    :raises ValueError: if the knowledge object's mode is unknown.
    """
    if knw.mode == 'full':
        core_code = knw.get_core_function()
        return PMT_KNW_IN_FULL.format(code=core_code)
    elif knw.mode == 'core':
        core_code = knw.get_core_function()
        runnable_code = knw.get_runnable_function()
        print("Knowledge_integration: core mode, runnable result: ", execute(runnable_code,kernel))
        retri_knw = PMT_KNW_IN_CORE.format(core=core_code, runnable=runnable_code)
        return retri_knw
    else:
        raise ValueError(f"Invalid mode: {knw.mode}, please choose from ['full', 'core'].")
58
+ # test_case = knw.get_test_case()
59
+ # return KNOWLEDGE_INJECTION_PMT_FIXED.format(test_case=test_case)
60
+
61
+
62
def retrieval_knowledge(instruction, kernel):
    """Match instruction against registered knowledge entries.

    Returns the formatted prompt snippet ('full' mode) or the core+runnable
    snippet ('core' mode, runnable part executed on the kernel), or None when
    nothing similar enough is retrieved.
    """
    knowledge_register()
    knowledge_keys = list(KNW_INJECTION.keys())
    knowledge_embeddings = model.encode(knowledge_keys, convert_to_tensor=True)
    best_key, best_knw_object = search_knowledge(instruction, knowledge_embeddings, knowledge_keys)
    if best_key:
        return format_code_snaps(best_knw_object, kernel)
    else:
        return None
71
+
72
+ # def execute_runnable(code):
73
+ # res_type, res = my_app.conv.run_code(code)
74
+
75
+
76
if __name__ == '__main__':
    # knowledge_register()
    # knowledge_keys = list(KNW_INJECTION.keys())
    # knowledge_embeddings = model.encode(knowledge_keys, convert_to_tensor=True)
    # user_input = "calculate nearest correlation matrix"
    # best_key, best_knw_object = search_knowledge(user_input)
    # print(best_key,best_knw_object)
    # print(f"Best match key: {best_key}")
    # print(format_code_snaps(best_knw_object))
    #print(retrieval_knowledge("calculate nearest correlation matrix", 'full'))
    # NOTE(review): retrieval_knowledge(instruction, kernel) takes two arguments;
    # the call below passes only the instruction and will raise TypeError — confirm/fix.
    print(retrieval_knowledge("Train a fixed points of nonnegative neural networks. Set parameters: networks: nn_sigmoid, learning rate: 5e-3, epochs: 30, wd: 0, b: 64"))
    #print(retrieval_knowledge("Use pattern mining to find frequent patterns in the dataset. Set parameters: fileURL: https://u-aizu.ac.jp/~udayrage/datasets/transactionalDatabases/Transactional_T10I4D100K.csv, minSup: 300."))
lambda_utils.py ADDED
@@ -0,0 +1,320 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+ from typing import Tuple, Any
3
+
4
+
5
def extract_code_in_one_cell(text: str, lang) -> tuple[bool, Any]:
    """Extract fenced ```<lang> ...``` code from text, merged into one cell.

    Returns (True, code) when at least one block is found — multiple blocks
    are concatenated in order — otherwise (False, '').

    Fix: ``lang`` was interpolated into the regex unescaped, so languages
    containing metacharacters (e.g. "c++") built a broken pattern; it is now
    escaped with re.escape. Behavior for plain names like "python" is
    unchanged.
    """
    pattern = r'```{lang}([^\n]*)(.*?)```'.format(lang=re.escape(lang))
    matches = re.findall(pattern, text, re.DOTALL)
    if len(matches) > 1:
        code_blocks = ''
        for match in matches:
            # match[0] is the rest of the fence line; match[1] is the body.
            code_blocks += match[1]
        return True, code_blocks
    elif len(matches):
        return True, matches[-1][1]
    else:
        return False, ''
20
+
21
+
22
def extract_code(text: str) -> tuple[bool, Any]:
    """Pull the contents of ```python fenced blocks out of *text*.

    Several blocks are concatenated in order of appearance; the first tuple
    element reports whether anything was found.
    """
    found = re.findall(r'```python([^\n]*)(.*?)```', text, re.DOTALL)
    if not found:
        return False, ''
    if len(found) == 1:
        # Single block: group 2 holds the body (group 1 is the fence-line rest).
        return True, found[-1][1]
    merged = ''.join(block[1] for block in found)
    return True, merged
37
+
38
def remove_code_blocks(text):
    """Strip fenced ``` blocks and the literal 'Execution result:' marker from text."""
    without_code = re.sub(r'(```.*?```)', '', text, flags=re.DOTALL)
    return without_code.replace("Execution result:", '')
44
+
45
def check_msg_ua(msg):
    """Normalize a role sequence for user/assistant alternation.

    Collapses consecutive duplicate entries, then appends 'assistant' if a
    non-empty result does not already end with it. An empty input yields an
    empty list.
    """
    deduped = []
    for role in msg:
        if not deduped or deduped[-1] != role:
            deduped.append(role)
    if deduped and deduped[-1] != 'assistant':
        deduped.append('assistant')
    return deduped
54
+
55
+
56
+
57
+ if __name__ == '__main__':
58
+ pmt = """
59
+ Retrieval: The retriever found the following pieces of code cloud address the problem. All functions are well-defined and have been executed in the back-end. So, you should directly refer to the core code and modify it as appropriate instead of re-implement any function in runnable function.
60
+
61
+ Core code (All functions have been well-defined in the runnable function):
62
+ ```core_function
63
+
64
+ args = argparse.ArgumentParser()
65
+ args.net = 'nn_sigmoid'
66
+ args.lr = 5e-3
67
+ args.epochs = 30
68
+ args.wd = 0
69
+ args.b = 64
70
+ train_nn_network(args)
71
+
72
+ ```
73
+
74
+ Runnable function (Have been executed in back-end):
75
+ ```runnable
76
+
77
+ import numpy as np
78
+ import scipy.io as sio
79
+ import scipy
80
+ import sys
81
+ import time
82
+ import argparse
83
+ import torch
84
+ import math
85
+ from torch import nn
86
+ from torch.nn.utils.parametrizations import spectral_norm
87
+ from pathlib import Path
88
+ from torch import optim
89
+ from torch.utils.data import DataLoader
90
+ from torchvision import transforms
91
+ from torchvision import datasets
92
+ from tqdm import tqdm
93
+
94
+ def initialize_weights(tensor):
95
+ return tensor.uniform_() * math.sqrt(0.25 / (tensor.shape[0] + tensor.shape[1]))
96
+
97
+ class _RRAutoencoder(nn.Module):
98
+ def __init__(self):
99
+ super().__init__()
100
+ self.linear_1 = nn.Linear(784, 200)
101
+ self.linear_2 = nn.Linear(200, 784)
102
+ self.encoder = self.linear_1
103
+ self.decoder = self.linear_2
104
+
105
+ def forward(self, x):
106
+ x = self.encoder(x)
107
+ x = self.decoder(x)
108
+
109
+ return x
110
+
111
+ def clamp(self):
112
+ pass
113
+
114
+ class _NNAutoencoder(_RRAutoencoder):
115
+ def __init__(self):
116
+ super().__init__()
117
+ self.linear_1.bias.data.zero_()
118
+ self.linear_2.bias.data.zero_()
119
+ self.linear_1.weight = nn.Parameter(
120
+ initialize_weights(self.linear_1.weight.data)
121
+ )
122
+ self.linear_2.weight = nn.Parameter(
123
+ initialize_weights(self.linear_2.weight.data)
124
+ )
125
+
126
+ def clamp(self):
127
+ self.linear_1.weight.data.clamp_(min=0)
128
+ self.linear_2.weight.data.clamp_(min=0)
129
+ self.linear_1.bias.data.clamp_(min=0)
130
+ self.linear_2.bias.data.clamp_(min=0)
131
+
132
+ class _PNAutoencoder(_NNAutoencoder):
133
+ def clamp(self):
134
+ self.linear_1.weight.data.clamp_(min=1e-3)
135
+ self.linear_2.weight.data.clamp_(min=1e-3)
136
+ self.linear_1.bias.data.clamp_(min=0)
137
+ self.linear_2.bias.data.clamp_(min=0)
138
+
139
+ class _NRAutoencoder(_NNAutoencoder):
140
+ def clamp(self):
141
+ self.linear_1.weight.data.clamp_(min=0)
142
+ self.linear_2.weight.data.clamp_(min=0)
143
+
144
+ class SigmoidNNAutoencoder(_NNAutoencoder):
145
+ def __init__(self):
146
+ super().__init__()
147
+ self.encoder = nn.Sequential(self.linear_1, nn.Sigmoid())
148
+ self.decoder = nn.Sequential(self.linear_2, nn.Sigmoid())
149
+
150
+ class TanhNNAutoencoder(_NNAutoencoder):
151
+ def __init__(self):
152
+ super().__init__()
153
+ self.encoder = nn.Sequential(self.linear_1, nn.Tanh())
154
+ self.decoder = nn.Sequential(self.linear_2, nn.Tanh())
155
+
156
+ class TanhPNAutoencoder(_PNAutoencoder):
157
+ def __init__(self):
158
+ super().__init__()
159
+ self.encoder = nn.Sequential(self.linear_1, nn.Tanh())
160
+ self.decoder = nn.Sequential(self.linear_2, nn.Tanh())
161
+
162
+ class ReLUNNAutoencoder(_NNAutoencoder):
163
+ def __init__(self):
164
+ super().__init__()
165
+ self.linear_1 = spectral_norm(self.linear_1)
166
+ self.linear_2 = spectral_norm(self.linear_2)
167
+ self.encoder = nn.Sequential(self.linear_1, nn.ReLU())
168
+ self.decoder = nn.Sequential(self.linear_2, nn.ReLU())
169
+
170
+ def clamp(self):
171
+ self.linear_1.parametrizations.weight.original.data.clamp_(min=0)
172
+ self.linear_2.parametrizations.weight.original.data.clamp_(min=0)
173
+ self.linear_1.bias.data.clamp_(min=0)
174
+ self.linear_2.bias.data.clamp_(min=0)
175
+
176
+ class ReLUPNAutoencoder(_PNAutoencoder):
177
+ def __init__(self):
178
+ super().__init__()
179
+ self.linear_1 = spectral_norm(self.linear_1)
180
+ self.linear_2 = spectral_norm(self.linear_2)
181
+ self.encoder = nn.Sequential(self.linear_1, nn.ReLU())
182
+ self.decoder = nn.Sequential(self.linear_2, nn.ReLU())
183
+
184
+ def clamp(self):
185
+ self.linear_1.parametrizations.weight.original.data.clamp_(min=1e-3)
186
+ self.linear_2.parametrizations.weight.original.data.clamp_(min=1e-3)
187
+ self.linear_1.bias.data.clamp_(min=0)
188
+ self.linear_2.bias.data.clamp_(min=0)
189
+
190
+
191
+ class TanhSwishNNAutoencoder(_NNAutoencoder):
192
+ def __init__(self):
193
+ super().__init__()
194
+ self.encoder = nn.Sequential(self.linear_1, nn.Tanh())
195
+ self.decoder = nn.Sequential(self.linear_2, nn.SiLU())
196
+
197
+ class ReLUSigmoidNRAutoencoder(_NRAutoencoder):
198
+ def __init__(self):
199
+ super().__init__()
200
+ self.encoder = nn.Sequential(self.linear_1, nn.ReLU())
201
+ self.decoder = nn.Sequential(self.linear_2, nn.Sigmoid())
202
+
203
+ class ReLUSigmoidRRAutoencoder(_RRAutoencoder):
204
+ def __init__(self):
205
+ super().__init__()
206
+ self.encoder = nn.Sequential(self.linear_1, nn.ReLU())
207
+ self.decoder = nn.Sequential(self.linear_2, nn.Sigmoid())
208
+
209
+ def get_network(name):
210
+ match name:
211
+ case "nn_sigmoid":
212
+ return SigmoidNNAutoencoder()
213
+ case "nn_tanh":
214
+ return TanhNNAutoencoder()
215
+ case "pn_tanh":
216
+ return TanhPNAutoencoder()
217
+ case "nn_relu":
218
+ return ReLUNNAutoencoder()
219
+ case "pn_relu":
220
+ return ReLUPNAutoencoder()
221
+ case "nn_tanh_swish":
222
+ return TanhSwishNNAutoencoder()
223
+ case "nr_relu_sigmoid":
224
+ return ReLUSigmoidNRAutoencoder()
225
+ case "rr_relu_sigmoid":
226
+ return ReLUSigmoidRRAutoencoder()
227
+ case _:
228
+ raise NotImplementedError(
229
+ f"Autoencoder of name '{name}' currently is not supported"
230
+ )
231
+
232
+ class AverageMeter(object):
233
+
234
+ def __init__(self):
235
+ self.reset()
236
+
237
+ def reset(self):
238
+ self.val = 0
239
+ self.avg = 0
240
+ self.sum = 0
241
+ self.count = 0
242
+
243
+ def update(self, val, n=1):
244
+ self.val = val
245
+ self.sum += val * n
246
+ self.count += n
247
+ self.avg = self.sum / self.count
248
+
249
+ def epoch(loader, model, device, criterion, opt=None):
250
+ losses = AverageMeter()
251
+
252
+ if opt is None:
253
+ model.eval()
254
+ else:
255
+ model.train()
256
+ for inputs, _ in tqdm(loader, leave=False):
257
+ inputs = inputs.view(-1, 28 * 28).to(device)
258
+ outputs = model(inputs)
259
+ loss = criterion(outputs, inputs)
260
+ if opt:
261
+ opt.zero_grad(set_to_none=True)
262
+ loss.backward()
263
+ opt.step()
264
+ model.clamp()
265
+
266
+ losses.update(loss.item(), inputs.size(0))
267
+
268
+ return losses.avg
269
+
270
+ def train_nn_network(args):
271
+ # p = Path(__file__)
272
+ # weights_path = f"{p.parent}/weights"
273
+ # Path(weights_path).mkdir(parents=True, exist_ok=True)
274
+
275
+ device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
276
+ model = get_network(args.net)
277
+ model.to(device)
278
+ mnist_train = datasets.MNIST(
279
+ ".", train=True, download=True, transform=transforms.ToTensor()
280
+ )
281
+ mnist_test = datasets.MNIST(
282
+ ".", train=False, download=True, transform=transforms.ToTensor()
283
+ )
284
+ train_loader = DataLoader(
285
+ mnist_train, batch_size=args.b, shuffle=True, num_workers=4, pin_memory=True
286
+ )
287
+ test_loader = DataLoader(
288
+ mnist_test, batch_size=args.b, shuffle=False, num_workers=4, pin_memory=True
289
+ )
290
+ opt = optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.wd)
291
+ criterion = nn.MSELoss()
292
+
293
+ best_loss = None
294
+
295
+ for i in range(1, args.epochs + 1):
296
+ train_loss = epoch(train_loader, model, device, criterion, opt)
297
+ test_loss = epoch(test_loader, model, device, criterion)
298
+ if best_loss is None or best_loss > test_loss:
299
+ best_loss = test_loss
300
+ # torch.save(model.state_dict(), f"{weights_path}/{args.net}.pth")
301
+
302
+ print(f"Epoch: {i} | Train Loss: {train_loss:.4f} | Test Loss: {test_loss:.4f}")
303
+
304
+
305
+ ```
306
+ Your modified code:
307
+ ```python
308
+ import argparse
309
+
310
+ args = argparse.ArgumentParser()
311
+ args.net = 'nn_sigmoid'
312
+ args.lr = 5e-3
313
+ args.epochs = 5 # Changed epochs to 5 as per the request
314
+ args.wd = 0
315
+ args.b = 64
316
+ train_nn_network(args)
317
+ ```
318
+ """
319
+ print(extract_code(pmt))
320
+ print(extract_code_in_one_cell(pmt,'python'))
myenv/Lib/site-packages/pip-23.2.1.dist-info/AUTHORS.txt ADDED
@@ -0,0 +1,738 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ @Switch01
2
+ A_Rog
3
+ Aakanksha Agrawal
4
+ Abhinav Sagar
5
+ ABHYUDAY PRATAP SINGH
6
+ abs51295
7
+ AceGentile
8
+ Adam Chainz
9
+ Adam Tse
10
+ Adam Wentz
11
+ admin
12
+ Adrien Morison
13
+ ahayrapetyan
14
+ Ahilya
15
+ AinsworthK
16
+ Akash Srivastava
17
+ Alan Yee
18
+ Albert Tugushev
19
+ Albert-Guan
20
+ albertg
21
+ Alberto Sottile
22
+ Aleks Bunin
23
+ Alethea Flowers
24
+ Alex Gaynor
25
+ Alex Grönholm
26
+ Alex Hedges
27
+ Alex Loosley
28
+ Alex Morega
29
+ Alex Stachowiak
30
+ Alexander Shtyrov
31
+ Alexandre Conrad
32
+ Alexey Popravka
33
+ Alli
34
+ Ami Fischman
35
+ Ananya Maiti
36
+ Anatoly Techtonik
37
+ Anders Kaseorg
38
+ Andre Aguiar
39
+ Andreas Lutro
40
+ Andrei Geacar
41
+ Andrew Gaul
42
+ Andrew Shymanel
43
+ Andrey Bienkowski
44
+ Andrey Bulgakov
45
+ Andrés Delfino
46
+ Andy Freeland
47
+ Andy Kluger
48
+ Ani Hayrapetyan
49
+ Aniruddha Basak
50
+ Anish Tambe
51
+ Anrs Hu
52
+ Anthony Sottile
53
+ Antoine Musso
54
+ Anton Ovchinnikov
55
+ Anton Patrushev
56
+ Antonio Alvarado Hernandez
57
+ Antony Lee
58
+ Antti Kaihola
59
+ Anubhav Patel
60
+ Anudit Nagar
61
+ Anuj Godase
62
+ AQNOUCH Mohammed
63
+ AraHaan
64
+ Arindam Choudhury
65
+ Armin Ronacher
66
+ Artem
67
+ Arun Babu Neelicattu
68
+ Ashley Manton
69
+ Ashwin Ramaswami
70
+ atse
71
+ Atsushi Odagiri
72
+ Avinash Karhana
73
+ Avner Cohen
74
+ Awit (Ah-Wit) Ghirmai
75
+ Baptiste Mispelon
76
+ Barney Gale
77
+ barneygale
78
+ Bartek Ogryczak
79
+ Bastian Venthur
80
+ Ben Bodenmiller
81
+ Ben Darnell
82
+ Ben Hoyt
83
+ Ben Mares
84
+ Ben Rosser
85
+ Bence Nagy
86
+ Benjamin Peterson
87
+ Benjamin VanEvery
88
+ Benoit Pierre
89
+ Berker Peksag
90
+ Bernard
91
+ Bernard Tyers
92
+ Bernardo B. Marques
93
+ Bernhard M. Wiedemann
94
+ Bertil Hatt
95
+ Bhavam Vidyarthi
96
+ Blazej Michalik
97
+ Bogdan Opanchuk
98
+ BorisZZZ
99
+ Brad Erickson
100
+ Bradley Ayers
101
+ Brandon L. Reiss
102
+ Brandt Bucher
103
+ Brett Randall
104
+ Brett Rosen
105
+ Brian Cristante
106
+ Brian Rosner
107
+ briantracy
108
+ BrownTruck
109
+ Bruno Oliveira
110
+ Bruno Renié
111
+ Bruno S
112
+ Bstrdsmkr
113
+ Buck Golemon
114
+ burrows
115
+ Bussonnier Matthias
116
+ bwoodsend
117
+ c22
118
+ Caleb Martinez
119
+ Calvin Smith
120
+ Carl Meyer
121
+ Carlos Liam
122
+ Carol Willing
123
+ Carter Thayer
124
+ Cass
125
+ Chandrasekhar Atina
126
+ Chih-Hsuan Yen
127
+ Chris Brinker
128
+ Chris Hunt
129
+ Chris Jerdonek
130
+ Chris Kuehl
131
+ Chris McDonough
132
+ Chris Pawley
133
+ Chris Pryer
134
+ Chris Wolfe
135
+ Christian Clauss
136
+ Christian Heimes
137
+ Christian Oudard
138
+ Christoph Reiter
139
+ Christopher Hunt
140
+ Christopher Snyder
141
+ cjc7373
142
+ Clark Boylan
143
+ Claudio Jolowicz
144
+ Clay McClure
145
+ Cody
146
+ Cody Soyland
147
+ Colin Watson
148
+ Collin Anderson
149
+ Connor Osborn
150
+ Cooper Lees
151
+ Cooper Ry Lees
152
+ Cory Benfield
153
+ Cory Wright
154
+ Craig Kerstiens
155
+ Cristian Sorinel
156
+ Cristina
157
+ Cristina Muñoz
158
+ Curtis Doty
159
+ cytolentino
160
+ Daan De Meyer
161
+ Damian
162
+ Damian Quiroga
163
+ Damian Shaw
164
+ Dan Black
165
+ Dan Savilonis
166
+ Dan Sully
167
+ Dane Hillard
168
+ daniel
169
+ Daniel Collins
170
+ Daniel Hahler
171
+ Daniel Holth
172
+ Daniel Jost
173
+ Daniel Katz
174
+ Daniel Shaulov
175
+ Daniele Esposti
176
+ Daniele Nicolodi
177
+ Daniele Procida
178
+ Daniil Konovalenko
179
+ Danny Hermes
180
+ Danny McClanahan
181
+ Darren Kavanagh
182
+ Dav Clark
183
+ Dave Abrahams
184
+ Dave Jones
185
+ David Aguilar
186
+ David Black
187
+ David Bordeynik
188
+ David Caro
189
+ David D Lowe
190
+ David Evans
191
+ David Hewitt
192
+ David Linke
193
+ David Poggi
194
+ David Pursehouse
195
+ David Runge
196
+ David Tucker
197
+ David Wales
198
+ Davidovich
199
+ Deepak Sharma
200
+ Deepyaman Datta
201
+ Denise Yu
202
+ derwolfe
203
+ Desetude
204
+ Devesh Kumar Singh
205
+ Diego Caraballo
206
+ Diego Ramirez
207
+ DiegoCaraballo
208
+ Dimitri Merejkowsky
209
+ Dimitri Papadopoulos
210
+ Dirk Stolle
211
+ Dmitry Gladkov
212
+ Dmitry Volodin
213
+ Domen Kožar
214
+ Dominic Davis-Foster
215
+ Donald Stufft
216
+ Dongweiming
217
+ doron zarhi
218
+ Dos Moonen
219
+ Douglas Thor
220
+ DrFeathers
221
+ Dustin Ingram
222
+ Dwayne Bailey
223
+ Ed Morley
224
+ Edgar Ramírez
225
+ Ee Durbin
226
+ Eitan Adler
227
+ ekristina
228
+ elainechan
229
+ Eli Schwartz
230
+ Elisha Hollander
231
+ Ellen Marie Dash
232
+ Emil Burzo
233
+ Emil Styrke
234
+ Emmanuel Arias
235
+ Endoh Takanao
236
+ enoch
237
+ Erdinc Mutlu
238
+ Eric Cousineau
239
+ Eric Gillingham
240
+ Eric Hanchrow
241
+ Eric Hopper
242
+ Erik M. Bray
243
+ Erik Rose
244
+ Erwin Janssen
245
+ Eugene Vereshchagin
246
+ everdimension
247
+ Federico
248
+ Felipe Peter
249
+ Felix Yan
250
+ fiber-space
251
+ Filip Kokosiński
252
+ Filipe Laíns
253
+ Finn Womack
254
+ finnagin
255
+ Florian Briand
256
+ Florian Rathgeber
257
+ Francesco
258
+ Francesco Montesano
259
+ Frost Ming
260
+ Gabriel Curio
261
+ Gabriel de Perthuis
262
+ Garry Polley
263
+ gavin
264
+ gdanielson
265
+ Geoffrey Sneddon
266
+ George Song
267
+ Georgi Valkov
268
+ Georgy Pchelkin
269
+ ghost
270
+ Giftlin Rajaiah
271
+ gizmoguy1
272
+ gkdoc
273
+ Godefroid Chapelle
274
+ Gopinath M
275
+ GOTO Hayato
276
+ gousaiyang
277
+ gpiks
278
+ Greg Roodt
279
+ Greg Ward
280
+ Guilherme Espada
281
+ Guillaume Seguin
282
+ gutsytechster
283
+ Guy Rozendorn
284
+ Guy Tuval
285
+ gzpan123
286
+ Hanjun Kim
287
+ Hari Charan
288
+ Harsh Vardhan
289
+ harupy
290
+ Harutaka Kawamura
291
+ hauntsaninja
292
+ Henrich Hartzer
293
+ Henry Schreiner
294
+ Herbert Pfennig
295
+ Holly Stotelmyer
296
+ Honnix
297
+ Hsiaoming Yang
298
+ Hugo Lopes Tavares
299
+ Hugo van Kemenade
300
+ Hugues Bruant
301
+ Hynek Schlawack
302
+ Ian Bicking
303
+ Ian Cordasco
304
+ Ian Lee
305
+ Ian Stapleton Cordasco
306
+ Ian Wienand
307
+ Igor Kuzmitshov
308
+ Igor Sobreira
309
+ Ilan Schnell
310
+ Illia Volochii
311
+ Ilya Baryshev
312
+ Inada Naoki
313
+ Ionel Cristian Mărieș
314
+ Ionel Maries Cristian
315
+ Ivan Pozdeev
316
+ Jacob Kim
317
+ Jacob Walls
318
+ Jaime Sanz
319
+ jakirkham
320
+ Jakub Kuczys
321
+ Jakub Stasiak
322
+ Jakub Vysoky
323
+ Jakub Wilk
324
+ James Cleveland
325
+ James Curtin
326
+ James Firth
327
+ James Gerity
328
+ James Polley
329
+ Jan Pokorný
330
+ Jannis Leidel
331
+ Jarek Potiuk
332
+ jarondl
333
+ Jason Curtis
334
+ Jason R. Coombs
335
+ JasonMo
336
+ JasonMo1
337
+ Jay Graves
338
+ Jean-Christophe Fillion-Robin
339
+ Jeff Barber
340
+ Jeff Dairiki
341
+ Jelmer Vernooij
342
+ jenix21
343
+ Jeremy Stanley
344
+ Jeremy Zafran
345
+ Jesse Rittner
346
+ Jiashuo Li
347
+ Jim Fisher
348
+ Jim Garrison
349
+ Jiun Bae
350
+ Jivan Amara
351
+ Joe Bylund
352
+ Joe Michelini
353
+ John Paton
354
+ John T. Wodder II
355
+ John-Scott Atlakson
356
+ johnthagen
357
+ Jon Banafato
358
+ Jon Dufresne
359
+ Jon Parise
360
+ Jonas Nockert
361
+ Jonathan Herbert
362
+ Joonatan Partanen
363
+ Joost Molenaar
364
+ Jorge Niedbalski
365
+ Joseph Bylund
366
+ Joseph Long
367
+ Josh Bronson
368
+ Josh Hansen
369
+ Josh Schneier
370
+ Juan Luis Cano Rodríguez
371
+ Juanjo Bazán
372
+ Judah Rand
373
+ Julian Berman
374
+ Julian Gethmann
375
+ Julien Demoor
376
+ Jussi Kukkonen
377
+ jwg4
378
+ Jyrki Pulliainen
379
+ Kai Chen
380
+ Kai Mueller
381
+ Kamal Bin Mustafa
382
+ kasium
383
+ kaustav haldar
384
+ keanemind
385
+ Keith Maxwell
386
+ Kelsey Hightower
387
+ Kenneth Belitzky
388
+ Kenneth Reitz
389
+ Kevin Burke
390
+ Kevin Carter
391
+ Kevin Frommelt
392
+ Kevin R Patterson
393
+ Kexuan Sun
394
+ Kit Randel
395
+ Klaas van Schelven
396
+ KOLANICH
397
+ kpinc
398
+ Krishna Oza
399
+ Kumar McMillan
400
+ Kyle Persohn
401
+ lakshmanaram
402
+ Laszlo Kiss-Kollar
403
+ Laurent Bristiel
404
+ Laurent LAPORTE
405
+ Laurie O
406
+ Laurie Opperman
407
+ layday
408
+ Leon Sasson
409
+ Lev Givon
410
+ Lincoln de Sousa
411
+ Lipis
412
+ lorddavidiii
413
+ Loren Carvalho
414
+ Lucas Cimon
415
+ Ludovic Gasc
416
+ Lukas Juhrich
417
+ Luke Macken
418
+ Luo Jiebin
419
+ luojiebin
420
+ luz.paz
421
+ László Kiss Kollár
422
+ M00nL1ght
423
+ Marc Abramowitz
424
+ Marc Tamlyn
425
+ Marcus Smith
426
+ Mariatta
427
+ Mark Kohler
428
+ Mark Williams
429
+ Markus Hametner
430
+ Martey Dodoo
431
+ Martin Fischer
432
+ Martin Häcker
433
+ Martin Pavlasek
434
+ Masaki
435
+ Masklinn
436
+ Matej Stuchlik
437
+ Mathew Jennings
438
+ Mathieu Bridon
439
+ Mathieu Kniewallner
440
+ Matt Bacchi
441
+ Matt Good
442
+ Matt Maker
443
+ Matt Robenolt
444
+ matthew
445
+ Matthew Einhorn
446
+ Matthew Feickert
447
+ Matthew Gilliard
448
+ Matthew Iversen
449
+ Matthew Treinish
450
+ Matthew Trumbell
451
+ Matthew Willson
452
+ Matthias Bussonnier
453
+ mattip
454
+ Maurits van Rees
455
+ Max W Chase
456
+ Maxim Kurnikov
457
+ Maxime Rouyrre
458
+ mayeut
459
+ mbaluna
460
+ mdebi
461
+ memoselyk
462
+ meowmeowcat
463
+ Michael
464
+ Michael Aquilina
465
+ Michael E. Karpeles
466
+ Michael Klich
467
+ Michael Mintz
468
+ Michael Williamson
469
+ michaelpacer
470
+ Michał Górny
471
+ Mickaël Schoentgen
472
+ Miguel Araujo Perez
473
+ Mihir Singh
474
+ Mike
475
+ Mike Hendricks
476
+ Min RK
477
+ MinRK
478
+ Miro Hrončok
479
+ Monica Baluna
480
+ montefra
481
+ Monty Taylor
482
+ Muha Ajjan‮
483
+ Nadav Wexler
484
+ Nahuel Ambrosini
485
+ Nate Coraor
486
+ Nate Prewitt
487
+ Nathan Houghton
488
+ Nathaniel J. Smith
489
+ Nehal J Wani
490
+ Neil Botelho
491
+ Nguyễn Gia Phong
492
+ Nicholas Serra
493
+ Nick Coghlan
494
+ Nick Stenning
495
+ Nick Timkovich
496
+ Nicolas Bock
497
+ Nicole Harris
498
+ Nikhil Benesch
499
+ Nikhil Ladha
500
+ Nikita Chepanov
501
+ Nikolay Korolev
502
+ Nipunn Koorapati
503
+ Nitesh Sharma
504
+ Niyas Sait
505
+ Noah
506
+ Noah Gorny
507
+ Nowell Strite
508
+ NtaleGrey
509
+ nvdv
510
+ OBITORASU
511
+ Ofek Lev
512
+ ofrinevo
513
+ Oliver Freund
514
+ Oliver Jeeves
515
+ Oliver Mannion
516
+ Oliver Tonnhofer
517
+ Olivier Girardot
518
+ Olivier Grisel
519
+ Ollie Rutherfurd
520
+ OMOTO Kenji
521
+ Omry Yadan
522
+ onlinejudge95
523
+ Oren Held
524
+ Oscar Benjamin
525
+ Oz N Tiram
526
+ Pachwenko
527
+ Patrick Dubroy
528
+ Patrick Jenkins
529
+ Patrick Lawson
530
+ patricktokeeffe
531
+ Patrik Kopkan
532
+ Paul Kehrer
533
+ Paul Moore
534
+ Paul Nasrat
535
+ Paul Oswald
536
+ Paul van der Linden
537
+ Paulus Schoutsen
538
+ Pavel Safronov
539
+ Pavithra Eswaramoorthy
540
+ Pawel Jasinski
541
+ Paweł Szramowski
542
+ Pekka Klärck
543
+ Peter Gessler
544
+ Peter Lisák
545
+ Peter Waller
546
+ petr-tik
547
+ Phaneendra Chiruvella
548
+ Phil Elson
549
+ Phil Freo
550
+ Phil Pennock
551
+ Phil Whelan
552
+ Philip Jägenstedt
553
+ Philip Molloy
554
+ Philippe Ombredanne
555
+ Pi Delport
556
+ Pierre-Yves Rofes
557
+ Pieter Degroote
558
+ pip
559
+ Prabakaran Kumaresshan
560
+ Prabhjyotsing Surjit Singh Sodhi
561
+ Prabhu Marappan
562
+ Pradyun Gedam
563
+ Prashant Sharma
564
+ Pratik Mallya
565
+ pre-commit-ci[bot]
566
+ Preet Thakkar
567
+ Preston Holmes
568
+ Przemek Wrzos
569
+ Pulkit Goyal
570
+ q0w
571
+ Qiangning Hong
572
+ Quentin Lee
573
+ Quentin Pradet
574
+ R. David Murray
575
+ Rafael Caricio
576
+ Ralf Schmitt
577
+ Razzi Abuissa
578
+ rdb
579
+ Reece Dunham
580
+ Remi Rampin
581
+ Rene Dudfield
582
+ Riccardo Magliocchetti
583
+ Riccardo Schirone
584
+ Richard Jones
585
+ Richard Si
586
+ Ricky Ng-Adam
587
+ Rishi
588
+ RobberPhex
589
+ Robert Collins
590
+ Robert McGibbon
591
+ Robert Pollak
592
+ Robert T. McGibbon
593
+ robin elisha robinson
594
+ Roey Berman
595
+ Rohan Jain
596
+ Roman Bogorodskiy
597
+ Roman Donchenko
598
+ Romuald Brunet
599
+ ronaudinho
600
+ Ronny Pfannschmidt
601
+ Rory McCann
602
+ Ross Brattain
603
+ Roy Wellington Ⅳ
604
+ Ruairidh MacLeod
605
+ Russell Keith-Magee
606
+ Ryan Shepherd
607
+ Ryan Wooden
608
+ ryneeverett
609
+ Sachi King
610
+ Salvatore Rinchiera
611
+ sandeepkiran-js
612
+ Savio Jomton
613
+ schlamar
614
+ Scott Kitterman
615
+ Sean
616
+ seanj
617
+ Sebastian Jordan
618
+ Sebastian Schaetz
619
+ Segev Finer
620
+ SeongSoo Cho
621
+ Sergey Vasilyev
622
+ Seth Michael Larson
623
+ Seth Woodworth
624
+ Shantanu
625
+ shireenrao
626
+ Shivansh-007
627
+ Shlomi Fish
628
+ Shovan Maity
629
+ Simeon Visser
630
+ Simon Cross
631
+ Simon Pichugin
632
+ sinoroc
633
+ sinscary
634
+ snook92
635
+ socketubs
636
+ Sorin Sbarnea
637
+ Srinivas Nyayapati
638
+ Stavros Korokithakis
639
+ Stefan Scherfke
640
+ Stefano Rivera
641
+ Stephan Erb
642
+ Stephen Rosen
643
+ stepshal
644
+ Steve (Gadget) Barnes
645
+ Steve Barnes
646
+ Steve Dower
647
+ Steve Kowalik
648
+ Steven Myint
649
+ Steven Silvester
650
+ stonebig
651
+ Stéphane Bidoul
652
+ Stéphane Bidoul (ACSONE)
653
+ Stéphane Klein
654
+ Sumana Harihareswara
655
+ Surbhi Sharma
656
+ Sviatoslav Sydorenko
657
+ Swat009
658
+ Sylvain
659
+ Takayuki SHIMIZUKAWA
660
+ Taneli Hukkinen
661
+ tbeswick
662
+ Thiago
663
+ Thijs Triemstra
664
+ Thomas Fenzl
665
+ Thomas Grainger
666
+ Thomas Guettler
667
+ Thomas Johansson
668
+ Thomas Kluyver
669
+ Thomas Smith
670
+ Thomas VINCENT
671
+ Tim D. Smith
672
+ Tim Gates
673
+ Tim Harder
674
+ Tim Heap
675
+ tim smith
676
+ tinruufu
677
+ Tobias Hermann
678
+ Tom Forbes
679
+ Tom Freudenheim
680
+ Tom V
681
+ Tomas Hrnciar
682
+ Tomas Orsava
683
+ Tomer Chachamu
684
+ Tommi Enenkel | AnB
685
+ Tomáš Hrnčiar
686
+ Tony Beswick
687
+ Tony Narlock
688
+ Tony Zhaocheng Tan
689
+ TonyBeswick
690
+ toonarmycaptain
691
+ Toshio Kuratomi
692
+ toxinu
693
+ Travis Swicegood
694
+ Tushar Sadhwani
695
+ Tzu-ping Chung
696
+ Valentin Haenel
697
+ Victor Stinner
698
+ victorvpaulo
699
+ Vikram - Google
700
+ Viktor Szépe
701
+ Ville Skyttä
702
+ Vinay Sajip
703
+ Vincent Philippon
704
+ Vinicyus Macedo
705
+ Vipul Kumar
706
+ Vitaly Babiy
707
+ Vladimir Rutsky
708
+ W. Trevor King
709
+ Wil Tan
710
+ Wilfred Hughes
711
+ William Edwards
712
+ William ML Leslie
713
+ William T Olson
714
+ William Woodruff
715
+ Wilson Mo
716
+ wim glenn
717
+ Winson Luk
718
+ Wolfgang Maier
719
+ Wu Zhenyu
720
+ XAMES3
721
+ Xavier Fernandez
722
+ xoviat
723
+ xtreak
724
+ YAMAMOTO Takashi
725
+ Yen Chi Hsuan
726
+ Yeray Diaz Diaz
727
+ Yoval P
728
+ Yu Jian
729
+ Yuan Jing Vincent Yan
730
+ Yusuke Hayashi
731
+ Zearin
732
+ Zhiping Deng
733
+ ziebam
734
+ Zvezdan Petkovic
735
+ Łukasz Langa
736
+ Роман Донченко
737
+ Семён Марьясин
738
+ ‮rekcäH nitraM‮
myenv/Lib/site-packages/pip-23.2.1.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
myenv/Lib/site-packages/pip-23.2.1.dist-info/LICENSE.txt ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Copyright (c) 2008-present The pip developers (see AUTHORS.txt file)
2
+
3
+ Permission is hereby granted, free of charge, to any person obtaining
4
+ a copy of this software and associated documentation files (the
5
+ "Software"), to deal in the Software without restriction, including
6
+ without limitation the rights to use, copy, modify, merge, publish,
7
+ distribute, sublicense, and/or sell copies of the Software, and to
8
+ permit persons to whom the Software is furnished to do so, subject to
9
+ the following conditions:
10
+
11
+ The above copyright notice and this permission notice shall be
12
+ included in all copies or substantial portions of the Software.
13
+
14
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
15
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
17
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
18
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
19
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
20
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
myenv/Lib/site-packages/pip-23.2.1.dist-info/METADATA ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: pip
3
+ Version: 23.2.1
4
+ Summary: The PyPA recommended tool for installing Python packages.
5
+ Home-page: https://pip.pypa.io/
6
+ Author: The pip developers
7
+ Author-email: [email protected]
8
+ License: MIT
9
+ Project-URL: Documentation, https://pip.pypa.io
10
+ Project-URL: Source, https://github.com/pypa/pip
11
+ Project-URL: Changelog, https://pip.pypa.io/en/stable/news/
12
+ Classifier: Development Status :: 5 - Production/Stable
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: License :: OSI Approved :: MIT License
15
+ Classifier: Topic :: Software Development :: Build Tools
16
+ Classifier: Programming Language :: Python
17
+ Classifier: Programming Language :: Python :: 3
18
+ Classifier: Programming Language :: Python :: 3 :: Only
19
+ Classifier: Programming Language :: Python :: 3.7
20
+ Classifier: Programming Language :: Python :: 3.8
21
+ Classifier: Programming Language :: Python :: 3.9
22
+ Classifier: Programming Language :: Python :: 3.10
23
+ Classifier: Programming Language :: Python :: 3.11
24
+ Classifier: Programming Language :: Python :: 3.12
25
+ Classifier: Programming Language :: Python :: Implementation :: CPython
26
+ Classifier: Programming Language :: Python :: Implementation :: PyPy
27
+ Requires-Python: >=3.7
28
+ License-File: LICENSE.txt
29
+ License-File: AUTHORS.txt
30
+
31
+ pip - The Python Package Installer
32
+ ==================================
33
+
34
+ .. image:: https://img.shields.io/pypi/v/pip.svg
35
+ :target: https://pypi.org/project/pip/
36
+
37
+ .. image:: https://readthedocs.org/projects/pip/badge/?version=latest
38
+ :target: https://pip.pypa.io/en/latest
39
+
40
+ pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
41
+
42
+ Please take a look at our documentation for how to install and use pip:
43
+
44
+ * `Installation`_
45
+ * `Usage`_
46
+
47
+ We release updates regularly, with a new version every 3 months. Find more details in our documentation:
48
+
49
+ * `Release notes`_
50
+ * `Release process`_
51
+
52
+ In pip 20.3, we've `made a big improvement to the heart of pip`_; `learn more`_. We want your input, so `sign up for our user experience research studies`_ to help us do it right.
53
+
54
+ **Note**: pip 21.0, in January 2021, removed Python 2 support, per pip's `Python 2 support policy`_. Please migrate to Python 3.
55
+
56
+ If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:
57
+
58
+ * `Issue tracking`_
59
+ * `Discourse channel`_
60
+ * `User IRC`_
61
+
62
+ If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms:
63
+
64
+ * `GitHub page`_
65
+ * `Development documentation`_
66
+ * `Development IRC`_
67
+
68
+ Code of Conduct
69
+ ---------------
70
+
71
+ Everyone interacting in the pip project's codebases, issue trackers, chat
72
+ rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
73
+
74
+ .. _package installer: https://packaging.python.org/guides/tool-recommendations/
75
+ .. _Python Package Index: https://pypi.org
76
+ .. _Installation: https://pip.pypa.io/en/stable/installation/
77
+ .. _Usage: https://pip.pypa.io/en/stable/
78
+ .. _Release notes: https://pip.pypa.io/en/stable/news.html
79
+ .. _Release process: https://pip.pypa.io/en/latest/development/release-process/
80
+ .. _GitHub page: https://github.com/pypa/pip
81
+ .. _Development documentation: https://pip.pypa.io/en/latest/development
82
+ .. _made a big improvement to the heart of pip: https://pyfound.blogspot.com/2020/11/pip-20-3-new-resolver.html
83
+ .. _learn more: https://pip.pypa.io/en/latest/user_guide/#changes-to-the-pip-dependency-resolver-in-20-3-2020
84
+ .. _sign up for our user experience research studies: https://pyfound.blogspot.com/2020/03/new-pip-resolver-to-roll-out-this-year.html
85
+ .. _Python 2 support policy: https://pip.pypa.io/en/latest/development/release-process/#python-2-support
86
+ .. _Issue tracking: https://github.com/pypa/pip/issues
87
+ .. _Discourse channel: https://discuss.python.org/c/packaging
88
+ .. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa
89
+ .. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev
90
+ .. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
myenv/Lib/site-packages/pip-23.2.1.dist-info/RECORD ADDED
@@ -0,0 +1,1003 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ../../Scripts/pip.exe,sha256=FkUj2L0bd4Vcqtc1tWAMu-IfI39TF00ODps_3eV6Etc,108434
2
+ ../../Scripts/pip3.12.exe,sha256=FkUj2L0bd4Vcqtc1tWAMu-IfI39TF00ODps_3eV6Etc,108434
3
+ ../../Scripts/pip3.exe,sha256=FkUj2L0bd4Vcqtc1tWAMu-IfI39TF00ODps_3eV6Etc,108434
4
+ pip-23.2.1.dist-info/AUTHORS.txt,sha256=Pd_qYtjluu4WDft2A179dPtIvwYVBNtDfccCitVRMQM,10082
5
+ pip-23.2.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
6
+ pip-23.2.1.dist-info/LICENSE.txt,sha256=Y0MApmnUmurmWxLGxIySTFGkzfPR_whtw0VtyLyqIQQ,1093
7
+ pip-23.2.1.dist-info/METADATA,sha256=yHPLQvsD1b6f-zdCQWMibZXbsAjs886JMSh3C0oxRhQ,4239
8
+ pip-23.2.1.dist-info/RECORD,,
9
+ pip-23.2.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
+ pip-23.2.1.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
11
+ pip-23.2.1.dist-info/entry_points.txt,sha256=xg35gOct0aY8S3ftLtweJ0uw3KBAIVyW4k-0Jx1rkNE,125
12
+ pip-23.2.1.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
13
+ pip/__init__.py,sha256=hELWH3UN2ilBntczbn1BJOIzJEoiE8w9H-gsR5TeuEk,357
14
+ pip/__main__.py,sha256=WzbhHXTbSE6gBY19mNN9m4s5o_365LOvTYSgqgbdBhE,854
15
+ pip/__pip-runner__.py,sha256=EnrfKmKMzWAdqg_JicLCOP9Y95Ux7zHh4ObvqLtQcjo,1444
16
+ pip/__pycache__/__init__.cpython-312.pyc,,
17
+ pip/__pycache__/__main__.cpython-312.pyc,,
18
+ pip/__pycache__/__pip-runner__.cpython-312.pyc,,
19
+ pip/_internal/__init__.py,sha256=nnFCuxrPMgALrIDxSoy-H6Zj4W4UY60D-uL1aJyq0pc,573
20
+ pip/_internal/__pycache__/__init__.cpython-312.pyc,,
21
+ pip/_internal/__pycache__/build_env.cpython-312.pyc,,
22
+ pip/_internal/__pycache__/cache.cpython-312.pyc,,
23
+ pip/_internal/__pycache__/configuration.cpython-312.pyc,,
24
+ pip/_internal/__pycache__/exceptions.cpython-312.pyc,,
25
+ pip/_internal/__pycache__/main.cpython-312.pyc,,
26
+ pip/_internal/__pycache__/pyproject.cpython-312.pyc,,
27
+ pip/_internal/__pycache__/self_outdated_check.cpython-312.pyc,,
28
+ pip/_internal/__pycache__/wheel_builder.cpython-312.pyc,,
29
+ pip/_internal/build_env.py,sha256=1ESpqw0iupS_K7phZK5zshVE5Czy9BtGLFU4W6Enva8,10243
30
+ pip/_internal/cache.py,sha256=pMyi1n2nfdo7xzLVhmdOvIy1INt27HbqhJNj7vMcWlI,10429
31
+ pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132
32
+ pip/_internal/cli/__pycache__/__init__.cpython-312.pyc,,
33
+ pip/_internal/cli/__pycache__/autocompletion.cpython-312.pyc,,
34
+ pip/_internal/cli/__pycache__/base_command.cpython-312.pyc,,
35
+ pip/_internal/cli/__pycache__/cmdoptions.cpython-312.pyc,,
36
+ pip/_internal/cli/__pycache__/command_context.cpython-312.pyc,,
37
+ pip/_internal/cli/__pycache__/main.cpython-312.pyc,,
38
+ pip/_internal/cli/__pycache__/main_parser.cpython-312.pyc,,
39
+ pip/_internal/cli/__pycache__/parser.cpython-312.pyc,,
40
+ pip/_internal/cli/__pycache__/progress_bars.cpython-312.pyc,,
41
+ pip/_internal/cli/__pycache__/req_command.cpython-312.pyc,,
42
+ pip/_internal/cli/__pycache__/spinners.cpython-312.pyc,,
43
+ pip/_internal/cli/__pycache__/status_codes.cpython-312.pyc,,
44
+ pip/_internal/cli/autocompletion.py,sha256=wY2JPZY2Eji1vhR7bVo-yCBPJ9LCy6P80iOAhZD1Vi8,6676
45
+ pip/_internal/cli/base_command.py,sha256=ACUUqWkZMU2O1pmUSpfBV3fwb36JzzTHGrbKXyb5f74,8726
46
+ pip/_internal/cli/cmdoptions.py,sha256=0bXhKutppZLBgAL54iK3tTrj-JRVbUB5M_2pHv_wnKk,30030
47
+ pip/_internal/cli/command_context.py,sha256=RHgIPwtObh5KhMrd3YZTkl8zbVG-6Okml7YbFX4Ehg0,774
48
+ pip/_internal/cli/main.py,sha256=Uzxt_YD1hIvB1AW5mxt6IVcht5G712AtMqdo51UMhmQ,2816
49
+ pip/_internal/cli/main_parser.py,sha256=laDpsuBDl6kyfywp9eMMA9s84jfH2TJJn-vmL0GG90w,4338
50
+ pip/_internal/cli/parser.py,sha256=tWP-K1uSxnJyXu3WE0kkH3niAYRBeuUaxeydhzOdhL4,10817
51
+ pip/_internal/cli/progress_bars.py,sha256=So4mPoSjXkXiSHiTzzquH3VVyVD_njXlHJSExYPXAow,1968
52
+ pip/_internal/cli/req_command.py,sha256=GqS9jkeHktOy6zRzC6uhcRY7SelnAV1LZ6OfS_gNcEk,18440
53
+ pip/_internal/cli/spinners.py,sha256=hIJ83GerdFgFCdobIA23Jggetegl_uC4Sp586nzFbPE,5118
54
+ pip/_internal/cli/status_codes.py,sha256=sEFHUaUJbqv8iArL3HAtcztWZmGOFX01hTesSytDEh0,116
55
+ pip/_internal/commands/__init__.py,sha256=5oRO9O3dM2vGuh0bFw4HOVletryrz5HHMmmPWwJrH9U,3882
56
+ pip/_internal/commands/__pycache__/__init__.cpython-312.pyc,,
57
+ pip/_internal/commands/__pycache__/cache.cpython-312.pyc,,
58
+ pip/_internal/commands/__pycache__/check.cpython-312.pyc,,
59
+ pip/_internal/commands/__pycache__/completion.cpython-312.pyc,,
60
+ pip/_internal/commands/__pycache__/configuration.cpython-312.pyc,,
61
+ pip/_internal/commands/__pycache__/debug.cpython-312.pyc,,
62
+ pip/_internal/commands/__pycache__/download.cpython-312.pyc,,
63
+ pip/_internal/commands/__pycache__/freeze.cpython-312.pyc,,
64
+ pip/_internal/commands/__pycache__/hash.cpython-312.pyc,,
65
+ pip/_internal/commands/__pycache__/help.cpython-312.pyc,,
66
+ pip/_internal/commands/__pycache__/index.cpython-312.pyc,,
67
+ pip/_internal/commands/__pycache__/inspect.cpython-312.pyc,,
68
+ pip/_internal/commands/__pycache__/install.cpython-312.pyc,,
69
+ pip/_internal/commands/__pycache__/list.cpython-312.pyc,,
70
+ pip/_internal/commands/__pycache__/search.cpython-312.pyc,,
71
+ pip/_internal/commands/__pycache__/show.cpython-312.pyc,,
72
+ pip/_internal/commands/__pycache__/uninstall.cpython-312.pyc,,
73
+ pip/_internal/commands/__pycache__/wheel.cpython-312.pyc,,
74
+ pip/_internal/commands/cache.py,sha256=aDR3pKRRX9dHobQ2HzKryf02jgOZnGcnfEmX_288Vcg,7581
75
+ pip/_internal/commands/check.py,sha256=Rb13Q28yoLh0j1gpx5SU0jlResNct21eQCRsnaO9xKA,1782
76
+ pip/_internal/commands/completion.py,sha256=2frgchce-GE5Gh9SjEJV-MTcpxy3G9-Es8mpe66nHts,3986
77
+ pip/_internal/commands/configuration.py,sha256=NB5uf8HIX8-li95YLoZO09nALIWlLCHDF5aifSKcBn8,9815
78
+ pip/_internal/commands/debug.py,sha256=AesEID-4gPFDWTwPiPaGZuD4twdT-imaGuMR5ZfSn8s,6591
79
+ pip/_internal/commands/download.py,sha256=e4hw088zGo26WmJaMIRvCniLlLmoOjqolGyfHjsCkCQ,5335
80
+ pip/_internal/commands/freeze.py,sha256=2qjQrH9KWi5Roav0CuR7vc7hWm4uOi_0l6tp3ESKDHM,3172
81
+ pip/_internal/commands/hash.py,sha256=EVVOuvGtoPEdFi8SNnmdqlCQrhCxV-kJsdwtdcCnXGQ,1703
82
+ pip/_internal/commands/help.py,sha256=gcc6QDkcgHMOuAn5UxaZwAStsRBrnGSn_yxjS57JIoM,1132
83
+ pip/_internal/commands/index.py,sha256=cGQVSA5dAs7caQ9sz4kllYvaI4ZpGiq1WhCgaImXNSA,4793
84
+ pip/_internal/commands/inspect.py,sha256=2wSPt9yfr3r6g-s2S5L6PvRtaHNVyb4TuodMStJ39cw,3188
85
+ pip/_internal/commands/install.py,sha256=sdi44xeJlENfU-ziPl1TbUC3no2-ZGDpwBigmX1JuM0,28934
86
+ pip/_internal/commands/list.py,sha256=LNL6016BPvFpAZVzNoo_DWDzvRFpfw__m9Rp5kw-yUM,12457
87
+ pip/_internal/commands/search.py,sha256=sbBZiARRc050QquOKcCvOr2K3XLsoYebLKZGRi__iUI,5697
88
+ pip/_internal/commands/show.py,sha256=t5jia4zcYJRJZy4U_Von7zMl03hJmmcofj6oDNTnj7Y,6419
89
+ pip/_internal/commands/uninstall.py,sha256=OIqO9tqadY8kM4HwhFf1Q62fUIp7v8KDrTRo8yWMz7Y,3886
90
+ pip/_internal/commands/wheel.py,sha256=CSnX8Pmf1oPCnd7j7bn1_f58G9KHNiAblvVJ5zykN-A,6476
91
+ pip/_internal/configuration.py,sha256=i_dePJKndPAy7hf48Sl6ZuPyl3tFPCE67z0SNatwuwE,13839
92
+ pip/_internal/distributions/__init__.py,sha256=Hq6kt6gXBgjNit5hTTWLAzeCNOKoB-N0pGYSqehrli8,858
93
+ pip/_internal/distributions/__pycache__/__init__.cpython-312.pyc,,
94
+ pip/_internal/distributions/__pycache__/base.cpython-312.pyc,,
95
+ pip/_internal/distributions/__pycache__/installed.cpython-312.pyc,,
96
+ pip/_internal/distributions/__pycache__/sdist.cpython-312.pyc,,
97
+ pip/_internal/distributions/__pycache__/wheel.cpython-312.pyc,,
98
+ pip/_internal/distributions/base.py,sha256=jrF1Vi7eGyqFqMHrieh1PIOrGU7KeCxhYPZnbvtmvGY,1221
99
+ pip/_internal/distributions/installed.py,sha256=NI2OgsgH9iBq9l5vB-56vOg5YsybOy-AU4VE5CSCO2I,729
100
+ pip/_internal/distributions/sdist.py,sha256=SQBdkatXSigKGG_SaD0U0p1Jwdfrg26UCNcHgkXZfdA,6494
101
+ pip/_internal/distributions/wheel.py,sha256=m-J4XO-gvFerlYsFzzSXYDvrx8tLZlJFTCgDxctn8ig,1164
102
+ pip/_internal/exceptions.py,sha256=LyTVY2dANx-i_TEk5Yr9YcwUtiy0HOEFCAQq1F_46co,23737
103
+ pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30
104
+ pip/_internal/index/__pycache__/__init__.cpython-312.pyc,,
105
+ pip/_internal/index/__pycache__/collector.cpython-312.pyc,,
106
+ pip/_internal/index/__pycache__/package_finder.cpython-312.pyc,,
107
+ pip/_internal/index/__pycache__/sources.cpython-312.pyc,,
108
+ pip/_internal/index/collector.py,sha256=3OmYZ3tCoRPGOrELSgQWG-03M-bQHa2-VCA3R_nJAaU,16504
109
+ pip/_internal/index/package_finder.py,sha256=rrUw4vj7QE_eMt022jw--wQiKznMaUgVBkJ1UCrVUxo,37873
110
+ pip/_internal/index/sources.py,sha256=7jw9XSeeQA5K-H4I5a5034Ks2gkQqm4zPXjrhwnP1S4,6556
111
+ pip/_internal/locations/__init__.py,sha256=Dh8LJWG8LRlDK4JIj9sfRF96TREzE--N_AIlx7Tqoe4,15365
112
+ pip/_internal/locations/__pycache__/__init__.cpython-312.pyc,,
113
+ pip/_internal/locations/__pycache__/_distutils.cpython-312.pyc,,
114
+ pip/_internal/locations/__pycache__/_sysconfig.cpython-312.pyc,,
115
+ pip/_internal/locations/__pycache__/base.cpython-312.pyc,,
116
+ pip/_internal/locations/_distutils.py,sha256=cmi6h63xYNXhQe7KEWEMaANjHFy5yQOPt_1_RCWyXMY,6100
117
+ pip/_internal/locations/_sysconfig.py,sha256=jyNVtUfMIf0mtyY-Xp1m9yQ8iwECozSVVFmjkN9a2yw,7680
118
+ pip/_internal/locations/base.py,sha256=RQiPi1d4FVM2Bxk04dQhXZ2PqkeljEL2fZZ9SYqIQ78,2556
119
+ pip/_internal/main.py,sha256=r-UnUe8HLo5XFJz8inTcOOTiu_sxNhgHb6VwlGUllOI,340
120
+ pip/_internal/metadata/__init__.py,sha256=84j1dPJaIoz5Q2ZTPi0uB1iaDAHiUNfKtYSGQCfFKpo,4280
121
+ pip/_internal/metadata/__pycache__/__init__.cpython-312.pyc,,
122
+ pip/_internal/metadata/__pycache__/_json.cpython-312.pyc,,
123
+ pip/_internal/metadata/__pycache__/base.cpython-312.pyc,,
124
+ pip/_internal/metadata/__pycache__/pkg_resources.cpython-312.pyc,,
125
+ pip/_internal/metadata/_json.py,sha256=BTkWfFDrWFwuSodImjtbAh8wCL3isecbnjTb5E6UUDI,2595
126
+ pip/_internal/metadata/base.py,sha256=vIwIo1BtoqegehWMAXhNrpLGYBq245rcaCNkBMPnTU8,25277
127
+ pip/_internal/metadata/importlib/__init__.py,sha256=9ZVO8BoE7NEZPmoHp5Ap_NJo0HgNIezXXg-TFTtt3Z4,107
128
+ pip/_internal/metadata/importlib/__pycache__/__init__.cpython-312.pyc,,
129
+ pip/_internal/metadata/importlib/__pycache__/_compat.cpython-312.pyc,,
130
+ pip/_internal/metadata/importlib/__pycache__/_dists.cpython-312.pyc,,
131
+ pip/_internal/metadata/importlib/__pycache__/_envs.cpython-312.pyc,,
132
+ pip/_internal/metadata/importlib/_compat.py,sha256=GAe_prIfCE4iUylrnr_2dJRlkkBVRUbOidEoID7LPoE,1882
133
+ pip/_internal/metadata/importlib/_dists.py,sha256=BUV8y6D0PePZrEN3vfJL-m1FDqZ6YPRgAiBeBinHhNg,8181
134
+ pip/_internal/metadata/importlib/_envs.py,sha256=I1DHMyAgZb8jT8CYndWl2aw2dN675p-BKPCuJhvdhrY,7435
135
+ pip/_internal/metadata/pkg_resources.py,sha256=WjwiNdRsvxqxL4MA5Tb5a_q3Q3sUhdpbZF8wGLtPMI0,9773
136
+ pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63
137
+ pip/_internal/models/__pycache__/__init__.cpython-312.pyc,,
138
+ pip/_internal/models/__pycache__/candidate.cpython-312.pyc,,
139
+ pip/_internal/models/__pycache__/direct_url.cpython-312.pyc,,
140
+ pip/_internal/models/__pycache__/format_control.cpython-312.pyc,,
141
+ pip/_internal/models/__pycache__/index.cpython-312.pyc,,
142
+ pip/_internal/models/__pycache__/installation_report.cpython-312.pyc,,
143
+ pip/_internal/models/__pycache__/link.cpython-312.pyc,,
144
+ pip/_internal/models/__pycache__/scheme.cpython-312.pyc,,
145
+ pip/_internal/models/__pycache__/search_scope.cpython-312.pyc,,
146
+ pip/_internal/models/__pycache__/selection_prefs.cpython-312.pyc,,
147
+ pip/_internal/models/__pycache__/target_python.cpython-312.pyc,,
148
+ pip/_internal/models/__pycache__/wheel.cpython-312.pyc,,
149
+ pip/_internal/models/candidate.py,sha256=6pcABsaR7CfIHlbJbr2_kMkVJFL_yrYjTx6SVWUnCPQ,990
150
+ pip/_internal/models/direct_url.py,sha256=EepBxI97j7wSZ3AmRETYyVTmR9NoTas15vc8popxVTg,6931
151
+ pip/_internal/models/format_control.py,sha256=DJpMYjxeYKKQdwNcML2_F0vtAh-qnKTYe-CpTxQe-4g,2520
152
+ pip/_internal/models/index.py,sha256=tYnL8oxGi4aSNWur0mG8DAP7rC6yuha_MwJO8xw0crI,1030
153
+ pip/_internal/models/installation_report.py,sha256=ueXv1RiMLAucaTuEvXACXX5R64_Wcm8b1Ztqx4Rd5xI,2609
154
+ pip/_internal/models/link.py,sha256=6OEk3bt41WU7QZoiyuoVPGsKOU-J_BbDDhouKbIXm0Y,20819
155
+ pip/_internal/models/scheme.py,sha256=3EFQp_ICu_shH1-TBqhl0QAusKCPDFOlgHFeN4XowWs,738
156
+ pip/_internal/models/search_scope.py,sha256=ASVyyZxiJILw7bTIVVpJx8J293M3Hk5F33ilGn0e80c,4643
157
+ pip/_internal/models/selection_prefs.py,sha256=KZdi66gsR-_RUXUr9uejssk3rmTHrQVJWeNA2sV-VSY,1907
158
+ pip/_internal/models/target_python.py,sha256=qKpZox7J8NAaPmDs5C_aniwfPDxzvpkrCKqfwndG87k,3858
159
+ pip/_internal/models/wheel.py,sha256=YqazoIZyma_Q1ejFa1C7NHKQRRWlvWkdK96VRKmDBeI,3600
160
+ pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50
161
+ pip/_internal/network/__pycache__/__init__.cpython-312.pyc,,
162
+ pip/_internal/network/__pycache__/auth.cpython-312.pyc,,
163
+ pip/_internal/network/__pycache__/cache.cpython-312.pyc,,
164
+ pip/_internal/network/__pycache__/download.cpython-312.pyc,,
165
+ pip/_internal/network/__pycache__/lazy_wheel.cpython-312.pyc,,
166
+ pip/_internal/network/__pycache__/session.cpython-312.pyc,,
167
+ pip/_internal/network/__pycache__/utils.cpython-312.pyc,,
168
+ pip/_internal/network/__pycache__/xmlrpc.cpython-312.pyc,,
169
+ pip/_internal/network/auth.py,sha256=TC-OcW2KU4W6R1hU4qPgQXvVH54adACpZz6sWq-R9NA,20541
170
+ pip/_internal/network/cache.py,sha256=hgXftU-eau4MWxHSLquTMzepYq5BPC2zhCkhN3glBy8,2145
171
+ pip/_internal/network/download.py,sha256=HvDDq9bVqaN3jcS3DyVJHP7uTqFzbShdkf7NFSoHfkw,6096
172
+ pip/_internal/network/lazy_wheel.py,sha256=2PXVduYZPCPZkkQFe1J1GbfHJWeCU--FXonGyIfw9eU,7638
173
+ pip/_internal/network/session.py,sha256=uhovd4J7abd0Yr2g426yC4aC6Uw1VKrQfpzalsEBEMw,18607
174
+ pip/_internal/network/utils.py,sha256=6A5SrUJEEUHxbGtbscwU2NpCyz-3ztiDlGWHpRRhsJ8,4073
175
+ pip/_internal/network/xmlrpc.py,sha256=AzQgG4GgS152_cqmGr_Oz2MIXsCal-xfsis7fA7nmU0,1791
176
+ pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
177
+ pip/_internal/operations/__pycache__/__init__.cpython-312.pyc,,
178
+ pip/_internal/operations/__pycache__/check.cpython-312.pyc,,
179
+ pip/_internal/operations/__pycache__/freeze.cpython-312.pyc,,
180
+ pip/_internal/operations/__pycache__/prepare.cpython-312.pyc,,
181
+ pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
182
+ pip/_internal/operations/build/__pycache__/__init__.cpython-312.pyc,,
183
+ pip/_internal/operations/build/__pycache__/build_tracker.cpython-312.pyc,,
184
+ pip/_internal/operations/build/__pycache__/metadata.cpython-312.pyc,,
185
+ pip/_internal/operations/build/__pycache__/metadata_editable.cpython-312.pyc,,
186
+ pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-312.pyc,,
187
+ pip/_internal/operations/build/__pycache__/wheel.cpython-312.pyc,,
188
+ pip/_internal/operations/build/__pycache__/wheel_editable.cpython-312.pyc,,
189
+ pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-312.pyc,,
190
+ pip/_internal/operations/build/build_tracker.py,sha256=vf81EwomN3xe9G8qRJED0VGqNikmRQRQoobNsxi5Xrs,4133
191
+ pip/_internal/operations/build/metadata.py,sha256=9S0CUD8U3QqZeXp-Zyt8HxwU90lE4QrnYDgrqZDzBnc,1422
192
+ pip/_internal/operations/build/metadata_editable.py,sha256=VLL7LvntKE8qxdhUdEJhcotFzUsOSI8NNS043xULKew,1474
193
+ pip/_internal/operations/build/metadata_legacy.py,sha256=o-eU21As175hDC7dluM1fJJ_FqokTIShyWpjKaIpHZw,2198
194
+ pip/_internal/operations/build/wheel.py,sha256=sT12FBLAxDC6wyrDorh8kvcZ1jG5qInCRWzzP-UkJiQ,1075
195
+ pip/_internal/operations/build/wheel_editable.py,sha256=yOtoH6zpAkoKYEUtr8FhzrYnkNHQaQBjWQ2HYae1MQg,1417
196
+ pip/_internal/operations/build/wheel_legacy.py,sha256=C9j6rukgQI1n_JeQLoZGuDdfUwzCXShyIdPTp6edbMQ,3064
197
+ pip/_internal/operations/check.py,sha256=LD5BisEdT9vgzS7rLYUuk01z0l4oMj2Q7SsAxVu-pEk,6806
198
+ pip/_internal/operations/freeze.py,sha256=uqoeTAf6HOYVMR2UgAT8N85UZoGEVEoQdan_Ao6SOfk,9816
199
+ pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51
200
+ pip/_internal/operations/install/__pycache__/__init__.cpython-312.pyc,,
201
+ pip/_internal/operations/install/__pycache__/editable_legacy.cpython-312.pyc,,
202
+ pip/_internal/operations/install/__pycache__/wheel.cpython-312.pyc,,
203
+ pip/_internal/operations/install/editable_legacy.py,sha256=YeR0KadWXw_ZheC1NtAG1qVIEkOgRGHc23x-YtGW7NU,1282
204
+ pip/_internal/operations/install/wheel.py,sha256=8lsVMt_FAuiGNsf_e7C7_cCSOEO7pHyjgVmRNx-WXrw,27475
205
+ pip/_internal/operations/prepare.py,sha256=nxjIiGRSiUUSRFpwN-Qro7N6BE9jqV4mudJ7CIv9qwY,28868
206
+ pip/_internal/pyproject.py,sha256=ltmrXWaMXjiJHbYyzWplTdBvPYPdKk99GjKuQVypGZU,7161
207
+ pip/_internal/req/__init__.py,sha256=TELFgZOof3lhMmaICVWL9U7PlhXo9OufokbMAJ6J2GI,2738
208
+ pip/_internal/req/__pycache__/__init__.cpython-312.pyc,,
209
+ pip/_internal/req/__pycache__/constructors.cpython-312.pyc,,
210
+ pip/_internal/req/__pycache__/req_file.cpython-312.pyc,,
211
+ pip/_internal/req/__pycache__/req_install.cpython-312.pyc,,
212
+ pip/_internal/req/__pycache__/req_set.cpython-312.pyc,,
213
+ pip/_internal/req/__pycache__/req_uninstall.cpython-312.pyc,,
214
+ pip/_internal/req/constructors.py,sha256=8YE-eNXMSZ1lgsJZg-HnIo8EdaGfiOM2t3EaLlLD5Og,16610
215
+ pip/_internal/req/req_file.py,sha256=5PCO4GnDEnUENiFj4vD_1QmAMjHNtvN6HXbETZ9UGok,17872
216
+ pip/_internal/req/req_install.py,sha256=hpG29Bm2PAq7G-ogTatZcNUgjwt0zpdTXtxGw4M_MtU,33084
217
+ pip/_internal/req/req_set.py,sha256=pSCcIKURDkGb6JAKsc-cdvnvnAJlYPk-p3vvON9M3DY,4704
218
+ pip/_internal/req/req_uninstall.py,sha256=sGwa_yZ6X2NcRSUJWzUlYkf8bDEjRySAE3aQ5OewIWA,24678
219
+ pip/_internal/resolution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
220
+ pip/_internal/resolution/__pycache__/__init__.cpython-312.pyc,,
221
+ pip/_internal/resolution/__pycache__/base.cpython-312.pyc,,
222
+ pip/_internal/resolution/base.py,sha256=qlmh325SBVfvG6Me9gc5Nsh5sdwHBwzHBq6aEXtKsLA,583
223
+ pip/_internal/resolution/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
224
+ pip/_internal/resolution/legacy/__pycache__/__init__.cpython-312.pyc,,
225
+ pip/_internal/resolution/legacy/__pycache__/resolver.cpython-312.pyc,,
226
+ pip/_internal/resolution/legacy/resolver.py,sha256=th-eTPIvbecfJaUsdrbH1aHQvDV2yCE-RhrrpsJhKbE,24128
227
+ pip/_internal/resolution/resolvelib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
228
+ pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-312.pyc,,
229
+ pip/_internal/resolution/resolvelib/__pycache__/base.cpython-312.pyc,,
230
+ pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-312.pyc,,
231
+ pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-312.pyc,,
232
+ pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-312.pyc,,
233
+ pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-312.pyc,,
234
+ pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-312.pyc,,
235
+ pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-312.pyc,,
236
+ pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-312.pyc,,
237
+ pip/_internal/resolution/resolvelib/base.py,sha256=u1O4fkvCO4mhmu5i32xrDv9AX5NgUci_eYVyBDQhTIM,5220
238
+ pip/_internal/resolution/resolvelib/candidates.py,sha256=u5mU96o2lnUy-ODRJv7Wevee0xCYI6IKIXNamSBQnso,18969
239
+ pip/_internal/resolution/resolvelib/factory.py,sha256=y1Q2fsV1GKDKPitoapOLLEs75WNzEpd4l_RezCt927c,27845
240
+ pip/_internal/resolution/resolvelib/found_candidates.py,sha256=hvL3Hoa9VaYo-qEOZkBi2Iqw251UDxPz-uMHVaWmLpE,5705
241
+ pip/_internal/resolution/resolvelib/provider.py,sha256=4t23ivjruqM6hKBX1KpGiTt-M4HGhRcZnGLV0c01K7U,9824
242
+ pip/_internal/resolution/resolvelib/reporter.py,sha256=YFm9hQvz4DFCbjZeFTQ56hTz3Ac-mDBnHkeNRVvMHLY,3100
243
+ pip/_internal/resolution/resolvelib/requirements.py,sha256=zHnERhfubmvKyM3kgdAOs0dYFiqUfzKR-DAt4y0NWOI,5454
244
+ pip/_internal/resolution/resolvelib/resolver.py,sha256=n2Vn9EC5-7JmcRY5erIPQ4hUWnEUngG0oYS3JW3xXZo,11642
245
+ pip/_internal/self_outdated_check.py,sha256=pnqBuKKZQ8OxKP0MaUUiDHl3AtyoMJHHG4rMQ7YcYXY,8167
246
+ pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
247
+ pip/_internal/utils/__pycache__/__init__.cpython-312.pyc,,
248
+ pip/_internal/utils/__pycache__/_jaraco_text.cpython-312.pyc,,
249
+ pip/_internal/utils/__pycache__/_log.cpython-312.pyc,,
250
+ pip/_internal/utils/__pycache__/appdirs.cpython-312.pyc,,
251
+ pip/_internal/utils/__pycache__/compat.cpython-312.pyc,,
252
+ pip/_internal/utils/__pycache__/compatibility_tags.cpython-312.pyc,,
253
+ pip/_internal/utils/__pycache__/datetime.cpython-312.pyc,,
254
+ pip/_internal/utils/__pycache__/deprecation.cpython-312.pyc,,
255
+ pip/_internal/utils/__pycache__/direct_url_helpers.cpython-312.pyc,,
256
+ pip/_internal/utils/__pycache__/egg_link.cpython-312.pyc,,
257
+ pip/_internal/utils/__pycache__/encoding.cpython-312.pyc,,
258
+ pip/_internal/utils/__pycache__/entrypoints.cpython-312.pyc,,
259
+ pip/_internal/utils/__pycache__/filesystem.cpython-312.pyc,,
260
+ pip/_internal/utils/__pycache__/filetypes.cpython-312.pyc,,
261
+ pip/_internal/utils/__pycache__/glibc.cpython-312.pyc,,
262
+ pip/_internal/utils/__pycache__/hashes.cpython-312.pyc,,
263
+ pip/_internal/utils/__pycache__/inject_securetransport.cpython-312.pyc,,
264
+ pip/_internal/utils/__pycache__/logging.cpython-312.pyc,,
265
+ pip/_internal/utils/__pycache__/misc.cpython-312.pyc,,
266
+ pip/_internal/utils/__pycache__/models.cpython-312.pyc,,
267
+ pip/_internal/utils/__pycache__/packaging.cpython-312.pyc,,
268
+ pip/_internal/utils/__pycache__/setuptools_build.cpython-312.pyc,,
269
+ pip/_internal/utils/__pycache__/subprocess.cpython-312.pyc,,
270
+ pip/_internal/utils/__pycache__/temp_dir.cpython-312.pyc,,
271
+ pip/_internal/utils/__pycache__/unpacking.cpython-312.pyc,,
272
+ pip/_internal/utils/__pycache__/urls.cpython-312.pyc,,
273
+ pip/_internal/utils/__pycache__/virtualenv.cpython-312.pyc,,
274
+ pip/_internal/utils/__pycache__/wheel.cpython-312.pyc,,
275
+ pip/_internal/utils/_jaraco_text.py,sha256=yvDGelTVugRayPaOF2k4ab0Ky4d3uOkAfuOQjASjImY,3351
276
+ pip/_internal/utils/_log.py,sha256=-jHLOE_THaZz5BFcCnoSL9EYAtJ0nXem49s9of4jvKw,1015
277
+ pip/_internal/utils/appdirs.py,sha256=swgcTKOm3daLeXTW6v5BUS2Ti2RvEnGRQYH_yDXklAo,1665
278
+ pip/_internal/utils/compat.py,sha256=ACyBfLgj3_XG-iA5omEDrXqDM0cQKzi8h8HRBInzG6Q,1884
279
+ pip/_internal/utils/compatibility_tags.py,sha256=ydin8QG8BHqYRsPY4OL6cmb44CbqXl1T0xxS97VhHkk,5377
280
+ pip/_internal/utils/datetime.py,sha256=m21Y3wAtQc-ji6Veb6k_M5g6A0ZyFI4egchTdnwh-pQ,242
281
+ pip/_internal/utils/deprecation.py,sha256=NKo8VqLioJ4nnXXGmW4KdasxF90EFHkZaHeX1fT08C8,3627
282
+ pip/_internal/utils/direct_url_helpers.py,sha256=6F1tc2rcKaCZmgfVwsE6ObIe_Pux23mUVYA-2D9wCFc,3206
283
+ pip/_internal/utils/egg_link.py,sha256=ZryCchR_yQSCsdsMkCpxQjjLbQxObA5GDtLG0RR5mGc,2118
284
+ pip/_internal/utils/encoding.py,sha256=qqsXDtiwMIjXMEiIVSaOjwH5YmirCaK-dIzb6-XJsL0,1169
285
+ pip/_internal/utils/entrypoints.py,sha256=YlhLTRl2oHBAuqhc-zmL7USS67TPWVHImjeAQHreZTQ,3064
286
+ pip/_internal/utils/filesystem.py,sha256=RhMIXUaNVMGjc3rhsDahWQ4MavvEQDdqXqgq-F6fpw8,5122
287
+ pip/_internal/utils/filetypes.py,sha256=i8XAQ0eFCog26Fw9yV0Yb1ygAqKYB1w9Cz9n0fj8gZU,716
288
+ pip/_internal/utils/glibc.py,sha256=Mesxxgg3BLxheLZx-dSf30b6gKpOgdVXw6W--uHSszQ,3113
289
+ pip/_internal/utils/hashes.py,sha256=MjOigC75z6qoRMkgHiHqot7eqxfwDZSrEflJMPm-bHE,5118
290
+ pip/_internal/utils/inject_securetransport.py,sha256=o-QRVMGiENrTJxw3fAhA7uxpdEdw6M41TjHYtSVRrcg,795
291
+ pip/_internal/utils/logging.py,sha256=U2q0i1n8hPS2gQh8qcocAg5dovGAa_bR24akmXMzrk4,11632
292
+ pip/_internal/utils/misc.py,sha256=Ds3rSQU7HbdAywwmEBcPnVoLB1Tp_2gL6IbaWcpe8i0,22343
293
+ pip/_internal/utils/models.py,sha256=5GoYU586SrxURMvDn_jBMJInitviJg4O5-iOU-6I0WY,1193
294
+ pip/_internal/utils/packaging.py,sha256=5Wm6_x7lKrlqVjPI5MBN_RurcRHwVYoQ7Ksrs84de7s,2108
295
+ pip/_internal/utils/setuptools_build.py,sha256=ouXpud-jeS8xPyTPsXJ-m34NPvK5os45otAzdSV_IJE,4435
296
+ pip/_internal/utils/subprocess.py,sha256=0EMhgfPGFk8FZn6Qq7Hp9PN6YHuQNWiVby4DXcTCON4,9200
297
+ pip/_internal/utils/temp_dir.py,sha256=aCX489gRa4Nu0dMKRFyGhV6maJr60uEynu5uCbKR4Qg,7702
298
+ pip/_internal/utils/unpacking.py,sha256=SBb2iV1crb89MDRTEKY86R4A_UOWApTQn9VQVcMDOlE,8821
299
+ pip/_internal/utils/urls.py,sha256=AhaesUGl-9it6uvG6fsFPOr9ynFpGaTMk4t5XTX7Z_Q,1759
300
+ pip/_internal/utils/virtualenv.py,sha256=S6f7csYorRpiD6cvn3jISZYc3I8PJC43H5iMFpRAEDU,3456
301
+ pip/_internal/utils/wheel.py,sha256=lXOgZyTlOm5HmK8tw5iw0A3_5A6wRzsXHOaQkIvvloU,4549
302
+ pip/_internal/vcs/__init__.py,sha256=UAqvzpbi0VbZo3Ub6skEeZAw-ooIZR-zX_WpCbxyCoU,596
303
+ pip/_internal/vcs/__pycache__/__init__.cpython-312.pyc,,
304
+ pip/_internal/vcs/__pycache__/bazaar.cpython-312.pyc,,
305
+ pip/_internal/vcs/__pycache__/git.cpython-312.pyc,,
306
+ pip/_internal/vcs/__pycache__/mercurial.cpython-312.pyc,,
307
+ pip/_internal/vcs/__pycache__/subversion.cpython-312.pyc,,
308
+ pip/_internal/vcs/__pycache__/versioncontrol.cpython-312.pyc,,
309
+ pip/_internal/vcs/bazaar.py,sha256=j0oin0fpGRHcCFCxEcpPCQoFEvA-DMLULKdGP8Nv76o,3519
310
+ pip/_internal/vcs/git.py,sha256=mjhwudCx9WlLNkxZ6_kOKmueF0rLoU2i1xeASKF6yiQ,18116
311
+ pip/_internal/vcs/mercurial.py,sha256=1FG5Zh2ltJZKryO40d2l2Q91FYNazuS16kkpoAVOh0Y,5244
312
+ pip/_internal/vcs/subversion.py,sha256=vhZs8L-TNggXqM1bbhl-FpbxE3TrIB6Tgnx8fh3S2HE,11729
313
+ pip/_internal/vcs/versioncontrol.py,sha256=KUOc-hN51em9jrqxKwUR3JnkgSE-xSOqMiiJcSaL6B8,22811
314
+ pip/_internal/wheel_builder.py,sha256=3UlHfxQi7_AAXI7ur8aPpPbmqHhecCsubmkHEl-00KU,11842
315
+ pip/_vendor/__init__.py,sha256=fNxOSVD0auElsD8fN9tuq5psfgMQ-RFBtD4X5gjlRkg,4966
316
+ pip/_vendor/__pycache__/__init__.cpython-312.pyc,,
317
+ pip/_vendor/__pycache__/six.cpython-312.pyc,,
318
+ pip/_vendor/__pycache__/typing_extensions.cpython-312.pyc,,
319
+ pip/_vendor/cachecontrol/__init__.py,sha256=hrxlv3q7upsfyMw8k3gQ9vagBax1pYHSGGqYlZ0Zk0M,465
320
+ pip/_vendor/cachecontrol/__pycache__/__init__.cpython-312.pyc,,
321
+ pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-312.pyc,,
322
+ pip/_vendor/cachecontrol/__pycache__/adapter.cpython-312.pyc,,
323
+ pip/_vendor/cachecontrol/__pycache__/cache.cpython-312.pyc,,
324
+ pip/_vendor/cachecontrol/__pycache__/compat.cpython-312.pyc,,
325
+ pip/_vendor/cachecontrol/__pycache__/controller.cpython-312.pyc,,
326
+ pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-312.pyc,,
327
+ pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-312.pyc,,
328
+ pip/_vendor/cachecontrol/__pycache__/serialize.cpython-312.pyc,,
329
+ pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-312.pyc,,
330
+ pip/_vendor/cachecontrol/_cmd.py,sha256=lxUXqfNTVx84zf6tcWbkLZHA6WVBRtJRpfeA9ZqhaAY,1379
331
+ pip/_vendor/cachecontrol/adapter.py,sha256=ew9OYEQHEOjvGl06ZsuX8W3DAvHWsQKHwWAxISyGug8,5033
332
+ pip/_vendor/cachecontrol/cache.py,sha256=Tty45fOjH40fColTGkqKQvQQmbYsMpk-nCyfLcv2vG4,1535
333
+ pip/_vendor/cachecontrol/caches/__init__.py,sha256=h-1cUmOz6mhLsjTjOrJ8iPejpGdLCyG4lzTftfGZvLg,242
334
+ pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-312.pyc,,
335
+ pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-312.pyc,,
336
+ pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-312.pyc,,
337
+ pip/_vendor/cachecontrol/caches/file_cache.py,sha256=GpexcE29LoY4MaZwPUTcUBZaDdcsjqyLxZFznk8Hbr4,5271
338
+ pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=mp-QWonP40I3xJGK3XVO-Gs9a3UjzlqqEmp9iLJH9F4,1033
339
+ pip/_vendor/cachecontrol/compat.py,sha256=LNx7vqBndYdHU8YuJt53ab_8rzMGTXVrvMb7CZJkxG0,778
340
+ pip/_vendor/cachecontrol/controller.py,sha256=bAYrt7x_VH4toNpI066LQxbHpYGpY1MxxmZAhspplvw,16416
341
+ pip/_vendor/cachecontrol/filewrapper.py,sha256=X4BAQOO26GNOR7nH_fhTzAfeuct2rBQcx_15MyFBpcs,3946
342
+ pip/_vendor/cachecontrol/heuristics.py,sha256=8kAyuZLSCyEIgQr6vbUwfhpqg9ows4mM0IV6DWazevI,4154
343
+ pip/_vendor/cachecontrol/serialize.py,sha256=_U1NU_C-SDgFzkbAxAsPDgMTHeTWZZaHCQnZN_jh0U8,7105
344
+ pip/_vendor/cachecontrol/wrapper.py,sha256=X3-KMZ20Ho3VtqyVaXclpeQpFzokR5NE8tZSfvKVaB8,774
345
+ pip/_vendor/certifi/__init__.py,sha256=q5ePznlfOw-XYIOV6RTnh45yS9haN-Nb1d__4QXc3g0,94
346
+ pip/_vendor/certifi/__main__.py,sha256=1k3Cr95vCxxGRGDljrW3wMdpZdL3Nhf0u1n-k2qdsCY,255
347
+ pip/_vendor/certifi/__pycache__/__init__.cpython-312.pyc,,
348
+ pip/_vendor/certifi/__pycache__/__main__.cpython-312.pyc,,
349
+ pip/_vendor/certifi/__pycache__/core.cpython-312.pyc,,
350
+ pip/_vendor/certifi/cacert.pem,sha256=swFTXcpJHZgU6ij6oyCsehnQ9dlCN5lvoKO1qTZDJRQ,278952
351
+ pip/_vendor/certifi/core.py,sha256=ZwiOsv-sD_ouU1ft8wy_xZ3LQ7UbcVzyqj2XNyrsZis,4279
352
+ pip/_vendor/chardet/__init__.py,sha256=57R-HSxj0PWmILMN0GFmUNqEMfrEVSamXyjD-W6_fbs,4797
353
+ pip/_vendor/chardet/__pycache__/__init__.cpython-312.pyc,,
354
+ pip/_vendor/chardet/__pycache__/big5freq.cpython-312.pyc,,
355
+ pip/_vendor/chardet/__pycache__/big5prober.cpython-312.pyc,,
356
+ pip/_vendor/chardet/__pycache__/chardistribution.cpython-312.pyc,,
357
+ pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-312.pyc,,
358
+ pip/_vendor/chardet/__pycache__/charsetprober.cpython-312.pyc,,
359
+ pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-312.pyc,,
360
+ pip/_vendor/chardet/__pycache__/codingstatemachinedict.cpython-312.pyc,,
361
+ pip/_vendor/chardet/__pycache__/cp949prober.cpython-312.pyc,,
362
+ pip/_vendor/chardet/__pycache__/enums.cpython-312.pyc,,
363
+ pip/_vendor/chardet/__pycache__/escprober.cpython-312.pyc,,
364
+ pip/_vendor/chardet/__pycache__/escsm.cpython-312.pyc,,
365
+ pip/_vendor/chardet/__pycache__/eucjpprober.cpython-312.pyc,,
366
+ pip/_vendor/chardet/__pycache__/euckrfreq.cpython-312.pyc,,
367
+ pip/_vendor/chardet/__pycache__/euckrprober.cpython-312.pyc,,
368
+ pip/_vendor/chardet/__pycache__/euctwfreq.cpython-312.pyc,,
369
+ pip/_vendor/chardet/__pycache__/euctwprober.cpython-312.pyc,,
370
+ pip/_vendor/chardet/__pycache__/gb2312freq.cpython-312.pyc,,
371
+ pip/_vendor/chardet/__pycache__/gb2312prober.cpython-312.pyc,,
372
+ pip/_vendor/chardet/__pycache__/hebrewprober.cpython-312.pyc,,
373
+ pip/_vendor/chardet/__pycache__/jisfreq.cpython-312.pyc,,
374
+ pip/_vendor/chardet/__pycache__/johabfreq.cpython-312.pyc,,
375
+ pip/_vendor/chardet/__pycache__/johabprober.cpython-312.pyc,,
376
+ pip/_vendor/chardet/__pycache__/jpcntx.cpython-312.pyc,,
377
+ pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-312.pyc,,
378
+ pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-312.pyc,,
379
+ pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-312.pyc,,
380
+ pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-312.pyc,,
381
+ pip/_vendor/chardet/__pycache__/langrussianmodel.cpython-312.pyc,,
382
+ pip/_vendor/chardet/__pycache__/langthaimodel.cpython-312.pyc,,
383
+ pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-312.pyc,,
384
+ pip/_vendor/chardet/__pycache__/latin1prober.cpython-312.pyc,,
385
+ pip/_vendor/chardet/__pycache__/macromanprober.cpython-312.pyc,,
386
+ pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-312.pyc,,
387
+ pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-312.pyc,,
388
+ pip/_vendor/chardet/__pycache__/mbcssm.cpython-312.pyc,,
389
+ pip/_vendor/chardet/__pycache__/resultdict.cpython-312.pyc,,
390
+ pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-312.pyc,,
391
+ pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-312.pyc,,
392
+ pip/_vendor/chardet/__pycache__/sjisprober.cpython-312.pyc,,
393
+ pip/_vendor/chardet/__pycache__/universaldetector.cpython-312.pyc,,
394
+ pip/_vendor/chardet/__pycache__/utf1632prober.cpython-312.pyc,,
395
+ pip/_vendor/chardet/__pycache__/utf8prober.cpython-312.pyc,,
396
+ pip/_vendor/chardet/__pycache__/version.cpython-312.pyc,,
397
+ pip/_vendor/chardet/big5freq.py,sha256=ltcfP-3PjlNHCoo5e4a7C4z-2DhBTXRfY6jbMbB7P30,31274
398
+ pip/_vendor/chardet/big5prober.py,sha256=lPMfwCX6v2AaPgvFh_cSWZcgLDbWiFCHLZ_p9RQ9uxE,1763
399
+ pip/_vendor/chardet/chardistribution.py,sha256=13B8XUG4oXDuLdXvfbIWwLFeR-ZU21AqTS1zcdON8bU,10032
400
+ pip/_vendor/chardet/charsetgroupprober.py,sha256=UKK3SaIZB2PCdKSIS0gnvMtLR9JJX62M-fZJu3OlWyg,3915
401
+ pip/_vendor/chardet/charsetprober.py,sha256=L3t8_wIOov8em-vZWOcbkdsrwe43N6_gqNh5pH7WPd4,5420
402
+ pip/_vendor/chardet/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
403
+ pip/_vendor/chardet/cli/__pycache__/__init__.cpython-312.pyc,,
404
+ pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-312.pyc,,
405
+ pip/_vendor/chardet/cli/chardetect.py,sha256=zibMVg5RpKb-ME9_7EYG4ZM2Sf07NHcQzZ12U-rYJho,3242
406
+ pip/_vendor/chardet/codingstatemachine.py,sha256=K7k69sw3jY5DmTXoSJQVsUtFIQKYPQVOSJJhBuGv_yE,3732
407
+ pip/_vendor/chardet/codingstatemachinedict.py,sha256=0GY3Hi2qIZvDrOOJ3AtqppM1RsYxr_66ER4EHjuMiMc,542
408
+ pip/_vendor/chardet/cp949prober.py,sha256=0jKRV7fECuWI16rNnks0ZECKA1iZYCIEaP8A1ZvjUSI,1860
409
+ pip/_vendor/chardet/enums.py,sha256=TzECiZoCKNMqgwU76cPCeKWFBqaWvAdLMev5_bCkhY8,1683
410
+ pip/_vendor/chardet/escprober.py,sha256=Kho48X65xE0scFylIdeJjM2bcbvRvv0h0WUbMWrJD3A,4006
411
+ pip/_vendor/chardet/escsm.py,sha256=AqyXpA2FQFD7k-buBty_7itGEYkhmVa8X09NLRul3QM,12176
412
+ pip/_vendor/chardet/eucjpprober.py,sha256=5KYaM9fsxkRYzw1b5k0fL-j_-ezIw-ij9r97a9MHxLY,3934
413
+ pip/_vendor/chardet/euckrfreq.py,sha256=3mHuRvXfsq_QcQysDQFb8qSudvTiol71C6Ic2w57tKM,13566
414
+ pip/_vendor/chardet/euckrprober.py,sha256=hiFT6wM174GIwRvqDsIcuOc-dDsq2uPKMKbyV8-1Xnc,1753
415
+ pip/_vendor/chardet/euctwfreq.py,sha256=2alILE1Lh5eqiFJZjzRkMQXolNJRHY5oBQd-vmZYFFM,36913
416
+ pip/_vendor/chardet/euctwprober.py,sha256=NxbpNdBtU0VFI0bKfGfDkpP7S2_8_6FlO87dVH0ogws,1753
417
+ pip/_vendor/chardet/gb2312freq.py,sha256=49OrdXzD-HXqwavkqjo8Z7gvs58hONNzDhAyMENNkvY,20735
418
+ pip/_vendor/chardet/gb2312prober.py,sha256=KPEBueaSLSvBpFeINMu0D6TgHcR90e5PaQawifzF4o0,1759
419
+ pip/_vendor/chardet/hebrewprober.py,sha256=96T_Lj_OmW-fK7JrSHojYjyG3fsGgbzkoTNleZ3kfYE,14537
420
+ pip/_vendor/chardet/jisfreq.py,sha256=mm8tfrwqhpOd3wzZKS4NJqkYBQVcDfTM2JiQ5aW932E,25796
421
+ pip/_vendor/chardet/johabfreq.py,sha256=dBpOYG34GRX6SL8k_LbS9rxZPMjLjoMlgZ03Pz5Hmqc,42498
422
+ pip/_vendor/chardet/johabprober.py,sha256=O1Qw9nVzRnun7vZp4UZM7wvJSv9W941mEU9uDMnY3DU,1752
423
+ pip/_vendor/chardet/jpcntx.py,sha256=uhHrYWkLxE_rF5OkHKInm0HUsrjgKHHVQvtt3UcvotA,27055
424
+ pip/_vendor/chardet/langbulgarianmodel.py,sha256=vmbvYFP8SZkSxoBvLkFqKiH1sjma5ihk3PTpdy71Rr4,104562
425
+ pip/_vendor/chardet/langgreekmodel.py,sha256=JfB7bupjjJH2w3X_mYnQr9cJA_7EuITC2cRW13fUjeI,98484
426
+ pip/_vendor/chardet/langhebrewmodel.py,sha256=3HXHaLQPNAGcXnJjkIJfozNZLTvTJmf4W5Awi6zRRKc,98196
427
+ pip/_vendor/chardet/langhungarianmodel.py,sha256=WxbeQIxkv8YtApiNqxQcvj-tMycsoI4Xy-fwkDHpP_Y,101363
428
+ pip/_vendor/chardet/langrussianmodel.py,sha256=s395bTZ87ESTrZCOdgXbEjZ9P1iGPwCl_8xSsac_DLY,128035
429
+ pip/_vendor/chardet/langthaimodel.py,sha256=7bJlQitRpTnVGABmbSznHnJwOHDy3InkTvtFUx13WQI,102774
430
+ pip/_vendor/chardet/langturkishmodel.py,sha256=XY0eGdTIy4eQ9Xg1LVPZacb-UBhHBR-cq0IpPVHowKc,95372
431
+ pip/_vendor/chardet/latin1prober.py,sha256=p15EEmFbmQUwbKLC7lOJVGHEZwcG45ubEZYTGu01J5g,5380
432
+ pip/_vendor/chardet/macromanprober.py,sha256=9anfzmY6TBfUPDyBDOdY07kqmTHpZ1tK0jL-p1JWcOY,6077
433
+ pip/_vendor/chardet/mbcharsetprober.py,sha256=Wr04WNI4F3X_VxEverNG-H25g7u-MDDKlNt-JGj-_uU,3715
434
+ pip/_vendor/chardet/mbcsgroupprober.py,sha256=iRpaNBjV0DNwYPu_z6TiHgRpwYahiM7ztI_4kZ4Uz9A,2131
435
+ pip/_vendor/chardet/mbcssm.py,sha256=hUtPvDYgWDaA2dWdgLsshbwRfm3Q5YRlRogdmeRUNQw,30391
436
+ pip/_vendor/chardet/metadata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
437
+ pip/_vendor/chardet/metadata/__pycache__/__init__.cpython-312.pyc,,
438
+ pip/_vendor/chardet/metadata/__pycache__/languages.cpython-312.pyc,,
439
+ pip/_vendor/chardet/metadata/languages.py,sha256=FhvBIdZFxRQ-dTwkb_0madRKgVBCaUMQz9I5xqjE5iQ,13560
440
+ pip/_vendor/chardet/resultdict.py,sha256=ez4FRvN5KaSosJeJ2WzUyKdDdg35HDy_SSLPXKCdt5M,402
441
+ pip/_vendor/chardet/sbcharsetprober.py,sha256=-nd3F90i7GpXLjehLVHqVBE0KlWzGvQUPETLBNn4o6U,6400
442
+ pip/_vendor/chardet/sbcsgroupprober.py,sha256=gcgI0fOfgw_3YTClpbra_MNxwyEyJ3eUXraoLHYb59E,4137
443
+ pip/_vendor/chardet/sjisprober.py,sha256=aqQufMzRw46ZpFlzmYaYeT2-nzmKb-hmcrApppJ862k,4007
444
+ pip/_vendor/chardet/universaldetector.py,sha256=xYBrg4x0dd9WnT8qclfADVD9ondrUNkqPmvte1pa520,14848
445
+ pip/_vendor/chardet/utf1632prober.py,sha256=pw1epGdMj1hDGiCu1AHqqzOEfjX8MVdiW7O1BlT8-eQ,8505
446
+ pip/_vendor/chardet/utf8prober.py,sha256=8m08Ub5490H4jQ6LYXvFysGtgKoKsHUd2zH_i8_TnVw,2812
447
+ pip/_vendor/chardet/version.py,sha256=lGtJcxGM44Qz4Cbk4rbbmrKxnNr1-97U25TameLehZw,244
448
+ pip/_vendor/colorama/__init__.py,sha256=wePQA4U20tKgYARySLEC047ucNX-g8pRLpYBuiHlLb8,266
449
+ pip/_vendor/colorama/__pycache__/__init__.cpython-312.pyc,,
450
+ pip/_vendor/colorama/__pycache__/ansi.cpython-312.pyc,,
451
+ pip/_vendor/colorama/__pycache__/ansitowin32.cpython-312.pyc,,
452
+ pip/_vendor/colorama/__pycache__/initialise.cpython-312.pyc,,
453
+ pip/_vendor/colorama/__pycache__/win32.cpython-312.pyc,,
454
+ pip/_vendor/colorama/__pycache__/winterm.cpython-312.pyc,,
455
+ pip/_vendor/colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522
456
+ pip/_vendor/colorama/ansitowin32.py,sha256=vPNYa3OZbxjbuFyaVo0Tmhmy1FZ1lKMWCnT7odXpItk,11128
457
+ pip/_vendor/colorama/initialise.py,sha256=-hIny86ClXo39ixh5iSCfUIa2f_h_bgKRDW7gqs-KLU,3325
458
+ pip/_vendor/colorama/tests/__init__.py,sha256=MkgPAEzGQd-Rq0w0PZXSX2LadRWhUECcisJY8lSrm4Q,75
459
+ pip/_vendor/colorama/tests/__pycache__/__init__.cpython-312.pyc,,
460
+ pip/_vendor/colorama/tests/__pycache__/ansi_test.cpython-312.pyc,,
461
+ pip/_vendor/colorama/tests/__pycache__/ansitowin32_test.cpython-312.pyc,,
462
+ pip/_vendor/colorama/tests/__pycache__/initialise_test.cpython-312.pyc,,
463
+ pip/_vendor/colorama/tests/__pycache__/isatty_test.cpython-312.pyc,,
464
+ pip/_vendor/colorama/tests/__pycache__/utils.cpython-312.pyc,,
465
+ pip/_vendor/colorama/tests/__pycache__/winterm_test.cpython-312.pyc,,
466
+ pip/_vendor/colorama/tests/ansi_test.py,sha256=FeViDrUINIZcr505PAxvU4AjXz1asEiALs9GXMhwRaE,2839
467
+ pip/_vendor/colorama/tests/ansitowin32_test.py,sha256=RN7AIhMJ5EqDsYaCjVo-o4u8JzDD4ukJbmevWKS70rY,10678
468
+ pip/_vendor/colorama/tests/initialise_test.py,sha256=BbPy-XfyHwJ6zKozuQOvNvQZzsx9vdb_0bYXn7hsBTc,6741
469
+ pip/_vendor/colorama/tests/isatty_test.py,sha256=Pg26LRpv0yQDB5Ac-sxgVXG7hsA1NYvapFgApZfYzZg,1866
470
+ pip/_vendor/colorama/tests/utils.py,sha256=1IIRylG39z5-dzq09R_ngufxyPZxgldNbrxKxUGwGKE,1079
471
+ pip/_vendor/colorama/tests/winterm_test.py,sha256=qoWFPEjym5gm2RuMwpf3pOis3a5r_PJZFCzK254JL8A,3709
472
+ pip/_vendor/colorama/win32.py,sha256=YQOKwMTwtGBbsY4dL5HYTvwTeP9wIQra5MvPNddpxZs,6181
473
+ pip/_vendor/colorama/winterm.py,sha256=XCQFDHjPi6AHYNdZwy0tA02H-Jh48Jp-HvCjeLeLp3U,7134
474
+ pip/_vendor/distlib/__init__.py,sha256=acgfseOC55dNrVAzaBKpUiH3Z6V7Q1CaxsiQ3K7pC-E,581
475
+ pip/_vendor/distlib/__pycache__/__init__.cpython-312.pyc,,
476
+ pip/_vendor/distlib/__pycache__/compat.cpython-312.pyc,,
477
+ pip/_vendor/distlib/__pycache__/database.cpython-312.pyc,,
478
+ pip/_vendor/distlib/__pycache__/index.cpython-312.pyc,,
479
+ pip/_vendor/distlib/__pycache__/locators.cpython-312.pyc,,
480
+ pip/_vendor/distlib/__pycache__/manifest.cpython-312.pyc,,
481
+ pip/_vendor/distlib/__pycache__/markers.cpython-312.pyc,,
482
+ pip/_vendor/distlib/__pycache__/metadata.cpython-312.pyc,,
483
+ pip/_vendor/distlib/__pycache__/resources.cpython-312.pyc,,
484
+ pip/_vendor/distlib/__pycache__/scripts.cpython-312.pyc,,
485
+ pip/_vendor/distlib/__pycache__/util.cpython-312.pyc,,
486
+ pip/_vendor/distlib/__pycache__/version.cpython-312.pyc,,
487
+ pip/_vendor/distlib/__pycache__/wheel.cpython-312.pyc,,
488
+ pip/_vendor/distlib/compat.py,sha256=tfoMrj6tujk7G4UC2owL6ArgDuCKabgBxuJRGZSmpko,41259
489
+ pip/_vendor/distlib/database.py,sha256=o_mw0fAr93NDAHHHfqG54Y1Hi9Rkfrp2BX15XWZYK50,51697
490
+ pip/_vendor/distlib/index.py,sha256=HFiDG7LMoaBs829WuotrfIwcErOOExUOR_AeBtw_TCU,20834
491
+ pip/_vendor/distlib/locators.py,sha256=wNzG-zERzS_XGls-nBPVVyLRHa2skUlkn0-5n0trMWA,51991
492
+ pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811
493
+ pip/_vendor/distlib/markers.py,sha256=TpHHHLgkzyT7YHbwj-2i6weRaq-Ivy2-MUnrDkjau-U,5058
494
+ pip/_vendor/distlib/metadata.py,sha256=g_DIiu8nBXRzA-mWPRpatHGbmFZqaFoss7z9TG7QSUU,39801
495
+ pip/_vendor/distlib/resources.py,sha256=LwbPksc0A1JMbi6XnuPdMBUn83X7BPuFNWqPGEKI698,10820
496
+ pip/_vendor/distlib/scripts.py,sha256=BmkTKmiTk4m2cj-iueliatwz3ut_9SsABBW51vnQnZU,18102
497
+ pip/_vendor/distlib/t32.exe,sha256=a0GV5kCoWsMutvliiCKmIgV98eRZ33wXoS-XrqvJQVs,97792
498
+ pip/_vendor/distlib/t64-arm.exe,sha256=68TAa32V504xVBnufojh0PcenpR3U4wAqTqf-MZqbPw,182784
499
+ pip/_vendor/distlib/t64.exe,sha256=gaYY8hy4fbkHYTTnA4i26ct8IQZzkBG2pRdy0iyuBrc,108032
500
+ pip/_vendor/distlib/util.py,sha256=31dPXn3Rfat0xZLeVoFpuniyhe6vsbl9_QN-qd9Lhlk,66262
501
+ pip/_vendor/distlib/version.py,sha256=WG__LyAa2GwmA6qSoEJtvJE8REA1LZpbSizy8WvhJLk,23513
502
+ pip/_vendor/distlib/w32.exe,sha256=R4csx3-OGM9kL4aPIzQKRo5TfmRSHZo6QWyLhDhNBks,91648
503
+ pip/_vendor/distlib/w64-arm.exe,sha256=xdyYhKj0WDcVUOCb05blQYvzdYIKMbmJn2SZvzkcey4,168448
504
+ pip/_vendor/distlib/w64.exe,sha256=ejGf-rojoBfXseGLpya6bFTFPWRG21X5KvU8J5iU-K0,101888
505
+ pip/_vendor/distlib/wheel.py,sha256=Rgqs658VsJ3R2845qwnZD8XQryV2CzWw2mghwLvxxsI,43898
506
+ pip/_vendor/distro/__init__.py,sha256=2fHjF-SfgPvjyNZ1iHh_wjqWdR_Yo5ODHwZC0jLBPhc,981
507
+ pip/_vendor/distro/__main__.py,sha256=bu9d3TifoKciZFcqRBuygV3GSuThnVD_m2IK4cz96Vs,64
508
+ pip/_vendor/distro/__pycache__/__init__.cpython-312.pyc,,
509
+ pip/_vendor/distro/__pycache__/__main__.cpython-312.pyc,,
510
+ pip/_vendor/distro/__pycache__/distro.cpython-312.pyc,,
511
+ pip/_vendor/distro/distro.py,sha256=UZO1LjIhtFCMdlbiz39gj3raV-Amf3SBwzGzfApiMHw,49330
512
+ pip/_vendor/idna/__init__.py,sha256=KJQN1eQBr8iIK5SKrJ47lXvxG0BJ7Lm38W4zT0v_8lk,849
513
+ pip/_vendor/idna/__pycache__/__init__.cpython-312.pyc,,
514
+ pip/_vendor/idna/__pycache__/codec.cpython-312.pyc,,
515
+ pip/_vendor/idna/__pycache__/compat.cpython-312.pyc,,
516
+ pip/_vendor/idna/__pycache__/core.cpython-312.pyc,,
517
+ pip/_vendor/idna/__pycache__/idnadata.cpython-312.pyc,,
518
+ pip/_vendor/idna/__pycache__/intranges.cpython-312.pyc,,
519
+ pip/_vendor/idna/__pycache__/package_data.cpython-312.pyc,,
520
+ pip/_vendor/idna/__pycache__/uts46data.cpython-312.pyc,,
521
+ pip/_vendor/idna/codec.py,sha256=6ly5odKfqrytKT9_7UrlGklHnf1DSK2r9C6cSM4sa28,3374
522
+ pip/_vendor/idna/compat.py,sha256=0_sOEUMT4CVw9doD3vyRhX80X19PwqFoUBs7gWsFME4,321
523
+ pip/_vendor/idna/core.py,sha256=1JxchwKzkxBSn7R_oCE12oBu3eVux0VzdxolmIad24M,12950
524
+ pip/_vendor/idna/idnadata.py,sha256=xUjqKqiJV8Ho_XzBpAtv5JFoVPSupK-SUXvtjygUHqw,44375
525
+ pip/_vendor/idna/intranges.py,sha256=YBr4fRYuWH7kTKS2tXlFjM24ZF1Pdvcir-aywniInqg,1881
526
+ pip/_vendor/idna/package_data.py,sha256=C_jHJzmX8PI4xq0jpzmcTMxpb5lDsq4o5VyxQzlVrZE,21
527
+ pip/_vendor/idna/uts46data.py,sha256=zvjZU24s58_uAS850Mcd0NnD0X7_gCMAMjzWNIeUJdc,206539
528
+ pip/_vendor/msgpack/__init__.py,sha256=hyGhlnmcJkxryJBKC3X5FnEph375kQoL_mG8LZUuXgY,1132
529
+ pip/_vendor/msgpack/__pycache__/__init__.cpython-312.pyc,,
530
+ pip/_vendor/msgpack/__pycache__/exceptions.cpython-312.pyc,,
531
+ pip/_vendor/msgpack/__pycache__/ext.cpython-312.pyc,,
532
+ pip/_vendor/msgpack/__pycache__/fallback.cpython-312.pyc,,
533
+ pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081
534
+ pip/_vendor/msgpack/ext.py,sha256=C5MK8JhVYGYFWPvxsORsqZAnvOXefYQ57m1Ym0luW5M,6079
535
+ pip/_vendor/msgpack/fallback.py,sha256=tvNBHyxxFbuVlC8GZShETClJxjLiDMOja4XwwyvNm2g,34544
536
+ pip/_vendor/packaging/__about__.py,sha256=ugASIO2w1oUyH8_COqQ2X_s0rDhjbhQC3yJocD03h2c,661
537
+ pip/_vendor/packaging/__init__.py,sha256=b9Kk5MF7KxhhLgcDmiUWukN-LatWFxPdNug0joPhHSk,497
538
+ pip/_vendor/packaging/__pycache__/__about__.cpython-312.pyc,,
539
+ pip/_vendor/packaging/__pycache__/__init__.cpython-312.pyc,,
540
+ pip/_vendor/packaging/__pycache__/_manylinux.cpython-312.pyc,,
541
+ pip/_vendor/packaging/__pycache__/_musllinux.cpython-312.pyc,,
542
+ pip/_vendor/packaging/__pycache__/_structures.cpython-312.pyc,,
543
+ pip/_vendor/packaging/__pycache__/markers.cpython-312.pyc,,
544
+ pip/_vendor/packaging/__pycache__/requirements.cpython-312.pyc,,
545
+ pip/_vendor/packaging/__pycache__/specifiers.cpython-312.pyc,,
546
+ pip/_vendor/packaging/__pycache__/tags.cpython-312.pyc,,
547
+ pip/_vendor/packaging/__pycache__/utils.cpython-312.pyc,,
548
+ pip/_vendor/packaging/__pycache__/version.cpython-312.pyc,,
549
+ pip/_vendor/packaging/_manylinux.py,sha256=XcbiXB-qcjv3bcohp6N98TMpOP4_j3m-iOA8ptK2GWY,11488
550
+ pip/_vendor/packaging/_musllinux.py,sha256=_KGgY_qc7vhMGpoqss25n2hiLCNKRtvz9mCrS7gkqyc,4378
551
+ pip/_vendor/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
552
+ pip/_vendor/packaging/markers.py,sha256=AJBOcY8Oq0kYc570KuuPTkvuqjAlhufaE2c9sCUbm64,8487
553
+ pip/_vendor/packaging/requirements.py,sha256=NtDlPBtojpn1IUC85iMjPNsUmufjpSlwnNA-Xb4m5NA,4676
554
+ pip/_vendor/packaging/specifiers.py,sha256=LRQ0kFsHrl5qfcFNEEJrIFYsnIHQUJXY9fIsakTrrqE,30110
555
+ pip/_vendor/packaging/tags.py,sha256=lmsnGNiJ8C4D_Pf9PbM0qgbZvD9kmB9lpZBQUZa3R_Y,15699
556
+ pip/_vendor/packaging/utils.py,sha256=dJjeat3BS-TYn1RrUFVwufUMasbtzLfYRoy_HXENeFQ,4200
557
+ pip/_vendor/packaging/version.py,sha256=_fLRNrFrxYcHVfyo8vk9j8s6JM8N_xsSxVFr6RJyco8,14665
558
+ pip/_vendor/pkg_resources/__init__.py,sha256=hTAeJCNYb7dJseIDVsYK3mPQep_gphj4tQh-bspX8bg,109364
559
+ pip/_vendor/pkg_resources/__pycache__/__init__.cpython-312.pyc,,
560
+ pip/_vendor/platformdirs/__init__.py,sha256=SkhEYVyC_HUHC6KX7n4M_6coyRMtEB38QMyOYIAX6Yk,20155
561
+ pip/_vendor/platformdirs/__main__.py,sha256=fVvSiTzr2-RM6IsjWjj4fkaOtDOgDhUWv6sA99do4CQ,1476
562
+ pip/_vendor/platformdirs/__pycache__/__init__.cpython-312.pyc,,
563
+ pip/_vendor/platformdirs/__pycache__/__main__.cpython-312.pyc,,
564
+ pip/_vendor/platformdirs/__pycache__/android.cpython-312.pyc,,
565
+ pip/_vendor/platformdirs/__pycache__/api.cpython-312.pyc,,
566
+ pip/_vendor/platformdirs/__pycache__/macos.cpython-312.pyc,,
567
+ pip/_vendor/platformdirs/__pycache__/unix.cpython-312.pyc,,
568
+ pip/_vendor/platformdirs/__pycache__/version.cpython-312.pyc,,
569
+ pip/_vendor/platformdirs/__pycache__/windows.cpython-312.pyc,,
570
+ pip/_vendor/platformdirs/android.py,sha256=y_EEMKwYl2-bzYBDovksSn8m76on0Lda8eyJksVQE9U,7211
571
+ pip/_vendor/platformdirs/api.py,sha256=jWtX06jAJytYrkJDOqEls97mCkyHRSZkoqUlbMK5Qew,7132
572
+ pip/_vendor/platformdirs/macos.py,sha256=LueVOoVgGWDBwQb8OFwXkVKfVn33CM1Lkwf1-A86tRQ,3678
573
+ pip/_vendor/platformdirs/unix.py,sha256=22JhR8ZY0aLxSVCFnKrc6f1iz6Gv42K24Daj7aTjfSg,8809
574
+ pip/_vendor/platformdirs/version.py,sha256=mavZTQIJIXfdewEaSTn7EWrNfPZWeRofb-74xqW5f2M,160
575
+ pip/_vendor/platformdirs/windows.py,sha256=4TtbPGoWG2PRgI11uquDa7eRk8TcxvnUNuuMGZItnXc,9573
576
+ pip/_vendor/pygments/__init__.py,sha256=6AuDljQtvf89DTNUyWM7k3oUlP_lq70NU-INKKteOBY,2983
577
+ pip/_vendor/pygments/__main__.py,sha256=es8EKMvXj5yToIfQ-pf3Dv5TnIeeM6sME0LW-n4ecHo,353
578
+ pip/_vendor/pygments/__pycache__/__init__.cpython-312.pyc,,
579
+ pip/_vendor/pygments/__pycache__/__main__.cpython-312.pyc,,
580
+ pip/_vendor/pygments/__pycache__/cmdline.cpython-312.pyc,,
581
+ pip/_vendor/pygments/__pycache__/console.cpython-312.pyc,,
582
+ pip/_vendor/pygments/__pycache__/filter.cpython-312.pyc,,
583
+ pip/_vendor/pygments/__pycache__/formatter.cpython-312.pyc,,
584
+ pip/_vendor/pygments/__pycache__/lexer.cpython-312.pyc,,
585
+ pip/_vendor/pygments/__pycache__/modeline.cpython-312.pyc,,
586
+ pip/_vendor/pygments/__pycache__/plugin.cpython-312.pyc,,
587
+ pip/_vendor/pygments/__pycache__/regexopt.cpython-312.pyc,,
588
+ pip/_vendor/pygments/__pycache__/scanner.cpython-312.pyc,,
589
+ pip/_vendor/pygments/__pycache__/sphinxext.cpython-312.pyc,,
590
+ pip/_vendor/pygments/__pycache__/style.cpython-312.pyc,,
591
+ pip/_vendor/pygments/__pycache__/token.cpython-312.pyc,,
592
+ pip/_vendor/pygments/__pycache__/unistring.cpython-312.pyc,,
593
+ pip/_vendor/pygments/__pycache__/util.cpython-312.pyc,,
594
+ pip/_vendor/pygments/cmdline.py,sha256=byxYJp9gnjVeyhRlZ3UTMgo_LhkXh1afvN8wJBtAcc8,23685
595
+ pip/_vendor/pygments/console.py,sha256=2wZ5W-U6TudJD1_NLUwjclMpbomFM91lNv11_60sfGY,1697
596
+ pip/_vendor/pygments/filter.py,sha256=j5aLM9a9wSx6eH1oy473oSkJ02hGWNptBlVo4s1g_30,1938
597
+ pip/_vendor/pygments/filters/__init__.py,sha256=h_koYkUFo-FFUxjs564JHUAz7O3yJpVwI6fKN3MYzG0,40386
598
+ pip/_vendor/pygments/filters/__pycache__/__init__.cpython-312.pyc,,
599
+ pip/_vendor/pygments/formatter.py,sha256=J9OL9hXLJKZk7moUgKwpjW9HNf4WlJFg_o_-Z_S_tTY,4178
600
+ pip/_vendor/pygments/formatters/__init__.py,sha256=_xgAcdFKr0QNYwh_i98AU9hvfP3X2wAkhElFcRRF3Uo,5424
601
+ pip/_vendor/pygments/formatters/__pycache__/__init__.cpython-312.pyc,,
602
+ pip/_vendor/pygments/formatters/__pycache__/_mapping.cpython-312.pyc,,
603
+ pip/_vendor/pygments/formatters/__pycache__/bbcode.cpython-312.pyc,,
604
+ pip/_vendor/pygments/formatters/__pycache__/groff.cpython-312.pyc,,
605
+ pip/_vendor/pygments/formatters/__pycache__/html.cpython-312.pyc,,
606
+ pip/_vendor/pygments/formatters/__pycache__/img.cpython-312.pyc,,
607
+ pip/_vendor/pygments/formatters/__pycache__/irc.cpython-312.pyc,,
608
+ pip/_vendor/pygments/formatters/__pycache__/latex.cpython-312.pyc,,
609
+ pip/_vendor/pygments/formatters/__pycache__/other.cpython-312.pyc,,
610
+ pip/_vendor/pygments/formatters/__pycache__/pangomarkup.cpython-312.pyc,,
611
+ pip/_vendor/pygments/formatters/__pycache__/rtf.cpython-312.pyc,,
612
+ pip/_vendor/pygments/formatters/__pycache__/svg.cpython-312.pyc,,
613
+ pip/_vendor/pygments/formatters/__pycache__/terminal.cpython-312.pyc,,
614
+ pip/_vendor/pygments/formatters/__pycache__/terminal256.cpython-312.pyc,,
615
+ pip/_vendor/pygments/formatters/_mapping.py,sha256=1Cw37FuQlNacnxRKmtlPX4nyLoX9_ttko5ZwscNUZZ4,4176
616
+ pip/_vendor/pygments/formatters/bbcode.py,sha256=r1b7wzWTJouADDLh-Z11iRi4iQxD0JKJ1qHl6mOYxsA,3314
617
+ pip/_vendor/pygments/formatters/groff.py,sha256=xy8Zf3tXOo6MWrXh7yPGWx3lVEkg_DhY4CxmsDb0IVo,5094
618
+ pip/_vendor/pygments/formatters/html.py,sha256=PIzAyilNqaTzSSP2slDG2VDLE3qNioWy2rgtSSoviuI,35610
619
+ pip/_vendor/pygments/formatters/img.py,sha256=XKXmg2_XONrR4mtq2jfEU8XCsoln3VSGTw-UYiEokys,21938
620
+ pip/_vendor/pygments/formatters/irc.py,sha256=Ep-m8jd3voFO6Fv57cUGFmz6JVA67IEgyiBOwv0N4a0,4981
621
+ pip/_vendor/pygments/formatters/latex.py,sha256=FGzJ-YqSTE8z_voWPdzvLY5Tq8jE_ygjGjM6dXZJ8-k,19351
622
+ pip/_vendor/pygments/formatters/other.py,sha256=gPxkk5BdAzWTCgbEHg1lpLi-1F6ZPh5A_aotgLXHnzg,5073
623
+ pip/_vendor/pygments/formatters/pangomarkup.py,sha256=6LKnQc8yh49f802bF0sPvbzck4QivMYqqoXAPaYP8uU,2212
624
+ pip/_vendor/pygments/formatters/rtf.py,sha256=aA0v_psW6KZI3N18TKDifxeL6mcF8EDXcPXDWI4vhVQ,5014
625
+ pip/_vendor/pygments/formatters/svg.py,sha256=dQONWypbzfvzGCDtdp3M_NJawScJvM2DiHbx1k-ww7g,7335
626
+ pip/_vendor/pygments/formatters/terminal.py,sha256=FG-rpjRpFmNpiGB4NzIucvxq6sQIXB3HOTo2meTKtrU,4674
627
+ pip/_vendor/pygments/formatters/terminal256.py,sha256=13SJ3D5pFdqZ9zROE6HbWnBDwHvOGE8GlsmqGhprRp4,11753
628
+ pip/_vendor/pygments/lexer.py,sha256=2BpqLlT2ExvOOi7vnjK5nB4Fp-m52ldiPaXMox5uwug,34618
629
+ pip/_vendor/pygments/lexers/__init__.py,sha256=j5KEi5O_VQ5GS59H49l-10gzUOkWKxlwGeVMlGO2MMk,12130
630
+ pip/_vendor/pygments/lexers/__pycache__/__init__.cpython-312.pyc,,
631
+ pip/_vendor/pygments/lexers/__pycache__/_mapping.cpython-312.pyc,,
632
+ pip/_vendor/pygments/lexers/__pycache__/python.cpython-312.pyc,,
633
+ pip/_vendor/pygments/lexers/_mapping.py,sha256=Hts4r_ZQ8icftGM7gkBPeED5lyVSv4affFgXYE6Ap04,72281
634
+ pip/_vendor/pygments/lexers/python.py,sha256=c7jnmKFU9DLxTJW0UbwXt6Z9FJqbBlVsWA1Qr9xSA_w,53424
635
+ pip/_vendor/pygments/modeline.py,sha256=eF2vO4LpOGoPvIKKkbPfnyut8hT4UiebZPpb-BYGQdI,986
636
+ pip/_vendor/pygments/plugin.py,sha256=j1Fh310RbV2DQ9nvkmkqvlj38gdyuYKllLnGxbc8sJM,2591
637
+ pip/_vendor/pygments/regexopt.py,sha256=jg1ALogcYGU96TQS9isBl6dCrvw5y5--BP_K-uFk_8s,3072
638
+ pip/_vendor/pygments/scanner.py,sha256=b_nu5_f3HCgSdp5S_aNRBQ1MSCm4ZjDwec2OmTRickw,3092
639
+ pip/_vendor/pygments/sphinxext.py,sha256=wBFYm180qea9JKt__UzhRlNRNhczPDFDaqGD21sbuso,6882
640
+ pip/_vendor/pygments/style.py,sha256=C4qyoJrUTkq-OV3iO-8Vz3UtWYpJwSTdh5_vlGCGdNQ,6257
641
+ pip/_vendor/pygments/styles/__init__.py,sha256=he7HjQx7sC0d2kfTVLjUs0J15mtToJM6M1brwIm9--Q,3700
642
+ pip/_vendor/pygments/styles/__pycache__/__init__.cpython-312.pyc,,
643
+ pip/_vendor/pygments/token.py,sha256=seNsmcch9OEHXYirh8Ool7w8xDhfNTbLj5rHAC-gc_o,6184
644
+ pip/_vendor/pygments/unistring.py,sha256=FaUfG14NBJEKLQoY9qj6JYeXrpYcLmKulghdxOGFaOc,63223
645
+ pip/_vendor/pygments/util.py,sha256=AEVY0qonyyEMgv4Do2dINrrqUAwUk2XYSqHM650uzek,10230
646
+ pip/_vendor/pyparsing/__init__.py,sha256=9m1JbE2JTLdBG0Mb6B0lEaZj181Wx5cuPXZpsbHEYgE,9116
647
+ pip/_vendor/pyparsing/__pycache__/__init__.cpython-312.pyc,,
648
+ pip/_vendor/pyparsing/__pycache__/actions.cpython-312.pyc,,
649
+ pip/_vendor/pyparsing/__pycache__/common.cpython-312.pyc,,
650
+ pip/_vendor/pyparsing/__pycache__/core.cpython-312.pyc,,
651
+ pip/_vendor/pyparsing/__pycache__/exceptions.cpython-312.pyc,,
652
+ pip/_vendor/pyparsing/__pycache__/helpers.cpython-312.pyc,,
653
+ pip/_vendor/pyparsing/__pycache__/results.cpython-312.pyc,,
654
+ pip/_vendor/pyparsing/__pycache__/testing.cpython-312.pyc,,
655
+ pip/_vendor/pyparsing/__pycache__/unicode.cpython-312.pyc,,
656
+ pip/_vendor/pyparsing/__pycache__/util.cpython-312.pyc,,
657
+ pip/_vendor/pyparsing/actions.py,sha256=05uaIPOznJPQ7VgRdmGCmG4sDnUPtwgv5qOYIqbL2UY,6567
658
+ pip/_vendor/pyparsing/common.py,sha256=p-3c83E5-DjlkF35G0O9-kjQRpoejP-2_z0hxZ-eol4,13387
659
+ pip/_vendor/pyparsing/core.py,sha256=yvuRlLpXSF8mgk-QhiW3OVLqD9T0rsj9tbibhRH4Yaw,224445
660
+ pip/_vendor/pyparsing/diagram/__init__.py,sha256=nxmDOoYF9NXuLaGYy01tKFjkNReWJlrGFuJNWEiTo84,24215
661
+ pip/_vendor/pyparsing/diagram/__pycache__/__init__.cpython-312.pyc,,
662
+ pip/_vendor/pyparsing/exceptions.py,sha256=6Jc6W1eDZBzyFu1J0YrcdNFVBC-RINujZmveSnB8Rxw,9523
663
+ pip/_vendor/pyparsing/helpers.py,sha256=BZJHCA8SS0pYio30KGQTc9w2qMOaK4YpZ7hcvHbnTgk,38646
664
+ pip/_vendor/pyparsing/results.py,sha256=9dyqQ-w3MjfmxWbFt8KEPU6IfXeyRdoWp2Og802rUQY,26692
665
+ pip/_vendor/pyparsing/testing.py,sha256=eJncg0p83zm1FTPvM9auNT6oavIvXaibmRFDf1qmwkY,13488
666
+ pip/_vendor/pyparsing/unicode.py,sha256=fAPdsJiARFbkPAih6NkYry0dpj4jPqelGVMlE4wWFW8,10646
667
+ pip/_vendor/pyparsing/util.py,sha256=vTMzTdwSDyV8d_dSgquUTdWgBFoA_W30nfxEJDsshRQ,8670
668
+ pip/_vendor/pyproject_hooks/__init__.py,sha256=kCehmy0UaBa9oVMD7ZIZrnswfnP3LXZ5lvnNJAL5JBM,491
669
+ pip/_vendor/pyproject_hooks/__pycache__/__init__.cpython-312.pyc,,
670
+ pip/_vendor/pyproject_hooks/__pycache__/_compat.cpython-312.pyc,,
671
+ pip/_vendor/pyproject_hooks/__pycache__/_impl.cpython-312.pyc,,
672
+ pip/_vendor/pyproject_hooks/_compat.py,sha256=by6evrYnqkisiM-MQcvOKs5bgDMzlOSgZqRHNqf04zE,138
673
+ pip/_vendor/pyproject_hooks/_impl.py,sha256=61GJxzQip0IInhuO69ZI5GbNQ82XEDUB_1Gg5_KtUoc,11920
674
+ pip/_vendor/pyproject_hooks/_in_process/__init__.py,sha256=9gQATptbFkelkIy0OfWFEACzqxXJMQDWCH9rBOAZVwQ,546
675
+ pip/_vendor/pyproject_hooks/_in_process/__pycache__/__init__.cpython-312.pyc,,
676
+ pip/_vendor/pyproject_hooks/_in_process/__pycache__/_in_process.cpython-312.pyc,,
677
+ pip/_vendor/pyproject_hooks/_in_process/_in_process.py,sha256=m2b34c917IW5o-Q_6TYIHlsK9lSUlNiyrITTUH_zwew,10927
678
+ pip/_vendor/requests/__init__.py,sha256=owujob4dk45Siy4EYtbCKR6wcFph7E04a_v_OuAacBA,5169
679
+ pip/_vendor/requests/__pycache__/__init__.cpython-312.pyc,,
680
+ pip/_vendor/requests/__pycache__/__version__.cpython-312.pyc,,
681
+ pip/_vendor/requests/__pycache__/_internal_utils.cpython-312.pyc,,
682
+ pip/_vendor/requests/__pycache__/adapters.cpython-312.pyc,,
683
+ pip/_vendor/requests/__pycache__/api.cpython-312.pyc,,
684
+ pip/_vendor/requests/__pycache__/auth.cpython-312.pyc,,
685
+ pip/_vendor/requests/__pycache__/certs.cpython-312.pyc,,
686
+ pip/_vendor/requests/__pycache__/compat.cpython-312.pyc,,
687
+ pip/_vendor/requests/__pycache__/cookies.cpython-312.pyc,,
688
+ pip/_vendor/requests/__pycache__/exceptions.cpython-312.pyc,,
689
+ pip/_vendor/requests/__pycache__/help.cpython-312.pyc,,
690
+ pip/_vendor/requests/__pycache__/hooks.cpython-312.pyc,,
691
+ pip/_vendor/requests/__pycache__/models.cpython-312.pyc,,
692
+ pip/_vendor/requests/__pycache__/packages.cpython-312.pyc,,
693
+ pip/_vendor/requests/__pycache__/sessions.cpython-312.pyc,,
694
+ pip/_vendor/requests/__pycache__/status_codes.cpython-312.pyc,,
695
+ pip/_vendor/requests/__pycache__/structures.cpython-312.pyc,,
696
+ pip/_vendor/requests/__pycache__/utils.cpython-312.pyc,,
697
+ pip/_vendor/requests/__version__.py,sha256=ssI3Ezt7PaxgkOW45GhtwPUclo_SO_ygtIm4A74IOfw,435
698
+ pip/_vendor/requests/_internal_utils.py,sha256=nMQymr4hs32TqVo5AbCrmcJEhvPUh7xXlluyqwslLiQ,1495
699
+ pip/_vendor/requests/adapters.py,sha256=idj6cZcId3L5xNNeJ7ieOLtw3awJk5A64xUfetHwq3M,19697
700
+ pip/_vendor/requests/api.py,sha256=q61xcXq4tmiImrvcSVLTbFyCiD2F-L_-hWKGbz4y8vg,6449
701
+ pip/_vendor/requests/auth.py,sha256=h-HLlVx9j8rKV5hfSAycP2ApOSglTz77R0tz7qCbbEE,10187
702
+ pip/_vendor/requests/certs.py,sha256=PVPooB0jP5hkZEULSCwC074532UFbR2Ptgu0I5zwmCs,575
703
+ pip/_vendor/requests/compat.py,sha256=IhK9quyX0RRuWTNcg6d2JGSAOUbM6mym2p_2XjLTwf4,1286
704
+ pip/_vendor/requests/cookies.py,sha256=kD3kNEcCj-mxbtf5fJsSaT86eGoEYpD3X0CSgpzl7BM,18560
705
+ pip/_vendor/requests/exceptions.py,sha256=FA-_kVwBZ2jhXauRctN_ewHVK25b-fj0Azyz1THQ0Kk,3823
706
+ pip/_vendor/requests/help.py,sha256=FnAAklv8MGm_qb2UilDQgS6l0cUttiCFKUjx0zn2XNA,3879
707
+ pip/_vendor/requests/hooks.py,sha256=CiuysiHA39V5UfcCBXFIx83IrDpuwfN9RcTUgv28ftQ,733
708
+ pip/_vendor/requests/models.py,sha256=dDZ-iThotky-Noq9yy97cUEJhr3wnY6mv-xR_ePg_lk,35288
709
+ pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695
710
+ pip/_vendor/requests/sessions.py,sha256=-LvTzrPtetSTrR3buxu4XhdgMrJFLB1q5D7P--L2Xhw,30373
711
+ pip/_vendor/requests/status_codes.py,sha256=FvHmT5uH-_uimtRz5hH9VCbt7VV-Nei2J9upbej6j8g,4235
712
+ pip/_vendor/requests/structures.py,sha256=-IbmhVz06S-5aPSZuUthZ6-6D9XOjRuTXHOabY041XM,2912
713
+ pip/_vendor/requests/utils.py,sha256=kOPn0qYD6xRTzaxbqTdYiSInBZHl6379AJsyIgzYGLY,33460
714
+ pip/_vendor/resolvelib/__init__.py,sha256=h509TdEcpb5-44JonaU3ex2TM15GVBLjM9CNCPwnTTs,537
715
+ pip/_vendor/resolvelib/__pycache__/__init__.cpython-312.pyc,,
716
+ pip/_vendor/resolvelib/__pycache__/providers.cpython-312.pyc,,
717
+ pip/_vendor/resolvelib/__pycache__/reporters.cpython-312.pyc,,
718
+ pip/_vendor/resolvelib/__pycache__/resolvers.cpython-312.pyc,,
719
+ pip/_vendor/resolvelib/__pycache__/structs.cpython-312.pyc,,
720
+ pip/_vendor/resolvelib/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
721
+ pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-312.pyc,,
722
+ pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-312.pyc,,
723
+ pip/_vendor/resolvelib/compat/collections_abc.py,sha256=uy8xUZ-NDEw916tugUXm8HgwCGiMO0f-RcdnpkfXfOs,156
724
+ pip/_vendor/resolvelib/providers.py,sha256=fuuvVrCetu5gsxPB43ERyjfO8aReS3rFQHpDgiItbs4,5871
725
+ pip/_vendor/resolvelib/reporters.py,sha256=TSbRmWzTc26w0ggsV1bxVpeWDB8QNIre6twYl7GIZBE,1601
726
+ pip/_vendor/resolvelib/resolvers.py,sha256=G8rsLZSq64g5VmIq-lB7UcIJ1gjAxIQJmTF4REZleQ0,20511
727
+ pip/_vendor/resolvelib/structs.py,sha256=0_1_XO8z_CLhegP3Vpf9VJ3zJcfLm0NOHRM-i0Ykz3o,4963
728
+ pip/_vendor/rich/__init__.py,sha256=dRxjIL-SbFVY0q3IjSMrfgBTHrm1LZDgLOygVBwiYZc,6090
729
+ pip/_vendor/rich/__main__.py,sha256=TT8sb9PTnsnKhhrGuHkLN0jdN0dtKhtPkEr9CidDbPM,8478
730
+ pip/_vendor/rich/__pycache__/__init__.cpython-312.pyc,,
731
+ pip/_vendor/rich/__pycache__/__main__.cpython-312.pyc,,
732
+ pip/_vendor/rich/__pycache__/_cell_widths.cpython-312.pyc,,
733
+ pip/_vendor/rich/__pycache__/_emoji_codes.cpython-312.pyc,,
734
+ pip/_vendor/rich/__pycache__/_emoji_replace.cpython-312.pyc,,
735
+ pip/_vendor/rich/__pycache__/_export_format.cpython-312.pyc,,
736
+ pip/_vendor/rich/__pycache__/_extension.cpython-312.pyc,,
737
+ pip/_vendor/rich/__pycache__/_fileno.cpython-312.pyc,,
738
+ pip/_vendor/rich/__pycache__/_inspect.cpython-312.pyc,,
739
+ pip/_vendor/rich/__pycache__/_log_render.cpython-312.pyc,,
740
+ pip/_vendor/rich/__pycache__/_loop.cpython-312.pyc,,
741
+ pip/_vendor/rich/__pycache__/_null_file.cpython-312.pyc,,
742
+ pip/_vendor/rich/__pycache__/_palettes.cpython-312.pyc,,
743
+ pip/_vendor/rich/__pycache__/_pick.cpython-312.pyc,,
744
+ pip/_vendor/rich/__pycache__/_ratio.cpython-312.pyc,,
745
+ pip/_vendor/rich/__pycache__/_spinners.cpython-312.pyc,,
746
+ pip/_vendor/rich/__pycache__/_stack.cpython-312.pyc,,
747
+ pip/_vendor/rich/__pycache__/_timer.cpython-312.pyc,,
748
+ pip/_vendor/rich/__pycache__/_win32_console.cpython-312.pyc,,
749
+ pip/_vendor/rich/__pycache__/_windows.cpython-312.pyc,,
750
+ pip/_vendor/rich/__pycache__/_windows_renderer.cpython-312.pyc,,
751
+ pip/_vendor/rich/__pycache__/_wrap.cpython-312.pyc,,
752
+ pip/_vendor/rich/__pycache__/abc.cpython-312.pyc,,
753
+ pip/_vendor/rich/__pycache__/align.cpython-312.pyc,,
754
+ pip/_vendor/rich/__pycache__/ansi.cpython-312.pyc,,
755
+ pip/_vendor/rich/__pycache__/bar.cpython-312.pyc,,
756
+ pip/_vendor/rich/__pycache__/box.cpython-312.pyc,,
757
+ pip/_vendor/rich/__pycache__/cells.cpython-312.pyc,,
758
+ pip/_vendor/rich/__pycache__/color.cpython-312.pyc,,
759
+ pip/_vendor/rich/__pycache__/color_triplet.cpython-312.pyc,,
760
+ pip/_vendor/rich/__pycache__/columns.cpython-312.pyc,,
761
+ pip/_vendor/rich/__pycache__/console.cpython-312.pyc,,
762
+ pip/_vendor/rich/__pycache__/constrain.cpython-312.pyc,,
763
+ pip/_vendor/rich/__pycache__/containers.cpython-312.pyc,,
764
+ pip/_vendor/rich/__pycache__/control.cpython-312.pyc,,
765
+ pip/_vendor/rich/__pycache__/default_styles.cpython-312.pyc,,
766
+ pip/_vendor/rich/__pycache__/diagnose.cpython-312.pyc,,
767
+ pip/_vendor/rich/__pycache__/emoji.cpython-312.pyc,,
768
+ pip/_vendor/rich/__pycache__/errors.cpython-312.pyc,,
769
+ pip/_vendor/rich/__pycache__/file_proxy.cpython-312.pyc,,
770
+ pip/_vendor/rich/__pycache__/filesize.cpython-312.pyc,,
771
+ pip/_vendor/rich/__pycache__/highlighter.cpython-312.pyc,,
772
+ pip/_vendor/rich/__pycache__/json.cpython-312.pyc,,
773
+ pip/_vendor/rich/__pycache__/jupyter.cpython-312.pyc,,
774
+ pip/_vendor/rich/__pycache__/layout.cpython-312.pyc,,
775
+ pip/_vendor/rich/__pycache__/live.cpython-312.pyc,,
776
+ pip/_vendor/rich/__pycache__/live_render.cpython-312.pyc,,
777
+ pip/_vendor/rich/__pycache__/logging.cpython-312.pyc,,
778
+ pip/_vendor/rich/__pycache__/markup.cpython-312.pyc,,
779
+ pip/_vendor/rich/__pycache__/measure.cpython-312.pyc,,
780
+ pip/_vendor/rich/__pycache__/padding.cpython-312.pyc,,
781
+ pip/_vendor/rich/__pycache__/pager.cpython-312.pyc,,
782
+ pip/_vendor/rich/__pycache__/palette.cpython-312.pyc,,
783
+ pip/_vendor/rich/__pycache__/panel.cpython-312.pyc,,
784
+ pip/_vendor/rich/__pycache__/pretty.cpython-312.pyc,,
785
+ pip/_vendor/rich/__pycache__/progress.cpython-312.pyc,,
786
+ pip/_vendor/rich/__pycache__/progress_bar.cpython-312.pyc,,
787
+ pip/_vendor/rich/__pycache__/prompt.cpython-312.pyc,,
788
+ pip/_vendor/rich/__pycache__/protocol.cpython-312.pyc,,
789
+ pip/_vendor/rich/__pycache__/region.cpython-312.pyc,,
790
+ pip/_vendor/rich/__pycache__/repr.cpython-312.pyc,,
791
+ pip/_vendor/rich/__pycache__/rule.cpython-312.pyc,,
792
+ pip/_vendor/rich/__pycache__/scope.cpython-312.pyc,,
793
+ pip/_vendor/rich/__pycache__/screen.cpython-312.pyc,,
794
+ pip/_vendor/rich/__pycache__/segment.cpython-312.pyc,,
795
+ pip/_vendor/rich/__pycache__/spinner.cpython-312.pyc,,
796
+ pip/_vendor/rich/__pycache__/status.cpython-312.pyc,,
797
+ pip/_vendor/rich/__pycache__/style.cpython-312.pyc,,
798
+ pip/_vendor/rich/__pycache__/styled.cpython-312.pyc,,
799
+ pip/_vendor/rich/__pycache__/syntax.cpython-312.pyc,,
800
+ pip/_vendor/rich/__pycache__/table.cpython-312.pyc,,
801
+ pip/_vendor/rich/__pycache__/terminal_theme.cpython-312.pyc,,
802
+ pip/_vendor/rich/__pycache__/text.cpython-312.pyc,,
803
+ pip/_vendor/rich/__pycache__/theme.cpython-312.pyc,,
804
+ pip/_vendor/rich/__pycache__/themes.cpython-312.pyc,,
805
+ pip/_vendor/rich/__pycache__/traceback.cpython-312.pyc,,
806
+ pip/_vendor/rich/__pycache__/tree.cpython-312.pyc,,
807
+ pip/_vendor/rich/_cell_widths.py,sha256=2n4EiJi3X9sqIq0O16kUZ_zy6UYMd3xFfChlKfnW1Hc,10096
808
+ pip/_vendor/rich/_emoji_codes.py,sha256=hu1VL9nbVdppJrVoijVshRlcRRe_v3dju3Mmd2sKZdY,140235
809
+ pip/_vendor/rich/_emoji_replace.py,sha256=n-kcetsEUx2ZUmhQrfeMNc-teeGhpuSQ5F8VPBsyvDo,1064
810
+ pip/_vendor/rich/_export_format.py,sha256=qxgV3nKnXQu1hfbnRVswPYy-AwIg1X0LSC47cK5s8jk,2100
811
+ pip/_vendor/rich/_extension.py,sha256=Xt47QacCKwYruzjDi-gOBq724JReDj9Cm9xUi5fr-34,265
812
+ pip/_vendor/rich/_fileno.py,sha256=HWZxP5C2ajMbHryvAQZseflVfQoGzsKOHzKGsLD8ynQ,799
813
+ pip/_vendor/rich/_inspect.py,sha256=oZJGw31e64dwXSCmrDnvZbwVb1ZKhWfU8wI3VWohjJk,9695
814
+ pip/_vendor/rich/_log_render.py,sha256=1ByI0PA1ZpxZY3CGJOK54hjlq4X-Bz_boIjIqCd8Kns,3225
815
+ pip/_vendor/rich/_loop.py,sha256=hV_6CLdoPm0va22Wpw4zKqM0RYsz3TZxXj0PoS-9eDQ,1236
816
+ pip/_vendor/rich/_null_file.py,sha256=tGSXk_v-IZmbj1GAzHit8A3kYIQMiCpVsCFfsC-_KJ4,1387
817
+ pip/_vendor/rich/_palettes.py,sha256=cdev1JQKZ0JvlguV9ipHgznTdnvlIzUFDBb0It2PzjI,7063
818
+ pip/_vendor/rich/_pick.py,sha256=evDt8QN4lF5CiwrUIXlOJCntitBCOsI3ZLPEIAVRLJU,423
819
+ pip/_vendor/rich/_ratio.py,sha256=2lLSliL025Y-YMfdfGbutkQDevhcyDqc-DtUYW9mU70,5472
820
+ pip/_vendor/rich/_spinners.py,sha256=U2r1_g_1zSjsjiUdAESc2iAMc3i4ri_S8PYP6kQ5z1I,19919
821
+ pip/_vendor/rich/_stack.py,sha256=-C8OK7rxn3sIUdVwxZBBpeHhIzX0eI-VM3MemYfaXm0,351
822
+ pip/_vendor/rich/_timer.py,sha256=zelxbT6oPFZnNrwWPpc1ktUeAT-Vc4fuFcRZLQGLtMI,417
823
+ pip/_vendor/rich/_win32_console.py,sha256=P0vxI2fcndym1UU1S37XAzQzQnkyY7YqAKmxm24_gug,22820
824
+ pip/_vendor/rich/_windows.py,sha256=dvNl9TmfPzNVxiKk5WDFihErZ5796g2UC9-KGGyfXmk,1926
825
+ pip/_vendor/rich/_windows_renderer.py,sha256=t74ZL3xuDCP3nmTp9pH1L5LiI2cakJuQRQleHCJerlk,2783
826
+ pip/_vendor/rich/_wrap.py,sha256=xfV_9t0Sg6rzimmrDru8fCVmUlalYAcHLDfrJZnbbwQ,1840
827
+ pip/_vendor/rich/abc.py,sha256=ON-E-ZqSSheZ88VrKX2M3PXpFbGEUUZPMa_Af0l-4f0,890
828
+ pip/_vendor/rich/align.py,sha256=Ji-Yokfkhnfe_xMmr4ISjZB07TJXggBCOYoYa-HDAr8,10368
829
+ pip/_vendor/rich/ansi.py,sha256=iD6532QYqnBm6hADulKjrV8l8kFJ-9fEVooHJHH3hMg,6906
830
+ pip/_vendor/rich/bar.py,sha256=a7UD303BccRCrEhGjfMElpv5RFYIinaAhAuqYqhUvmw,3264
831
+ pip/_vendor/rich/box.py,sha256=FJ6nI3jD7h2XNFU138bJUt2HYmWOlRbltoCEuIAZhew,9842
832
+ pip/_vendor/rich/cells.py,sha256=627ztJs9zOL-38HJ7kXBerR-gT8KBfYC8UzEwMJDYYo,4509
833
+ pip/_vendor/rich/color.py,sha256=9Gh958U3f75WVdLTeC0U9nkGTn2n0wnojKpJ6jQEkIE,18224
834
+ pip/_vendor/rich/color_triplet.py,sha256=3lhQkdJbvWPoLDO-AnYImAWmJvV5dlgYNCVZ97ORaN4,1054
835
+ pip/_vendor/rich/columns.py,sha256=HUX0KcMm9dsKNi11fTbiM_h2iDtl8ySCaVcxlalEzq8,7131
836
+ pip/_vendor/rich/console.py,sha256=pDvkbLkvtZIMIwQx_jkZ-seyNl4zGBLviXoWXte9fwg,99218
837
+ pip/_vendor/rich/constrain.py,sha256=1VIPuC8AgtKWrcncQrjBdYqA3JVWysu6jZo1rrh7c7Q,1288
838
+ pip/_vendor/rich/containers.py,sha256=aKgm5UDHn5Nmui6IJaKdsZhbHClh_X7D-_Wg8Ehrr7s,5497
839
+ pip/_vendor/rich/control.py,sha256=DSkHTUQLorfSERAKE_oTAEUFefZnZp4bQb4q8rHbKws,6630
840
+ pip/_vendor/rich/default_styles.py,sha256=-Fe318kMVI_IwciK5POpThcO0-9DYJ67TZAN6DlmlmM,8082
841
+ pip/_vendor/rich/diagnose.py,sha256=an6uouwhKPAlvQhYpNNpGq9EJysfMIOvvCbO3oSoR24,972
842
+ pip/_vendor/rich/emoji.py,sha256=omTF9asaAnsM4yLY94eR_9dgRRSm1lHUszX20D1yYCQ,2501
843
+ pip/_vendor/rich/errors.py,sha256=5pP3Kc5d4QJ_c0KFsxrfyhjiPVe7J1zOqSFbFAzcV-Y,642
844
+ pip/_vendor/rich/file_proxy.py,sha256=Tl9THMDZ-Pk5Wm8sI1gGg_U5DhusmxD-FZ0fUbcU0W0,1683
845
+ pip/_vendor/rich/filesize.py,sha256=9fTLAPCAwHmBXdRv7KZU194jSgNrRb6Wx7RIoBgqeKY,2508
846
+ pip/_vendor/rich/highlighter.py,sha256=p3C1g4QYzezFKdR7NF9EhPbzQDvdPUhGRgSyGGEmPko,9584
847
+ pip/_vendor/rich/json.py,sha256=EYp9ucj-nDjYDkHCV6Mk1ve8nUOpuFLaW76X50Mis2M,5032
848
+ pip/_vendor/rich/jupyter.py,sha256=QyoKoE_8IdCbrtiSHp9TsTSNyTHY0FO5whE7jOTd9UE,3252
849
+ pip/_vendor/rich/layout.py,sha256=RFYL6HdCFsHf9WRpcvi3w-fpj-8O5dMZ8W96VdKNdbI,14007
850
+ pip/_vendor/rich/live.py,sha256=vZzYvu7fqwlv3Gthl2xiw1Dc_O80VlGcCV0DOHwCyDM,14273
851
+ pip/_vendor/rich/live_render.py,sha256=zElm3PrfSIvjOce28zETHMIUf9pFYSUA5o0AflgUP64,3667
852
+ pip/_vendor/rich/logging.py,sha256=uB-cB-3Q4bmXDLLpbOWkmFviw-Fde39zyMV6tKJ2WHQ,11903
853
+ pip/_vendor/rich/markup.py,sha256=xzF4uAafiEeEYDJYt_vUnJOGoTU8RrH-PH7WcWYXjCg,8198
854
+ pip/_vendor/rich/measure.py,sha256=HmrIJX8sWRTHbgh8MxEay_83VkqNW_70s8aKP5ZcYI8,5305
855
+ pip/_vendor/rich/padding.py,sha256=kTFGsdGe0os7tXLnHKpwTI90CXEvrceeZGCshmJy5zw,4970
856
+ pip/_vendor/rich/pager.py,sha256=SO_ETBFKbg3n_AgOzXm41Sv36YxXAyI3_R-KOY2_uSc,828
857
+ pip/_vendor/rich/palette.py,sha256=lInvR1ODDT2f3UZMfL1grq7dY_pDdKHw4bdUgOGaM4Y,3396
858
+ pip/_vendor/rich/panel.py,sha256=wGMe40J8KCGgQoM0LyjRErmGIkv2bsYA71RCXThD0xE,10574
859
+ pip/_vendor/rich/pretty.py,sha256=eLEYN9xVaMNuA6EJVYm4li7HdOHxCqmVKvnOqJpyFt0,35852
860
+ pip/_vendor/rich/progress.py,sha256=n4KF9vky8_5iYeXcyZPEvzyLplWlDvFLkM5JI0Bs08A,59706
861
+ pip/_vendor/rich/progress_bar.py,sha256=cEoBfkc3lLwqba4XKsUpy4vSQKDh2QQ5J2J94-ACFoo,8165
862
+ pip/_vendor/rich/prompt.py,sha256=x0mW-pIPodJM4ry6grgmmLrl8VZp99kqcmdnBe70YYA,11303
863
+ pip/_vendor/rich/protocol.py,sha256=5hHHDDNHckdk8iWH5zEbi-zuIVSF5hbU2jIo47R7lTE,1391
864
+ pip/_vendor/rich/region.py,sha256=rNT9xZrVZTYIXZC0NYn41CJQwYNbR-KecPOxTgQvB8Y,166
865
+ pip/_vendor/rich/repr.py,sha256=9Z8otOmM-tyxnyTodvXlectP60lwahjGiDTrbrxPSTg,4431
866
+ pip/_vendor/rich/rule.py,sha256=0fNaS_aERa3UMRc3T5WMpN_sumtDxfaor2y3of1ftBk,4602
867
+ pip/_vendor/rich/scope.py,sha256=TMUU8qo17thyqQCPqjDLYpg_UU1k5qVd-WwiJvnJVas,2843
868
+ pip/_vendor/rich/screen.py,sha256=YoeReESUhx74grqb0mSSb9lghhysWmFHYhsbMVQjXO8,1591
869
+ pip/_vendor/rich/segment.py,sha256=XLnJEFvcV3bjaVzMNUJiem3n8lvvI9TJ5PTu-IG2uTg,24247
870
+ pip/_vendor/rich/spinner.py,sha256=15koCmF0DQeD8-k28Lpt6X_zJQUlzEhgo_6A6uy47lc,4339
871
+ pip/_vendor/rich/status.py,sha256=gJsIXIZeSo3urOyxRUjs6VrhX5CZrA0NxIQ-dxhCnwo,4425
872
+ pip/_vendor/rich/style.py,sha256=3hiocH_4N8vwRm3-8yFWzM7tSwjjEven69XqWasSQwM,27073
873
+ pip/_vendor/rich/styled.py,sha256=eZNnzGrI4ki_54pgY3Oj0T-x3lxdXTYh4_ryDB24wBU,1258
874
+ pip/_vendor/rich/syntax.py,sha256=jgDiVCK6cpR0NmBOpZmIu-Ud4eaW7fHvjJZkDbjpcSA,35173
875
+ pip/_vendor/rich/table.py,sha256=-WzesL-VJKsaiDU3uyczpJMHy6VCaSewBYJwx8RudI8,39684
876
+ pip/_vendor/rich/terminal_theme.py,sha256=1j5-ufJfnvlAo5Qsi_ACZiXDmwMXzqgmFByObT9-yJY,3370
877
+ pip/_vendor/rich/text.py,sha256=_8JBlSau0c2z8ENOZMi1hJ7M1ZGY408E4-hXjHyyg1A,45525
878
+ pip/_vendor/rich/theme.py,sha256=belFJogzA0W0HysQabKaHOc3RWH2ko3fQAJhoN-AFdo,3777
879
+ pip/_vendor/rich/themes.py,sha256=0xgTLozfabebYtcJtDdC5QkX5IVUEaviqDUJJh4YVFk,102
880
+ pip/_vendor/rich/traceback.py,sha256=yCLVrCtyoFNENd9mkm2xeG3KmqkTwH9xpFOO7p2Bq0A,29604
881
+ pip/_vendor/rich/tree.py,sha256=BMbUYNjS9uodNPfvtY_odmU09GA5QzcMbQ5cJZhllQI,9169
882
+ pip/_vendor/six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549
883
+ pip/_vendor/tenacity/__init__.py,sha256=3kvAL6KClq8GFo2KFhmOzskRKSDQI-ubrlfZ8AQEEI0,20493
884
+ pip/_vendor/tenacity/__pycache__/__init__.cpython-312.pyc,,
885
+ pip/_vendor/tenacity/__pycache__/_asyncio.cpython-312.pyc,,
886
+ pip/_vendor/tenacity/__pycache__/_utils.cpython-312.pyc,,
887
+ pip/_vendor/tenacity/__pycache__/after.cpython-312.pyc,,
888
+ pip/_vendor/tenacity/__pycache__/before.cpython-312.pyc,,
889
+ pip/_vendor/tenacity/__pycache__/before_sleep.cpython-312.pyc,,
890
+ pip/_vendor/tenacity/__pycache__/nap.cpython-312.pyc,,
891
+ pip/_vendor/tenacity/__pycache__/retry.cpython-312.pyc,,
892
+ pip/_vendor/tenacity/__pycache__/stop.cpython-312.pyc,,
893
+ pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-312.pyc,,
894
+ pip/_vendor/tenacity/__pycache__/wait.cpython-312.pyc,,
895
+ pip/_vendor/tenacity/_asyncio.py,sha256=Qi6wgQsGa9MQibYRy3OXqcDQswIZZ00dLOoSUGN-6o8,3551
896
+ pip/_vendor/tenacity/_utils.py,sha256=ubs6a7sxj3JDNRKWCyCU2j5r1CB7rgyONgZzYZq6D_4,2179
897
+ pip/_vendor/tenacity/after.py,sha256=S5NCISScPeIrKwIeXRwdJl3kV9Q4nqZfnNPDx6Hf__g,1682
898
+ pip/_vendor/tenacity/before.py,sha256=dIZE9gmBTffisfwNkK0F1xFwGPV41u5GK70UY4Pi5Kc,1562
899
+ pip/_vendor/tenacity/before_sleep.py,sha256=YmpgN9Y7HGlH97U24vvq_YWb5deaK4_DbiD8ZuFmy-E,2372
900
+ pip/_vendor/tenacity/nap.py,sha256=fRWvnz1aIzbIq9Ap3gAkAZgDH6oo5zxMrU6ZOVByq0I,1383
901
+ pip/_vendor/tenacity/retry.py,sha256=jrzD_mxA5mSTUEdiYB7SHpxltjhPSYZSnSRATb-ggRc,8746
902
+ pip/_vendor/tenacity/stop.py,sha256=YMJs7ZgZfND65PRLqlGB_agpfGXlemx_5Hm4PKnBqpQ,3086
903
+ pip/_vendor/tenacity/tornadoweb.py,sha256=po29_F1Mt8qZpsFjX7EVwAT0ydC_NbVia9gVi7R_wXA,2142
904
+ pip/_vendor/tenacity/wait.py,sha256=3FcBJoCDgym12_dN6xfK8C1gROY0Hn4NSI2u8xv50uE,8024
905
+ pip/_vendor/tomli/__init__.py,sha256=JhUwV66DB1g4Hvt1UQCVMdfCu-IgAV8FXmvDU9onxd4,396
906
+ pip/_vendor/tomli/__pycache__/__init__.cpython-312.pyc,,
907
+ pip/_vendor/tomli/__pycache__/_parser.cpython-312.pyc,,
908
+ pip/_vendor/tomli/__pycache__/_re.cpython-312.pyc,,
909
+ pip/_vendor/tomli/__pycache__/_types.cpython-312.pyc,,
910
+ pip/_vendor/tomli/_parser.py,sha256=g9-ENaALS-B8dokYpCuzUFalWlog7T-SIYMjLZSWrtM,22633
911
+ pip/_vendor/tomli/_re.py,sha256=dbjg5ChZT23Ka9z9DHOXfdtSpPwUfdgMXnj8NOoly-w,2943
912
+ pip/_vendor/tomli/_types.py,sha256=-GTG2VUqkpxwMqzmVO4F7ybKddIbAnuAHXfmWQcTi3Q,254
913
+ pip/_vendor/typing_extensions.py,sha256=EWpcpyQnVmc48E9fSyPGs-vXgHcAk9tQABQIxmMsCGk,111130
914
+ pip/_vendor/urllib3/__init__.py,sha256=iXLcYiJySn0GNbWOOZDDApgBL1JgP44EZ8i1760S8Mc,3333
915
+ pip/_vendor/urllib3/__pycache__/__init__.cpython-312.pyc,,
916
+ pip/_vendor/urllib3/__pycache__/_collections.cpython-312.pyc,,
917
+ pip/_vendor/urllib3/__pycache__/_version.cpython-312.pyc,,
918
+ pip/_vendor/urllib3/__pycache__/connection.cpython-312.pyc,,
919
+ pip/_vendor/urllib3/__pycache__/connectionpool.cpython-312.pyc,,
920
+ pip/_vendor/urllib3/__pycache__/exceptions.cpython-312.pyc,,
921
+ pip/_vendor/urllib3/__pycache__/fields.cpython-312.pyc,,
922
+ pip/_vendor/urllib3/__pycache__/filepost.cpython-312.pyc,,
923
+ pip/_vendor/urllib3/__pycache__/poolmanager.cpython-312.pyc,,
924
+ pip/_vendor/urllib3/__pycache__/request.cpython-312.pyc,,
925
+ pip/_vendor/urllib3/__pycache__/response.cpython-312.pyc,,
926
+ pip/_vendor/urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811
927
+ pip/_vendor/urllib3/_version.py,sha256=6zoYnDykPLfe92fHqXalH8SxhWVl31yYLCP0lDri_SA,64
928
+ pip/_vendor/urllib3/connection.py,sha256=92k9td_y4PEiTIjNufCUa1NzMB3J3w0LEdyokYgXnW8,20300
929
+ pip/_vendor/urllib3/connectionpool.py,sha256=ItVDasDnPRPP9R8bNxY7tPBlC724nJ9nlxVgXG_SLbI,39990
930
+ pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
931
+ pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-312.pyc,,
932
+ pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-312.pyc,,
933
+ pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-312.pyc,,
934
+ pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-312.pyc,,
935
+ pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-312.pyc,,
936
+ pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-312.pyc,,
937
+ pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-312.pyc,,
938
+ pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
939
+ pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
940
+ pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-312.pyc,,
941
+ pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-312.pyc,,
942
+ pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-312.pyc,,
943
+ pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=4Xk64qIkPBt09A5q-RIFUuDhNc9mXilVapm7WnYnzRw,17632
944
+ pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922
945
+ pip/_vendor/urllib3/contrib/appengine.py,sha256=VR68eAVE137lxTgjBDwCna5UiBZTOKa01Aj_-5BaCz4,11036
946
+ pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=NlfkW7WMdW8ziqudopjHoW299og1BTWi0IeIibquFwk,4528
947
+ pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=hDJh4MhyY_p-oKlFcYcQaVQRDv6GMmBGuW9yjxyeejM,17081
948
+ pip/_vendor/urllib3/contrib/securetransport.py,sha256=yhZdmVjY6PI6EeFbp7qYOp6-vp1Rkv2NMuOGaEj7pmc,34448
949
+ pip/_vendor/urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097
950
+ pip/_vendor/urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217
951
+ pip/_vendor/urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579
952
+ pip/_vendor/urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440
953
+ pip/_vendor/urllib3/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
954
+ pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-312.pyc,,
955
+ pip/_vendor/urllib3/packages/__pycache__/six.cpython-312.pyc,,
956
+ pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
957
+ pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-312.pyc,,
958
+ pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-312.pyc,,
959
+ pip/_vendor/urllib3/packages/backports/__pycache__/weakref_finalize.cpython-312.pyc,,
960
+ pip/_vendor/urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417
961
+ pip/_vendor/urllib3/packages/backports/weakref_finalize.py,sha256=tRCal5OAhNSRyb0DhHp-38AtIlCsRP8BxF3NX-6rqIA,5343
962
+ pip/_vendor/urllib3/packages/six.py,sha256=b9LM0wBXv7E7SrbCjAm4wwN-hrH-iNxv18LgWNMMKPo,34665
963
+ pip/_vendor/urllib3/poolmanager.py,sha256=0i8cJgrqupza67IBPZ_u9jXvnSxr5UBlVEiUqdkPtYI,19752
964
+ pip/_vendor/urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985
965
+ pip/_vendor/urllib3/response.py,sha256=fmDJAFkG71uFTn-sVSTh2Iw0WmcXQYqkbRjihvwBjU8,30641
966
+ pip/_vendor/urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155
967
+ pip/_vendor/urllib3/util/__pycache__/__init__.cpython-312.pyc,,
968
+ pip/_vendor/urllib3/util/__pycache__/connection.cpython-312.pyc,,
969
+ pip/_vendor/urllib3/util/__pycache__/proxy.cpython-312.pyc,,
970
+ pip/_vendor/urllib3/util/__pycache__/queue.cpython-312.pyc,,
971
+ pip/_vendor/urllib3/util/__pycache__/request.cpython-312.pyc,,
972
+ pip/_vendor/urllib3/util/__pycache__/response.cpython-312.pyc,,
973
+ pip/_vendor/urllib3/util/__pycache__/retry.cpython-312.pyc,,
974
+ pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-312.pyc,,
975
+ pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-312.pyc,,
976
+ pip/_vendor/urllib3/util/__pycache__/ssltransport.cpython-312.pyc,,
977
+ pip/_vendor/urllib3/util/__pycache__/timeout.cpython-312.pyc,,
978
+ pip/_vendor/urllib3/util/__pycache__/url.cpython-312.pyc,,
979
+ pip/_vendor/urllib3/util/__pycache__/wait.cpython-312.pyc,,
980
+ pip/_vendor/urllib3/util/connection.py,sha256=5Lx2B1PW29KxBn2T0xkN1CBgRBa3gGVJBKoQoRogEVk,4901
981
+ pip/_vendor/urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605
982
+ pip/_vendor/urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498
983
+ pip/_vendor/urllib3/util/request.py,sha256=C0OUt2tcU6LRiQJ7YYNP9GvPrSvl7ziIBekQ-5nlBZk,3997
984
+ pip/_vendor/urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510
985
+ pip/_vendor/urllib3/util/retry.py,sha256=4laWh0HpwGijLiBmdBIYtbhYekQnNzzhx2W9uys0RHA,22003
986
+ pip/_vendor/urllib3/util/ssl_.py,sha256=X4-AqW91aYPhPx6-xbf66yHFQKbqqfC_5Zt4WkLX1Hc,17177
987
+ pip/_vendor/urllib3/util/ssl_match_hostname.py,sha256=Ir4cZVEjmAk8gUAIHWSi7wtOO83UCYABY2xFD1Ql_WA,5758
988
+ pip/_vendor/urllib3/util/ssltransport.py,sha256=NA-u5rMTrDFDFC8QzRKUEKMG0561hOD4qBTr3Z4pv6E,6895
989
+ pip/_vendor/urllib3/util/timeout.py,sha256=cwq4dMk87mJHSBktK1miYJ-85G-3T3RmT20v7SFCpno,10168
990
+ pip/_vendor/urllib3/util/url.py,sha256=lCAE7M5myA8EDdW0sJuyyZhVB9K_j38ljWhHAnFaWoE,14296
991
+ pip/_vendor/urllib3/util/wait.py,sha256=fOX0_faozG2P7iVojQoE1mbydweNyTcm-hXEfFrTtLI,5403
992
+ pip/_vendor/vendor.txt,sha256=EyWEHCgXKFKiE8Mku6LONUDLF6UwDwjX1NP2ccKLrLo,475
993
+ pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579
994
+ pip/_vendor/webencodings/__pycache__/__init__.cpython-312.pyc,,
995
+ pip/_vendor/webencodings/__pycache__/labels.cpython-312.pyc,,
996
+ pip/_vendor/webencodings/__pycache__/mklabels.cpython-312.pyc,,
997
+ pip/_vendor/webencodings/__pycache__/tests.cpython-312.pyc,,
998
+ pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-312.pyc,,
999
+ pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979
1000
+ pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305
1001
+ pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563
1002
+ pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307
1003
+ pip/py.typed,sha256=EBVvvPRTn_eIpz5e5QztSCdrMX7Qwd7VP93RSoIlZ2I,286
myenv/Lib/site-packages/pip-23.2.1.dist-info/REQUESTED ADDED
File without changes
myenv/Lib/site-packages/pip-23.2.1.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: bdist_wheel (0.40.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
myenv/Lib/site-packages/pip-23.2.1.dist-info/entry_points.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ [console_scripts]
2
+ pip = pip._internal.cli.main:main
3
+ pip3 = pip._internal.cli.main:main
4
+ pip3.11 = pip._internal.cli.main:main
myenv/Lib/site-packages/pip-23.2.1.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ pip
myenv/Lib/site-packages/pip/__init__.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import List, Optional
2
+
3
+ __version__ = "23.2.1"
4
+
5
+
6
+ def main(args: Optional[List[str]] = None) -> int:
7
+ """This is an internal API only meant for use by pip's own console scripts.
8
+
9
+ For additional details, see https://github.com/pypa/pip/issues/7498.
10
+ """
11
+ from pip._internal.utils.entrypoints import _wrapper
12
+
13
+ return _wrapper(args)
myenv/Lib/site-packages/pip/__main__.py ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+
4
+ # Remove '' and current working directory from the first entry
5
+ # of sys.path, if present to avoid using current directory
6
+ # in pip commands check, freeze, install, list and show,
7
+ # when invoked as python -m pip <command>
8
+ if sys.path[0] in ("", os.getcwd()):
9
+ sys.path.pop(0)
10
+
11
+ # If we are running from a wheel, add the wheel to sys.path
12
+ # This allows the usage python pip-*.whl/pip install pip-*.whl
13
+ if __package__ == "":
14
+ # __file__ is pip-*.whl/pip/__main__.py
15
+ # first dirname call strips of '/__main__.py', second strips off '/pip'
16
+ # Resulting path is the name of the wheel itself
17
+ # Add that to sys.path so we can import pip
18
+ path = os.path.dirname(os.path.dirname(__file__))
19
+ sys.path.insert(0, path)
20
+
21
+ if __name__ == "__main__":
22
+ from pip._internal.cli.main import main as _main
23
+
24
+ sys.exit(_main())
myenv/Lib/site-packages/pip/__pip-runner__.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Execute exactly this copy of pip, within a different environment.
2
+
3
+ This file is named as it is, to ensure that this module can't be imported via
4
+ an import statement.
5
+ """
6
+
7
+ # /!\ This version compatibility check section must be Python 2 compatible. /!\
8
+
9
+ import sys
10
+
11
+ # Copied from setup.py
12
+ PYTHON_REQUIRES = (3, 7)
13
+
14
+
15
+ def version_str(version): # type: ignore
16
+ return ".".join(str(v) for v in version)
17
+
18
+
19
+ if sys.version_info[:2] < PYTHON_REQUIRES:
20
+ raise SystemExit(
21
+ "This version of pip does not support python {} (requires >={}).".format(
22
+ version_str(sys.version_info[:2]), version_str(PYTHON_REQUIRES)
23
+ )
24
+ )
25
+
26
+ # From here on, we can use Python 3 features, but the syntax must remain
27
+ # Python 2 compatible.
28
+
29
+ import runpy # noqa: E402
30
+ from importlib.machinery import PathFinder # noqa: E402
31
+ from os.path import dirname # noqa: E402
32
+
33
+ PIP_SOURCES_ROOT = dirname(dirname(__file__))
34
+
35
+
36
+ class PipImportRedirectingFinder:
37
+ @classmethod
38
+ def find_spec(self, fullname, path=None, target=None): # type: ignore
39
+ if fullname != "pip":
40
+ return None
41
+
42
+ spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target)
43
+ assert spec, (PIP_SOURCES_ROOT, fullname)
44
+ return spec
45
+
46
+
47
+ sys.meta_path.insert(0, PipImportRedirectingFinder())
48
+
49
+ assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module"
50
+ runpy.run_module("pip", run_name="__main__", alter_sys=True)
myenv/Lib/site-packages/pip/_internal/__init__.py ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import List, Optional
2
+
3
+ import pip._internal.utils.inject_securetransport # noqa
4
+ from pip._internal.utils import _log
5
+
6
+ # init_logging() must be called before any call to logging.getLogger()
7
+ # which happens at import of most modules.
8
+ _log.init_logging()
9
+
10
+
11
+ def main(args: (Optional[List[str]]) = None) -> int:
12
+ """This is preserved for old console scripts that may still be referencing
13
+ it.
14
+
15
+ For additional details, see https://github.com/pypa/pip/issues/7498.
16
+ """
17
+ from pip._internal.utils.entrypoints import _wrapper
18
+
19
+ return _wrapper(args)
myenv/Lib/site-packages/pip/_internal/build_env.py ADDED
@@ -0,0 +1,311 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Build Environment used for isolation during sdist building
2
+ """
3
+
4
+ import logging
5
+ import os
6
+ import pathlib
7
+ import site
8
+ import sys
9
+ import textwrap
10
+ from collections import OrderedDict
11
+ from types import TracebackType
12
+ from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union
13
+
14
+ from pip._vendor.certifi import where
15
+ from pip._vendor.packaging.requirements import Requirement
16
+ from pip._vendor.packaging.version import Version
17
+
18
+ from pip import __file__ as pip_location
19
+ from pip._internal.cli.spinners import open_spinner
20
+ from pip._internal.locations import get_platlib, get_purelib, get_scheme
21
+ from pip._internal.metadata import get_default_environment, get_environment
22
+ from pip._internal.utils.subprocess import call_subprocess
23
+ from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
24
+
25
+ if TYPE_CHECKING:
26
+ from pip._internal.index.package_finder import PackageFinder
27
+
28
+ logger = logging.getLogger(__name__)
29
+
30
+
31
+ def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
32
+ return (a, b) if a != b else (a,)
33
+
34
+
35
+ class _Prefix:
36
+ def __init__(self, path: str) -> None:
37
+ self.path = path
38
+ self.setup = False
39
+ scheme = get_scheme("", prefix=path)
40
+ self.bin_dir = scheme.scripts
41
+ self.lib_dirs = _dedup(scheme.purelib, scheme.platlib)
42
+
43
+
44
+ def get_runnable_pip() -> str:
45
+ """Get a file to pass to a Python executable, to run the currently-running pip.
46
+
47
+ This is used to run a pip subprocess, for installing requirements into the build
48
+ environment.
49
+ """
50
+ source = pathlib.Path(pip_location).resolve().parent
51
+
52
+ if not source.is_dir():
53
+ # This would happen if someone is using pip from inside a zip file. In that
54
+ # case, we can use that directly.
55
+ return str(source)
56
+
57
+ return os.fsdecode(source / "__pip-runner__.py")
58
+
59
+
60
+ def _get_system_sitepackages() -> Set[str]:
61
+ """Get system site packages
62
+
63
+ Usually from site.getsitepackages,
64
+ but fallback on `get_purelib()/get_platlib()` if unavailable
65
+ (e.g. in a virtualenv created by virtualenv<20)
66
+
67
+ Returns normalized set of strings.
68
+ """
69
+ if hasattr(site, "getsitepackages"):
70
+ system_sites = site.getsitepackages()
71
+ else:
72
+ # virtualenv < 20 overwrites site.py without getsitepackages
73
+ # fallback on get_purelib/get_platlib.
74
+ # this is known to miss things, but shouldn't in the cases
75
+ # where getsitepackages() has been removed (inside a virtualenv)
76
+ system_sites = [get_purelib(), get_platlib()]
77
+ return {os.path.normcase(path) for path in system_sites}
78
+
79
+
80
+ class BuildEnvironment:
81
+ """Creates and manages an isolated environment to install build deps"""
82
+
83
+ def __init__(self) -> None:
84
+ temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
85
+
86
+ self._prefixes = OrderedDict(
87
+ (name, _Prefix(os.path.join(temp_dir.path, name)))
88
+ for name in ("normal", "overlay")
89
+ )
90
+
91
+ self._bin_dirs: List[str] = []
92
+ self._lib_dirs: List[str] = []
93
+ for prefix in reversed(list(self._prefixes.values())):
94
+ self._bin_dirs.append(prefix.bin_dir)
95
+ self._lib_dirs.extend(prefix.lib_dirs)
96
+
97
+ # Customize site to:
98
+ # - ensure .pth files are honored
99
+ # - prevent access to system site packages
100
+ system_sites = _get_system_sitepackages()
101
+
102
+ self._site_dir = os.path.join(temp_dir.path, "site")
103
+ if not os.path.exists(self._site_dir):
104
+ os.mkdir(self._site_dir)
105
+ with open(
106
+ os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
107
+ ) as fp:
108
+ fp.write(
109
+ textwrap.dedent(
110
+ """
111
+ import os, site, sys
112
+
113
+ # First, drop system-sites related paths.
114
+ original_sys_path = sys.path[:]
115
+ known_paths = set()
116
+ for path in {system_sites!r}:
117
+ site.addsitedir(path, known_paths=known_paths)
118
+ system_paths = set(
119
+ os.path.normcase(path)
120
+ for path in sys.path[len(original_sys_path):]
121
+ )
122
+ original_sys_path = [
123
+ path for path in original_sys_path
124
+ if os.path.normcase(path) not in system_paths
125
+ ]
126
+ sys.path = original_sys_path
127
+
128
+ # Second, add lib directories.
129
+ # ensuring .pth file are processed.
130
+ for path in {lib_dirs!r}:
131
+ assert not path in sys.path
132
+ site.addsitedir(path)
133
+ """
134
+ ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
135
+ )
136
+
137
+ def __enter__(self) -> None:
138
+ self._save_env = {
139
+ name: os.environ.get(name, None)
140
+ for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
141
+ }
142
+
143
+ path = self._bin_dirs[:]
144
+ old_path = self._save_env["PATH"]
145
+ if old_path:
146
+ path.extend(old_path.split(os.pathsep))
147
+
148
+ pythonpath = [self._site_dir]
149
+
150
+ os.environ.update(
151
+ {
152
+ "PATH": os.pathsep.join(path),
153
+ "PYTHONNOUSERSITE": "1",
154
+ "PYTHONPATH": os.pathsep.join(pythonpath),
155
+ }
156
+ )
157
+
158
+ def __exit__(
159
+ self,
160
+ exc_type: Optional[Type[BaseException]],
161
+ exc_val: Optional[BaseException],
162
+ exc_tb: Optional[TracebackType],
163
+ ) -> None:
164
+ for varname, old_value in self._save_env.items():
165
+ if old_value is None:
166
+ os.environ.pop(varname, None)
167
+ else:
168
+ os.environ[varname] = old_value
169
+
170
+ def check_requirements(
171
+ self, reqs: Iterable[str]
172
+ ) -> Tuple[Set[Tuple[str, str]], Set[str]]:
173
+ """Return 2 sets:
174
+ - conflicting requirements: set of (installed, wanted) reqs tuples
175
+ - missing requirements: set of reqs
176
+ """
177
+ missing = set()
178
+ conflicting = set()
179
+ if reqs:
180
+ env = (
181
+ get_environment(self._lib_dirs)
182
+ if hasattr(self, "_lib_dirs")
183
+ else get_default_environment()
184
+ )
185
+ for req_str in reqs:
186
+ req = Requirement(req_str)
187
+ # We're explicitly evaluating with an empty extra value, since build
188
+ # environments are not provided any mechanism to select specific extras.
189
+ if req.marker is not None and not req.marker.evaluate({"extra": ""}):
190
+ continue
191
+ dist = env.get_distribution(req.name)
192
+ if not dist:
193
+ missing.add(req_str)
194
+ continue
195
+ if isinstance(dist.version, Version):
196
+ installed_req_str = f"{req.name}=={dist.version}"
197
+ else:
198
+ installed_req_str = f"{req.name}==={dist.version}"
199
+ if not req.specifier.contains(dist.version, prereleases=True):
200
+ conflicting.add((installed_req_str, req_str))
201
+ # FIXME: Consider direct URL?
202
+ return conflicting, missing
203
+
204
+ def install_requirements(
205
+ self,
206
+ finder: "PackageFinder",
207
+ requirements: Iterable[str],
208
+ prefix_as_string: str,
209
+ *,
210
+ kind: str,
211
+ ) -> None:
212
+ prefix = self._prefixes[prefix_as_string]
213
+ assert not prefix.setup
214
+ prefix.setup = True
215
+ if not requirements:
216
+ return
217
+ self._install_requirements(
218
+ get_runnable_pip(),
219
+ finder,
220
+ requirements,
221
+ prefix,
222
+ kind=kind,
223
+ )
224
+
225
+ @staticmethod
226
+ def _install_requirements(
227
+ pip_runnable: str,
228
+ finder: "PackageFinder",
229
+ requirements: Iterable[str],
230
+ prefix: _Prefix,
231
+ *,
232
+ kind: str,
233
+ ) -> None:
234
+ args: List[str] = [
235
+ sys.executable,
236
+ pip_runnable,
237
+ "install",
238
+ "--ignore-installed",
239
+ "--no-user",
240
+ "--prefix",
241
+ prefix.path,
242
+ "--no-warn-script-location",
243
+ ]
244
+ if logger.getEffectiveLevel() <= logging.DEBUG:
245
+ args.append("-v")
246
+ for format_control in ("no_binary", "only_binary"):
247
+ formats = getattr(finder.format_control, format_control)
248
+ args.extend(
249
+ (
250
+ "--" + format_control.replace("_", "-"),
251
+ ",".join(sorted(formats or {":none:"})),
252
+ )
253
+ )
254
+
255
+ index_urls = finder.index_urls
256
+ if index_urls:
257
+ args.extend(["-i", index_urls[0]])
258
+ for extra_index in index_urls[1:]:
259
+ args.extend(["--extra-index-url", extra_index])
260
+ else:
261
+ args.append("--no-index")
262
+ for link in finder.find_links:
263
+ args.extend(["--find-links", link])
264
+
265
+ for host in finder.trusted_hosts:
266
+ args.extend(["--trusted-host", host])
267
+ if finder.allow_all_prereleases:
268
+ args.append("--pre")
269
+ if finder.prefer_binary:
270
+ args.append("--prefer-binary")
271
+ args.append("--")
272
+ args.extend(requirements)
273
+ extra_environ = {"_PIP_STANDALONE_CERT": where()}
274
+ with open_spinner(f"Installing {kind}") as spinner:
275
+ call_subprocess(
276
+ args,
277
+ command_desc=f"pip subprocess to install {kind}",
278
+ spinner=spinner,
279
+ extra_environ=extra_environ,
280
+ )
281
+
282
+
283
+ class NoOpBuildEnvironment(BuildEnvironment):
284
+ """A no-op drop-in replacement for BuildEnvironment"""
285
+
286
+ def __init__(self) -> None:
287
+ pass
288
+
289
+ def __enter__(self) -> None:
290
+ pass
291
+
292
+ def __exit__(
293
+ self,
294
+ exc_type: Optional[Type[BaseException]],
295
+ exc_val: Optional[BaseException],
296
+ exc_tb: Optional[TracebackType],
297
+ ) -> None:
298
+ pass
299
+
300
+ def cleanup(self) -> None:
301
+ pass
302
+
303
+ def install_requirements(
304
+ self,
305
+ finder: "PackageFinder",
306
+ requirements: Iterable[str],
307
+ prefix_as_string: str,
308
+ *,
309
+ kind: str,
310
+ ) -> None:
311
+ raise NotImplementedError()
myenv/Lib/site-packages/pip/_internal/cache.py ADDED
@@ -0,0 +1,292 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Cache Management
2
+ """
3
+
4
+ import hashlib
5
+ import json
6
+ import logging
7
+ import os
8
+ from pathlib import Path
9
+ from typing import Any, Dict, List, Optional
10
+
11
+ from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
12
+ from pip._vendor.packaging.utils import canonicalize_name
13
+
14
+ from pip._internal.exceptions import InvalidWheelFilename
15
+ from pip._internal.models.direct_url import DirectUrl
16
+ from pip._internal.models.link import Link
17
+ from pip._internal.models.wheel import Wheel
18
+ from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
19
+ from pip._internal.utils.urls import path_to_url
20
+
21
+ logger = logging.getLogger(__name__)
22
+
23
+ ORIGIN_JSON_NAME = "origin.json"
24
+
25
+
26
def _hash_dict(d: Dict[str, str]) -> str:
    """Return a stable sha224 hex digest of *d*.

    The dict is serialized to canonical JSON (sorted keys, compact
    separators, ASCII-only) so equal dicts always hash identically.
    """
    canonical = json.dumps(
        d, sort_keys=True, separators=(",", ":"), ensure_ascii=True
    )
    return hashlib.sha224(canonical.encode("ascii")).hexdigest()
30
+
31
+
32
class Cache:
    """Abstract base that maps links to cache directories.

    :param cache_dir: The root of the cache ("" disables caching).
    """

    def __init__(self, cache_dir: str) -> None:
        super().__init__()
        assert not cache_dir or os.path.isabs(cache_dir)
        self.cache_dir = cache_dir or None

    def _get_cache_path_parts(self, link: Link) -> List[str]:
        """Return the path components to join onto ``cache_dir`` for *link*."""
        # Build a canonical cache key instead of reusing the raw URL: the
        # fragment may carry extra items we do not care about.
        key_parts = {"url": link.url_without_fragment}
        if link.hash_name is not None and link.hash is not None:
            key_parts[link.hash_name] = link.hash
        if link.subdirectory_fragment:
            key_parts["subdirectory"] = link.subdirectory_fragment

        # Ill-behaved sdists can build a different wheel depending on the
        # interpreter running setup.py without encoding the difference in
        # the compatibility tags, so the interpreter identity is part of
        # the key. https://github.com/pypa/pip/issues/7296
        key_parts["interpreter_name"] = interpreter_name()
        key_parts["interpreter_version"] = interpreter_version()

        # sha224 keeps the digest short; security properties similar to
        # sha256 are more than enough for a cache key.
        digest = _hash_dict(key_parts)

        # Nest directories so that no single level accumulates an
        # unbounded number of entries (some filesystems limit this).
        return [digest[:2], digest[2:4], digest[4:6], digest[6:]]

    def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
        if not (self.cache_dir and canonical_package_name and link):
            return []

        path = self.get_path_for_link(link)
        if not os.path.isdir(path):
            return []
        return [(entry, path) for entry in os.listdir(path)]

    def get_path_for_link(self, link: Link) -> str:
        """Return a directory to store cached items in for link."""
        raise NotImplementedError()

    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        """Return a link to a cached item if one exists, otherwise return
        the passed link unchanged.
        """
        raise NotImplementedError()
102
+
103
+
104
class SimpleWheelCache(Cache):
    """A cache of wheels for future installs."""

    def __init__(self, cache_dir: str) -> None:
        super().__init__(cache_dir)

    def get_path_for_link(self, link: Link) -> str:
        """Return the directory used to cache wheels built from *link*.

        A single sdist may produce many wheels, so each link gets its own
        directory, which is consulted when looking up cache hits.

        Only entries with plausible version numbers are inserted, so the
        cache is not contaminated with non-unique builds: e.g. ./package
        could be installed dozens of times at version 0.0, and a cached
        wheel would keep being reused even after the source was edited.

        :param link: The link of the sdist for which this will cache wheels.
        """
        segments = self._get_cache_path_parts(link)
        assert self.cache_dir
        # All wheels live under <cache_dir>/wheels/<hashed link path>.
        return os.path.join(self.cache_dir, "wheels", *segments)

    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        if not package_name:
            return link

        canonical_name = canonicalize_name(package_name)
        usable = []
        for filename, directory in self._get_candidates(link, canonical_name):
            try:
                wheel = Wheel(filename)
            except InvalidWheelFilename:
                # Not a wheel filename at all; ignore the entry.
                continue
            if canonicalize_name(wheel.name) != canonical_name:
                logger.debug(
                    "Ignoring cached wheel %s for %s as it "
                    "does not match the expected distribution name %s.",
                    filename,
                    link,
                    package_name,
                )
                continue
            if not wheel.supported(supported_tags):
                # Built for a different python/arch/etc
                continue
            usable.append(
                (
                    wheel.support_index_min(supported_tags),
                    filename,
                    directory,
                )
            )

        if not usable:
            return link

        # Pick the wheel whose tags rank best for this interpreter.
        _, best_name, best_dir = min(usable)
        return Link(path_to_url(os.path.join(best_dir, best_name)))
172
+
173
+
174
class EphemWheelCache(SimpleWheelCache):
    """A SimpleWheelCache backed by a self-created temporary directory."""

    def __init__(self) -> None:
        # The temp dir is globally managed, so its cleanup is handled by
        # pip's global tempdir machinery at the end of the invocation.
        self._temp_dir = TempDirectory(
            kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
            globally_managed=True,
        )
        super().__init__(self._temp_dir.path)
184
+
185
+
186
class CacheEntry:
    """A cache hit: the cached link, whether it came from the persistent
    cache, and its recorded origin (when an origin file is present)."""

    def __init__(
        self,
        link: Link,
        persistent: bool,
    ):
        self.link = link
        self.persistent = persistent
        self.origin: Optional[DirectUrl] = None
        origin_file = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
        if not origin_file.exists():
            return
        try:
            self.origin = DirectUrl.from_json(
                origin_file.read_text(encoding="utf-8")
            )
        except Exception as e:
            # A corrupt origin file must not break use of the cached
            # wheel; fall back to "origin unknown" and warn.
            logger.warning(
                "Ignoring invalid cache entry origin file %s for %s (%s)",
                origin_file,
                link.filename,
                e,
            )
208
+
209
+
210
class WheelCache(Cache):
    """Combine the persistent and ephemeral wheel caches into one Cache.

    Lookups consult the persistent SimpleWheelCache first and gracefully
    degrade to the per-invocation EphemWheelCache on a miss.
    """

    def __init__(self, cache_dir: str) -> None:
        super().__init__(cache_dir)
        self._wheel_cache = SimpleWheelCache(cache_dir)
        self._ephem_cache = EphemWheelCache()

    def get_path_for_link(self, link: Link) -> str:
        return self._wheel_cache.get_path_for_link(link)

    def get_ephem_path_for_link(self, link: Link) -> str:
        return self._ephem_cache.get_path_for_link(link)

    def get(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Link:
        entry = self.get_cache_entry(link, package_name, supported_tags)
        return link if entry is None else entry.link

    def get_cache_entry(
        self,
        link: Link,
        package_name: Optional[str],
        supported_tags: List[Tag],
    ) -> Optional[CacheEntry]:
        """Return a CacheEntry for *link*, or None on a cache miss.

        The entry records whether the hit came from the persistent or the
        ephemeral cache.
        """
        # Persistent cache first; the ephemeral cache is the fallback.
        for cache, persistent in (
            (self._wheel_cache, True),
            (self._ephem_cache, False),
        ):
            hit = cache.get(
                link=link,
                package_name=package_name,
                supported_tags=supported_tags,
            )
            # Cache implementations return the *same* link object on a
            # miss, so identity is the miss signal.
            if hit is not link:
                return CacheEntry(hit, persistent=persistent)
        return None

    @staticmethod
    def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
        origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
        if origin_path.exists():
            try:
                existing = DirectUrl.from_json(
                    origin_path.read_text(encoding="utf-8")
                )
            except Exception as e:
                logger.warning(
                    "Could not read origin file %s in cache entry (%s). "
                    "Will attempt to overwrite it.",
                    origin_path,
                    e,
                )
            else:
                # TODO: use DirectUrl.equivalent when
                # https://github.com/pypa/pip/pull/10564 is merged.
                if existing.url != download_info.url:
                    logger.warning(
                        "Origin URL %s in cache entry %s does not match download URL "
                        "%s. This is likely a pip bug or a cache corruption issue. "
                        "Will overwrite it with the new value.",
                        existing.url,
                        cache_dir,
                        download_info.url,
                    )
        origin_path.write_text(download_info.to_json(), encoding="utf-8")
myenv/Lib/site-packages/pip/_internal/cli/__init__.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ """Subpackage containing all of pip's command line interface related code
2
+ """
3
+
4
+ # This file intentionally does not import submodules
myenv/Lib/site-packages/pip/_internal/cli/autocompletion.py ADDED
@@ -0,0 +1,171 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Logic that powers autocompletion installed by ``pip completion``.
2
+ """
3
+
4
+ import optparse
5
+ import os
6
+ import sys
7
+ from itertools import chain
8
+ from typing import Any, Iterable, List, Optional
9
+
10
+ from pip._internal.cli.main_parser import create_main_parser
11
+ from pip._internal.commands import commands_dict, create_command
12
+ from pip._internal.metadata import get_default_environment
13
+
14
+
15
def autocomplete() -> None:
    """Entry Point for completion of main and subcommand options."""
    # Don't complete if user hasn't sourced bash_completion file.
    if "PIP_AUTO_COMPLETE" not in os.environ:
        return
    # COMP_WORDS / COMP_CWORD follow the bash completion protocol; the
    # first word is the program name itself, hence the [1:].
    cwords = os.environ["COMP_WORDS"].split()[1:]
    cword = int(os.environ["COMP_CWORD"])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ""

    parser = create_main_parser()
    subcommands = list(commands_dict)
    options = []

    # subcommand
    subcommand_name: Optional[str] = None
    for word in cwords:
        if word in subcommands:
            subcommand_name = word
            break
    # subcommand options
    if subcommand_name is not None:
        # special case: 'help' subcommand has no options
        if subcommand_name == "help":
            sys.exit(1)
        # special case: list locally installed dists for show and uninstall
        should_list_installed = not current.startswith("-") and subcommand_name in [
            "show",
            "uninstall",
        ]
        if should_list_installed:
            env = get_default_environment()
            lc = current.lower()
            installed = [
                dist.canonical_name
                for dist in env.iter_installed_distributions(local_only=True)
                if dist.canonical_name.startswith(lc)
                and dist.canonical_name not in cwords[1:]
            ]
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        should_list_installables = (
            not current.startswith("-") and subcommand_name == "install"
        )
        if should_list_installables:
            for path in auto_complete_paths(current, "path"):
                print(path)
            sys.exit(1)

        subcommand = create_command(subcommand_name)

        # Collect every non-hidden option string for the subcommand,
        # remembering nargs so "--opt=" can be suggested for valued opts.
        for opt in subcommand.parser.option_list_all:
            if opt.help != optparse.SUPPRESS_HELP:
                for opt_str in opt._long_opts + opt._short_opts:
                    options.append((opt_str, opt.nargs))

        # filter out previously specified options from available options
        prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        # get completion type given cwords and available subcommand options
        completion_type = get_path_completion_type(
            cwords,
            cword,
            subcommand.parser.option_list_all,
        )
        # get completion files and directories if ``completion_type`` is
        # ``<file>``, ``<dir>`` or ``<path>``
        if completion_type:
            paths = auto_complete_paths(current, completion_type)
            options = [(path, 0) for path in paths]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1] and option[0][:2] == "--":
                opt_label += "="
            print(opt_label)
    else:
        # show main parser options only when necessary

        opts = [i.option_list for i in parser.option_groups]
        opts.append(parser.option_list)
        flattened_opts = chain.from_iterable(opts)
        if current.startswith("-"):
            for opt in flattened_opts:
                if opt.help != optparse.SUPPRESS_HELP:
                    subcommands += opt._long_opts + opt._short_opts
        else:
            # get completion type given cwords and all available options
            completion_type = get_path_completion_type(cwords, cword, flattened_opts)
            if completion_type:
                subcommands = list(auto_complete_paths(current, completion_type))

        print(" ".join([x for x in subcommands if x.startswith(current)]))
    # exit(1) signals to the shell that completion output was produced
    sys.exit(1)
117
+
118
+
119
def get_path_completion_type(
    cwords: List[str], cword: int, opts: Iterable[Any]
) -> Optional[str]:
    """Get the type of path completion (``file``, ``dir``, ``path`` or None)

    :param cwords: same as the environmental variable ``COMP_WORDS``
    :param cword: same as the environmental variable ``COMP_CWORD``
    :param opts: The available options to check
    :return: path completion type (``file``, ``dir``, ``path`` or None)
    """
    # Path completion only applies when the *previous* word is an option
    # flag (possibly in "--opt=value" form).
    if cword < 2:
        return None
    previous = cwords[cword - 2]
    if not previous.startswith("-"):
        return None
    previous_flag = previous.split("=")[0]

    path_kinds = ("path", "file", "dir")
    for opt in opts:
        if opt.help == optparse.SUPPRESS_HELP:
            continue
        # str(opt) renders its option strings joined by "/".
        if previous_flag not in str(opt).split("/"):
            continue
        metavar = opt.metavar
        if not metavar or any(part in path_kinds for part in metavar.split("/")):
            return metavar
    return None
141
+
142
+
143
def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
    """If ``completion_type`` is ``file`` or ``path``, list all regular files
    and directories starting with ``current``; otherwise only list directories
    starting with ``current``.

    :param current: The word to be completed
    :param completion_type: path completion type(``file``, ``path`` or ``dir``)
    :return: A generator of regular files and/or directories
    """
    directory, filename = os.path.split(current)
    current_path = os.path.abspath(directory)
    # Don't complete paths if they can't be accessed
    if not os.access(current_path, os.R_OK):
        return
    filename = os.path.normcase(filename)
    for entry in os.listdir(current_path):
        # Only offer entries whose (normalized) name extends the prefix.
        if not os.path.normcase(entry).startswith(filename):
            continue
        candidate = os.path.join(current_path, entry)
        comp_file = os.path.normcase(os.path.join(directory, entry))
        # Regular files complete only for <file>/<path>; directories
        # always complete, with a trailing separator appended.
        if completion_type != "dir" and os.path.isfile(candidate):
            yield comp_file
        elif os.path.isdir(candidate):
            yield os.path.join(comp_file, "")
myenv/Lib/site-packages/pip/_internal/cli/base_command.py ADDED
@@ -0,0 +1,236 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Base Command class, and related routines"""
2
+
3
+ import functools
4
+ import logging
5
+ import logging.config
6
+ import optparse
7
+ import os
8
+ import sys
9
+ import traceback
10
+ from optparse import Values
11
+ from typing import Any, Callable, List, Optional, Tuple
12
+
13
+ from pip._vendor.rich import traceback as rich_traceback
14
+
15
+ from pip._internal.cli import cmdoptions
16
+ from pip._internal.cli.command_context import CommandContextMixIn
17
+ from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
18
+ from pip._internal.cli.status_codes import (
19
+ ERROR,
20
+ PREVIOUS_BUILD_DIR_ERROR,
21
+ UNKNOWN_ERROR,
22
+ VIRTUALENV_NOT_FOUND,
23
+ )
24
+ from pip._internal.exceptions import (
25
+ BadCommand,
26
+ CommandError,
27
+ DiagnosticPipError,
28
+ InstallationError,
29
+ NetworkConnectionError,
30
+ PreviousBuildDirError,
31
+ UninstallationError,
32
+ )
33
+ from pip._internal.utils.filesystem import check_path_owner
34
+ from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
35
+ from pip._internal.utils.misc import get_prog, normalize_path
36
+ from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
37
+ from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
38
+ from pip._internal.utils.virtualenv import running_under_virtualenv
39
+
40
+ __all__ = ["Command"]
41
+
42
+ logger = logging.getLogger(__name__)
43
+
44
+
45
class Command(CommandContextMixIn):
    """Base class for pip's CLI commands.

    Subclasses set ``usage``, implement ``run()``, and register their
    options in ``add_options()`` via ``self.cmd_opts``.
    """

    usage: str = ""
    ignore_require_venv: bool = False

    def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
        super().__init__()

        self.name = name
        self.summary = summary
        self.parser = ConfigOptionParser(
            usage=self.usage,
            prog=f"{get_prog()} {name}",
            formatter=UpdatingDefaultsHelpFormatter(),
            add_help_option=False,
            name=name,
            # The subclass's class docstring doubles as the --help text.
            description=self.__doc__,
            isolated=isolated,
        )

        self.tempdir_registry: Optional[TempDirRegistry] = None

        # Commands should add options to this option group
        optgroup_name = f"{self.name.capitalize()} Options"
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)

        # Add the general options
        gen_opts = cmdoptions.make_option_group(
            cmdoptions.general_group,
            self.parser,
        )
        self.parser.add_option_group(gen_opts)

        self.add_options()

    def add_options(self) -> None:
        # Hook for subclasses to register command-specific options.
        pass

    def handle_pip_version_check(self, options: Values) -> None:
        """
        This is a no-op so that commands by default do not do the pip version
        check.
        """
        # Make sure we do the pip version check if the index_group options
        # are present.
        assert not hasattr(options, "no_index")

    def run(self, options: Values, args: List[str]) -> int:
        raise NotImplementedError

    def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
        # factored out for testability
        return self.parser.parse_args(args)

    def main(self, args: List[str]) -> int:
        """Run the command, ensuring logging is shut down afterwards."""
        try:
            with self.main_context():
                return self._main(args)
        finally:
            logging.shutdown()

    def _main(self, args: List[str]) -> int:
        """Parse args, configure the environment, and invoke ``run()``,
        mapping exceptions to process exit codes."""
        # We must initialize this before the tempdir manager, otherwise the
        # configuration would not be accessible by the time we clean up the
        # tempdir manager.
        self.tempdir_registry = self.enter_context(tempdir_registry())
        # Intentionally set as early as possible so globally-managed temporary
        # directories are available to the rest of the code.
        self.enter_context(global_tempdir_manager())

        options, args = self.parse_args(args)

        # Set verbosity so that it can be used elsewhere.
        self.verbosity = options.verbose - options.quiet

        level_number = setup_logging(
            verbosity=self.verbosity,
            no_color=options.no_color,
            user_log_file=options.log,
        )

        always_enabled_features = set(options.features_enabled) & set(
            cmdoptions.ALWAYS_ENABLED_FEATURES
        )
        if always_enabled_features:
            logger.warning(
                "The following features are always enabled: %s. ",
                ", ".join(sorted(always_enabled_features)),
            )

        # Make sure that the --python argument isn't specified after the
        # subcommand. We can tell, because if --python was specified,
        # we should only reach this point if we're running in the created
        # subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment
        # variable set.
        if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
            logger.critical(
                "The --python option must be placed before the pip subcommand name"
            )
            sys.exit(ERROR)

        # TODO: Try to get these passing down from the command?
        # without resorting to os.environ to hold these.
        # This also affects isolated builds and it should.

        if options.no_input:
            os.environ["PIP_NO_INPUT"] = "1"

        if options.exists_action:
            os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)

        if options.require_venv and not self.ignore_require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical("Could not find an activated virtualenv (required).")
                sys.exit(VIRTUALENV_NOT_FOUND)

        if options.cache_dir:
            options.cache_dir = normalize_path(options.cache_dir)
            if not check_path_owner(options.cache_dir):
                logger.warning(
                    "The directory '%s' or its parent directory is not owned "
                    "or is not writable by the current user. The cache "
                    "has been disabled. Check the permissions and owner of "
                    "that directory. If executing pip with sudo, you should "
                    "use sudo's -H flag.",
                    options.cache_dir,
                )
                options.cache_dir = None

        def intercepts_unhandled_exc(
            run_func: Callable[..., int]
        ) -> Callable[..., int]:
            # Wrap run() so each known exception type maps to a specific
            # exit status instead of an uncaught traceback.
            @functools.wraps(run_func)
            def exc_logging_wrapper(*args: Any) -> int:
                try:
                    status = run_func(*args)
                    assert isinstance(status, int)
                    return status
                except DiagnosticPipError as exc:
                    logger.error("[present-rich] %s", exc)
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except PreviousBuildDirError as exc:
                    logger.critical(str(exc))
                    logger.debug("Exception information:", exc_info=True)

                    return PREVIOUS_BUILD_DIR_ERROR
                except (
                    InstallationError,
                    UninstallationError,
                    BadCommand,
                    NetworkConnectionError,
                ) as exc:
                    logger.critical(str(exc))
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except CommandError as exc:
                    logger.critical("%s", exc)
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except BrokenStdoutLoggingError:
                    # Bypass our logger and write any remaining messages to
                    # stderr because stdout no longer works.
                    print("ERROR: Pipe to stdout was broken", file=sys.stderr)
                    if level_number <= logging.DEBUG:
                        traceback.print_exc(file=sys.stderr)

                    return ERROR
                except KeyboardInterrupt:
                    logger.critical("Operation cancelled by user")
                    logger.debug("Exception information:", exc_info=True)

                    return ERROR
                except BaseException:
                    logger.critical("Exception:", exc_info=True)

                    return UNKNOWN_ERROR

            return exc_logging_wrapper

        try:
            if not options.debug_mode:
                run = intercepts_unhandled_exc(self.run)
            else:
                # --debug: let exceptions propagate, with rich tracebacks.
                run = self.run
                rich_traceback.install(show_locals=True)
            return run(options, args)
        finally:
            self.handle_pip_version_check(options)
myenv/Lib/site-packages/pip/_internal/cli/cmdoptions.py ADDED
@@ -0,0 +1,1074 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ shared options and groups
3
+
4
+ The principle here is to define options once, but *not* instantiate them
5
+ globally. One reason being that options with action='append' can carry state
6
+ between parses. pip parses general options twice internally, and shouldn't
7
+ pass on state. To be consistent, all options will follow this design.
8
+ """
9
+
10
+ # The following comment should be removed at some point in the future.
11
+ # mypy: strict-optional=False
12
+
13
+ import importlib.util
14
+ import logging
15
+ import os
16
+ import textwrap
17
+ from functools import partial
18
+ from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
19
+ from textwrap import dedent
20
+ from typing import Any, Callable, Dict, Optional, Tuple
21
+
22
+ from pip._vendor.packaging.utils import canonicalize_name
23
+
24
+ from pip._internal.cli.parser import ConfigOptionParser
25
+ from pip._internal.exceptions import CommandError
26
+ from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
27
+ from pip._internal.models.format_control import FormatControl
28
+ from pip._internal.models.index import PyPI
29
+ from pip._internal.models.target_python import TargetPython
30
+ from pip._internal.utils.hashes import STRONG_HASHES
31
+ from pip._internal.utils.misc import strtobool
32
+
33
+ logger = logging.getLogger(__name__)
34
+
35
+
36
def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
    """
    Raise an option parsing error using parser.error().

    Args:
        parser: an OptionParser instance.
        option: an Option instance.
        msg: the error text.
    """
    text = f"{option} error: {msg}"
    # Collapse runs of whitespace, then re-wrap for terminal display.
    parser.error(textwrap.fill(" ".join(text.split())))
48
+
49
+
50
def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
    """
    Return an OptionGroup object
    group -- assumed to be dict with 'name' and 'options' keys
    parser -- an optparse Parser
    """
    option_group = OptionGroup(parser, group["name"])
    # Each entry is a factory, not an Option instance, so no option state
    # is shared between parses (see module docstring).
    for option_factory in group["options"]:
        option_group.add_option(option_factory())
    return option_group
60
+
61
+
62
def check_dist_restriction(options: Values, check_target: bool = False) -> None:
    """Function for determining if custom platform options are allowed.

    :param options: The OptionParser options.
    :param check_target: Whether or not to check if --target is being used.
    """
    dist_restriction_set = any(
        (
            options.python_version,
            options.platforms,
            options.abis,
            options.implementation,
        )
    )

    binary_only = FormatControl(set(), {":all:"})
    sdist_dependencies_allowed = (
        options.format_control != binary_only and not options.ignore_dependencies
    )

    # Installations or downloads using dist restrictions must not combine
    # source distributions and dist-specific wheels, as they are not
    # guaranteed to be locally compatible.
    if dist_restriction_set and sdist_dependencies_allowed:
        raise CommandError(
            "When restricting platform and interpreter constraints using "
            "--python-version, --platform, --abi, or --implementation, "
            "either --no-deps must be set, or --only-binary=:all: must be "
            "set and --no-binary must not be set (or must be set to "
            ":none:)."
        )

    if check_target and dist_restriction_set and not options.target_dir:
        raise CommandError(
            "Can not use any platform or abi specific options unless "
            "installing via '--target'"
        )
100
+
101
+
102
def _path_option_check(option: Option, opt: str, value: str) -> str:
    """optparse type-checker for "path" values: expand a leading ``~``."""
    return os.path.expanduser(value)
104
+
105
+
106
def _package_name_option_check(option: Option, opt: str, value: str) -> str:
    """optparse type-checker for "package_name" values: normalize the name
    via packaging's ``canonicalize_name``."""
    return canonicalize_name(value)
108
+
109
+
110
+ class PipOption(Option):
111
+ TYPES = Option.TYPES + ("path", "package_name")
112
+ TYPE_CHECKER = Option.TYPE_CHECKER.copy()
113
+ TYPE_CHECKER["package_name"] = _package_name_option_check
114
+ TYPE_CHECKER["path"] = _path_option_check
115
+
116
+
117
###########
# options #
###########

# Each name below is a zero-argument factory (via functools.partial) that
# produces a fresh optparse Option; commands call them to populate groups.

help_: Callable[..., Option] = partial(
    Option,
    "-h",
    "--help",
    dest="help",
    action="help",
    help="Show help.",
)

debug_mode: Callable[..., Option] = partial(
    Option,
    "--debug",
    dest="debug_mode",
    action="store_true",
    default=False,
    help=(
        "Let unhandled exceptions propagate outside the main subroutine, "
        "instead of logging them to stderr."
    ),
)

isolated_mode: Callable[..., Option] = partial(
    Option,
    "--isolated",
    dest="isolated_mode",
    action="store_true",
    default=False,
    help=(
        "Run pip in an isolated mode, ignoring environment variables and user "
        "configuration."
    ),
)

require_virtualenv: Callable[..., Option] = partial(
    Option,
    "--require-virtualenv",
    "--require-venv",
    dest="require_venv",
    action="store_true",
    default=False,
    help=(
        "Allow pip to only run in a virtual environment; "
        "exit with an error otherwise."
    ),
)

override_externally_managed: Callable[..., Option] = partial(
    Option,
    "--break-system-packages",
    dest="override_externally_managed",
    action="store_true",
    help="Allow pip to modify an EXTERNALLY-MANAGED Python installation",
)

python: Callable[..., Option] = partial(
    Option,
    "--python",
    dest="python",
    help="Run pip with the specified Python interpreter.",
)

verbose: Callable[..., Option] = partial(
    Option,
    "-v",
    "--verbose",
    dest="verbose",
    action="count",
    default=0,
    help="Give more output. Option is additive, and can be used up to 3 times.",
)

no_color: Callable[..., Option] = partial(
    Option,
    "--no-color",
    dest="no_color",
    action="store_true",
    default=False,
    help="Suppress colored output.",
)

version: Callable[..., Option] = partial(
    Option,
    "-V",
    "--version",
    dest="version",
    action="store_true",
    help="Show version and exit.",
)

quiet: Callable[..., Option] = partial(
    Option,
    "-q",
    "--quiet",
    dest="quiet",
    action="count",
    default=0,
    help=(
        "Give less output. Option is additive, and can be used up to 3"
        " times (corresponding to WARNING, ERROR, and CRITICAL logging"
        " levels)."
    ),
)

progress_bar: Callable[..., Option] = partial(
    Option,
    "--progress-bar",
    dest="progress_bar",
    type="choice",
    choices=["on", "off"],
    default="on",
    help="Specify whether the progress bar should be used [on, off] (default: on)",
)
233
+
234
# Verbose append-mode log file; uses the PipOption "path" type so "~"
# is expanded before use.
log: Callable[..., Option] = partial(
    PipOption,
    "--log",
    "--log-file",
    "--local-log",
    dest="log",
    metavar="path",
    type="path",
    help="Path to a verbose appending log.",
)
244
+
245
no_input: Callable[..., Option] = partial(
    Option,
    # Don't ask for input
    "--no-input",
    dest="no_input",
    action="store_true",
    default=False,
    help="Disable prompting for input.",
)

# NOTE: the default changed from "disabled" to "auto" in pip 23.1; the
# help text previously still claimed "(default: disabled)", which was
# inconsistent with default="auto" below. Fixed to state the real default.
keyring_provider: Callable[..., Option] = partial(
    Option,
    "--keyring-provider",
    dest="keyring_provider",
    choices=["auto", "disabled", "import", "subprocess"],
    default="auto",
    help=(
        "Enable the credential lookup via the keyring library if user input is allowed."
        " Specify which mechanism to use [disabled, import, subprocess]."
        " (default: auto)"
    ),
)

proxy: Callable[..., Option] = partial(
    Option,
    "--proxy",
    dest="proxy",
    type="str",
    default="",
    help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.",
)

retries: Callable[..., Option] = partial(
    Option,
    "--retries",
    dest="retries",
    type="int",
    default=5,
    help="Maximum number of retries each connection should attempt "
    "(default %default times).",
)

timeout: Callable[..., Option] = partial(
    Option,
    "--timeout",
    "--default-timeout",
    metavar="sec",
    dest="timeout",
    type="float",
    default=15,
    help="Set the socket timeout (default %default seconds).",
)
297
+
298
+
299
def exists_action() -> Option:
    """Return the option choosing what to do when a path already exists."""
    return Option(
        # Option when path already exist
        "--exists-action",
        dest="exists_action",
        type="choice",
        choices=["s", "i", "w", "b", "a"],
        default=[],
        action="append",
        metavar="action",
        help="Default action when a path already exists: "
        "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
    )
312
+
313
+
314
# TLS trust / identity options ("path" type expands "~").
cert: Callable[..., Option] = partial(
    PipOption,
    "--cert",
    dest="cert",
    type="path",
    metavar="path",
    help=(
        "Path to PEM-encoded CA certificate bundle. "
        "If provided, overrides the default. "
        "See 'SSL Certificate Verification' in pip documentation "
        "for more information."
    ),
)

client_cert: Callable[..., Option] = partial(
    PipOption,
    "--client-cert",
    dest="client_cert",
    type="path",
    default=None,
    metavar="path",
    help="Path to SSL client certificate, a single file containing the "
    "private key and the certificate in PEM format.",
)

index_url: Callable[..., Option] = partial(
    Option,
    "-i",
    "--index-url",
    "--pypi-url",
    dest="index_url",
    metavar="URL",
    default=PyPI.simple_url,
    help="Base URL of the Python Package Index (default %default). "
    "This should point to a repository compliant with PEP 503 "
    "(the simple repository API) or a local directory laid out "
    "in the same format.",
)
352
+
353
+
354
def extra_index_url() -> Option:
    """Additional package indexes, appended to --index-url."""
    return Option(
        "--extra-index-url",
        dest="extra_index_urls",
        metavar="URL",
        action="append",
        default=[],
        help="Extra URLs of package indexes to use in addition to "
        "--index-url. Should follow the same rules as "
        "--index-url.",
    )


no_index: Callable[..., Option] = partial(
    Option,
    "--no-index",
    dest="no_index",
    action="store_true",
    default=False,
    help="Ignore package index (only looking at --find-links URLs instead).",
)


def find_links() -> Option:
    """Extra locations (URLs or local dirs) to search for archives."""
    return Option(
        "-f",
        "--find-links",
        dest="find_links",
        action="append",
        default=[],
        metavar="url",
        help="If a URL or path to an html file, then parse for links to "
        "archives such as sdist (.tar.gz) or wheel (.whl) files. "
        "If a local path or file:// URL that's a directory, "
        "then look for archives in the directory listing. "
        "Links to VCS project URLs are not supported.",
    )


def trusted_host() -> Option:
    """Hosts exempt from HTTPS verification requirements."""
    return Option(
        "--trusted-host",
        dest="trusted_hosts",
        action="append",
        metavar="HOSTNAME",
        default=[],
        help="Mark this host or host:port pair as trusted, even though it "
        "does not have valid or any HTTPS.",
    )


def constraints() -> Option:
    """Constraints files (repeatable)."""
    return Option(
        "-c",
        "--constraint",
        dest="constraints",
        action="append",
        default=[],
        metavar="file",
        help="Constrain versions using the given constraints file. "
        "This option can be used multiple times.",
    )


def requirements() -> Option:
    """Requirements files (repeatable)."""
    return Option(
        "-r",
        "--requirement",
        dest="requirements",
        action="append",
        default=[],
        metavar="file",
        help="Install from the given requirements file. "
        "This option can be used multiple times.",
    )


def editable() -> Option:
    """Projects to install in editable (develop) mode."""
    return Option(
        "-e",
        "--editable",
        dest="editables",
        action="append",
        default=[],
        metavar="path/url",
        help=(
            "Install a project in editable mode (i.e. setuptools "
            '"develop mode") from a local project path or a VCS url.'
        ),
    )
444
+
445
+
446
def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None:
    # Store --src as an absolute path so later working-directory changes
    # cannot invalidate it.
    setattr(parser.values, option.dest, os.path.abspath(value))
449
+
450
+
451
# Checkout directory for editable projects; normalized to an absolute
# path by the _handle_src callback.
src: Callable[..., Option] = partial(
    PipOption,
    "--src",
    "--source",
    "--source-dir",
    "--source-directory",
    dest="src_dir",
    type="path",
    metavar="dir",
    default=get_src_prefix(),
    action="callback",
    callback=_handle_src,
    help="Directory to check out editable projects into. "
    'The default in a virtualenv is "<venv path>/src". '
    'The default for global installs is "<current dir>/src".',
)
467
+
468
+
469
def _get_format_control(values: Values, option: Option) -> Any:
    """Get a format_control object."""
    return getattr(values, option.dest)


def _handle_no_binary(
    option: Option, opt_str: str, value: str, parser: OptionParser
) -> None:
    # --no-binary adds names to the no_binary set and removes them from
    # the only_binary set (the two are mutually exclusive per name).
    fc = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(value, fc.no_binary, fc.only_binary)


def _handle_only_binary(
    option: Option, opt_str: str, value: str, parser: OptionParser
) -> None:
    # Mirror image of _handle_no_binary.
    fc = _get_format_control(parser.values, option)
    FormatControl.handle_mutual_excludes(value, fc.only_binary, fc.no_binary)
494
+
495
+
496
def no_binary() -> Option:
    """Option forbidding binary distributions for the named packages."""
    format_control = FormatControl(set(), set())
    return Option(
        "--no-binary",
        dest="format_control",
        action="callback",
        callback=_handle_no_binary,
        type="str",
        default=format_control,
        help="Do not use binary packages. Can be supplied multiple times, and "
        'each time adds to the existing value. Accepts either ":all:" to '
        'disable all binary packages, ":none:" to empty the set (notice '
        "the colons), or one or more package names with commas between "
        "them (no colons). Note that some packages are tricky to compile "
        "and may fail to install when this option is used on them.",
    )


def only_binary() -> Option:
    """Option forbidding source distributions for the named packages."""
    format_control = FormatControl(set(), set())
    return Option(
        "--only-binary",
        dest="format_control",
        action="callback",
        callback=_handle_only_binary,
        type="str",
        default=format_control,
        help="Do not use source packages. Can be supplied multiple times, and "
        'each time adds to the existing value. Accepts either ":all:" to '
        'disable all source packages, ":none:" to empty the set, or one '
        "or more package names with commas between them. Packages "
        "without binary distributions will fail to install when this "
        "option is used on them.",
    )
530
+
531
+
532
# Repeatable wheel platform-tag restriction (None means "running system").
platforms: Callable[..., Option] = partial(
    Option,
    "--platform",
    dest="platforms",
    metavar="platform",
    action="append",
    default=None,
    help=(
        "Only use wheels compatible with <platform>. Defaults to the "
        "platform of the running system. Use this option multiple times to "
        "specify multiple platforms supported by the target interpreter."
    ),
)
545
+
546
+
547
# This was made a separate function for unit-testing purposes.
def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
    """
    Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.

    :return: A 2-tuple (version_info, error_msg), where `error_msg` is
        non-None if and only if there was a parsing error.
    """
    if not value:
        # The empty string is the same as not providing a value.
        return (None, None)

    parts = value.split(".")
    if len(parts) > 3:
        return ((), "at most three version parts are allowed")

    if len(parts) == 1 and len(value) > 1:
        # A single multi-digit chunk like "37" means major "3", minor "7".
        parts = [value[0], value[1:]]

    try:
        return (tuple(int(part) for part in parts), None)
    except ValueError:
        return ((), "each version part must be an integer")
575
+
576
+
577
+ def _handle_python_version(
578
+ option: Option, opt_str: str, value: str, parser: OptionParser
579
+ ) -> None:
580
+ """
581
+ Handle a provided --python-version value.
582
+ """
583
+ version_info, error_msg = _convert_python_version(value)
584
+ if error_msg is not None:
585
+ msg = "invalid --python-version value: {!r}: {}".format(
586
+ value,
587
+ error_msg,
588
+ )
589
+ raise_option_error(parser, option=option, msg=msg)
590
+
591
+ parser.values.python_version = version_info
592
+
593
+
594
+ python_version: Callable[..., Option] = partial(
595
+ Option,
596
+ "--python-version",
597
+ dest="python_version",
598
+ metavar="python_version",
599
+ action="callback",
600
+ callback=_handle_python_version,
601
+ type="str",
602
+ default=None,
603
+ help=dedent(
604
+ """\
605
+ The Python interpreter version to use for wheel and "Requires-Python"
606
+ compatibility checks. Defaults to a version derived from the running
607
+ interpreter. The version can be specified using up to three dot-separated
608
+ integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
609
+ version can also be given as a string without dots (e.g. "37" for 3.7.0).
610
+ """
611
+ ),
612
+ )
613
+
614
+
615
# Wheel implementation-tag restriction ('py' = implementation-agnostic).
implementation: Callable[..., Option] = partial(
    Option,
    "--implementation",
    dest="implementation",
    metavar="implementation",
    default=None,
    help=(
        "Only use wheels compatible with Python "
        "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
        " or 'ip'. If not specified, then the current "
        "interpreter implementation is used. Use 'py' to force "
        "implementation-agnostic wheels."
    ),
)

# Repeatable wheel ABI-tag restriction.
abis: Callable[..., Option] = partial(
    Option,
    "--abi",
    dest="abis",
    metavar="abi",
    action="append",
    default=None,
    help=(
        "Only use wheels compatible with Python abi <abi>, e.g. 'pypy_41'. "
        "If not specified, then the current interpreter abi tag is used. "
        "Use this option multiple times to specify multiple abis supported "
        "by the target interpreter. Generally you will need to specify "
        "--implementation, --platform, and --python-version when using this "
        "option."
    ),
)
647
+
648
+
649
def add_target_python_options(cmd_opts: OptionGroup) -> None:
    """Register the four cross-environment wheel-selection options."""
    for factory in (platforms, python_version, implementation, abis):
        cmd_opts.add_option(factory())


def make_target_python(options: Values) -> TargetPython:
    """Build a TargetPython from the parsed dist-restriction options."""
    return TargetPython(
        platforms=options.platforms,
        py_version_info=options.python_version,
        abis=options.abis,
        implementation=options.implementation,
    )
665
+
666
+
667
def prefer_binary() -> Option:
    """Option preferring older wheels over newer source distributions."""
    return Option(
        "--prefer-binary",
        dest="prefer_binary",
        action="store_true",
        default=False,
        help="Prefer older binary packages over newer source packages.",
    )


# Cache location ("path" type expands "~"); may be set to False by
# the --no-cache-dir callback.
cache_dir: Callable[..., Option] = partial(
    PipOption,
    "--cache-dir",
    dest="cache_dir",
    default=USER_CACHE_DIR,
    metavar="dir",
    type="path",
    help="Store the cache data in <dir>.",
)
686
+
687
+
688
def _handle_no_cache_dir(
    option: Option, opt: str, value: str, parser: OptionParser
) -> None:
    """
    Process a value provided for the --no-cache-dir option.

    This is an optparse.Option callback for the --no-cache-dir option.
    """
    # The value argument will be None if --no-cache-dir is passed via the
    # command-line, since the option doesn't accept arguments. However,
    # the value can be non-None if the option is triggered e.g. by an
    # environment variable, like PIP_NO_CACHE_DIR=true.
    if value is not None:
        # Then parse the string value to get argument error-checking.
        try:
            strtobool(value)
        except ValueError as exc:
            raise_option_error(parser, option=option, msg=str(exc))

    # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
    # converted to 0 (like "false" or "no") caused cache_dir to be disabled
    # rather than enabled (logic would say the latter). Thus, we disable
    # the cache directory not just on values that parse to True, but (for
    # backwards compatibility reasons) also on values that parse to False.
    # In other words, always set it to False if the option is provided in
    # some (valid) form.
    parser.values.cache_dir = False


no_cache: Callable[..., Option] = partial(
    Option,
    "--no-cache-dir",
    dest="cache_dir",
    action="callback",
    callback=_handle_no_cache_dir,
    help="Disable the cache.",
)

no_deps: Callable[..., Option] = partial(
    Option,
    "--no-deps",
    "--no-dependencies",
    dest="ignore_dependencies",
    action="store_true",
    default=False,
    help="Don't install package dependencies.",
)

ignore_requires_python: Callable[..., Option] = partial(
    Option,
    "--ignore-requires-python",
    dest="ignore_requires_python",
    action="store_true",
    help="Ignore the Requires-Python information.",
)

no_build_isolation: Callable[..., Option] = partial(
    Option,
    "--no-build-isolation",
    dest="build_isolation",
    action="store_false",
    default=True,
    help="Disable isolation when building a modern source distribution. "
    "Build dependencies specified by PEP 518 must be already installed "
    "if this option is used.",
)

check_build_deps: Callable[..., Option] = partial(
    Option,
    "--check-build-dependencies",
    dest="check_build_deps",
    action="store_true",
    default=False,
    help="Check the build dependencies when PEP517 is used.",
)
763
+
764
+
765
def _handle_no_use_pep517(
    option: Option, opt: str, value: str, parser: OptionParser
) -> None:
    """
    Process a value provided for the --no-use-pep517 option.

    This is an optparse.Option callback for the no_use_pep517 option.
    """
    # --no-use-pep517 takes no argument; a non-None value can only come
    # from the environment or a config file, which is unsupported.
    if value is not None:
        msg = """A value was passed for --no-use-pep517,
        probably using either the PIP_NO_USE_PEP517 environment variable
        or the "no-use-pep517" config file option. Use an appropriate value
        of the PIP_USE_PEP517 environment variable or the "use-pep517"
        config file option instead.
        """
        raise_option_error(parser, option=option, msg=msg)

    # If user doesn't wish to use pep517, we check if setuptools and wheel are installed
    # and raise error if it is not.
    packages = ("setuptools", "wheel")
    if not all(importlib.util.find_spec(package) for package in packages):
        msg = (
            f"It is not possible to use --no-use-pep517 "
            f"without {' and '.join(packages)} installed."
        )
        raise_option_error(parser, option=option, msg=msg)

    # Otherwise, --no-use-pep517 was passed via the command-line.
    parser.values.use_pep517 = False


use_pep517: Any = partial(
    Option,
    "--use-pep517",
    dest="use_pep517",
    action="store_true",
    default=None,
    help="Use PEP 517 for building source distributions "
    "(use --no-use-pep517 to force legacy behaviour).",
)

no_use_pep517: Any = partial(
    Option,
    "--no-use-pep517",
    dest="use_pep517",
    action="callback",
    callback=_handle_no_use_pep517,
    default=None,
    help=SUPPRESS_HELP,
)
819
+
820
+
821
def _handle_config_settings(
    option: Option, opt_str: str, value: str, parser: OptionParser
) -> None:
    # Each -C KEY=VAL accumulates into a dict on parser.values; a key
    # given more than once collects its values into a list.
    key, sep, val = value.partition("=")
    if sep != "=":
        parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")  # noqa
    settings = getattr(parser.values, option.dest)
    if settings is None:
        settings = {}
        setattr(parser.values, option.dest, settings)
    if key not in settings:
        settings[key] = val
    elif isinstance(settings[key], list):
        settings[key].append(val)
    else:
        settings[key] = [settings[key], val]


config_settings: Callable[..., Option] = partial(
    Option,
    "-C",
    "--config-settings",
    dest="config_settings",
    type=str,
    action="callback",
    callback=_handle_config_settings,
    metavar="settings",
    help="Configuration settings to be passed to the PEP 517 build backend. "
    "Settings take the form KEY=VALUE. Use multiple --config-settings options "
    "to pass multiple keys to the backend.",
)
853
+
854
# Extra args forwarded to 'setup.py bdist_wheel' (legacy builds only).
build_options: Callable[..., Option] = partial(
    Option,
    "--build-option",
    dest="build_options",
    metavar="options",
    action="append",
    help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
)

global_options: Callable[..., Option] = partial(
    Option,
    "--global-option",
    dest="global_options",
    action="append",
    metavar="options",
    help="Extra global options to be supplied to the setup.py "
    "call before the install or bdist_wheel command.",
)

# NOTE: no explicit dest; optparse derives "no_clean" from the long flag.
no_clean: Callable[..., Option] = partial(
    Option,
    "--no-clean",
    action="store_true",
    default=False,
    help="Don't clean up build directories.",
)

pre: Callable[..., Option] = partial(
    Option,
    "--pre",
    action="store_true",
    default=False,
    help="Include pre-release and development versions. By default, "
    "pip only finds stable versions.",
)

disable_pip_version_check: Callable[..., Option] = partial(
    Option,
    "--disable-pip-version-check",
    dest="disable_pip_version_check",
    action="store_true",
    default=False,
    help="Don't periodically check PyPI to determine whether a new version "
    "of pip is available for download. Implied with --no-index.",
)

root_user_action: Callable[..., Option] = partial(
    Option,
    "--root-user-action",
    dest="root_user_action",
    default="warn",
    choices=["warn", "ignore"],
    help="Action if pip is run as a root user. By default, a warning message is shown.",
)
908
+
909
+
910
def _handle_merge_hash(
    option: Option, opt_str: str, value: str, parser: OptionParser
) -> None:
    """Given a value spelled "algo:digest", append the digest to a list
    pointed to in a dict by the algo name."""
    if not parser.values.hashes:
        parser.values.hashes = {}
    algo, sep, digest = value.partition(":")
    if not sep:
        parser.error(
            "Arguments to {} must be a hash name "  # noqa
            "followed by a value, like --hash=sha256:"
            "abcde...".format(opt_str)
        )
    if algo not in STRONG_HASHES:
        parser.error(
            "Allowed hash algorithms for {} are {}.".format(  # noqa
                opt_str, ", ".join(STRONG_HASHES)
            )
        )
    parser.values.hashes.setdefault(algo, []).append(digest)


hash: Callable[..., Option] = partial(
    Option,
    "--hash",
    # Hash values eventually end up in InstallRequirement.hashes due to
    # __dict__ copying in process_line().
    dest="hashes",
    action="callback",
    callback=_handle_merge_hash,
    type="string",
    help="Verify that the package's archive matches this "
    "hash before installing. Example: --hash=sha256:abcdef...",
)
946
+
947
+
948
require_hashes: Callable[..., Option] = partial(
    Option,
    "--require-hashes",
    dest="require_hashes",
    action="store_true",
    default=False,
    help="Require a hash to check each requirement against, for "
    "repeatable installs. This option is implied when any package in a "
    "requirements file has a --hash option.",
)


# Repeatable installation-path filter for `pip list` ("path" type
# expands "~"); mutually exclusive with --user/--local (see
# check_list_path_option).
list_path: Callable[..., Option] = partial(
    PipOption,
    "--path",
    dest="path",
    type="path",
    action="append",
    help="Restrict to the specified installation path for listing "
    "packages (can be used multiple times).",
)
969
+
970
+
971
def check_list_path_option(options: Values) -> None:
    # --path is mutually exclusive with the --user/--local scoping flags.
    if options.path and (options.user or options.local):
        raise CommandError("Cannot combine '--path' with '--user' or '--local'")
974
+
975
+
976
# Repeatable package filter for list output; names are canonicalized
# via the "package_name" type.
list_exclude: Callable[..., Option] = partial(
    PipOption,
    "--exclude",
    dest="excludes",
    action="append",
    metavar="package",
    type="package_name",
    help="Exclude specified package from the output",
)


no_python_version_warning: Callable[..., Option] = partial(
    Option,
    "--no-python-version-warning",
    dest="no_python_version_warning",
    action="store_true",
    default=False,
    help="Silence deprecation warnings for upcoming unsupported Pythons.",
)
995
+
996
+
997
# Features that are now always on. A warning is printed if they are used.
ALWAYS_ENABLED_FEATURES = [
    "no-binary-enable-wheel-cache",  # always on since 23.1
]

use_new_feature: Callable[..., Option] = partial(
    Option,
    "--use-feature",
    dest="features_enabled",
    metavar="feature",
    action="append",
    default=[],
    choices=[
        "fast-deps",
        "truststore",
    ]
    + ALWAYS_ENABLED_FEATURES,
    help="Enable new functionality, that may be backward incompatible.",
)

use_deprecated_feature: Callable[..., Option] = partial(
    Option,
    "--use-deprecated",
    dest="deprecated_features_enabled",
    metavar="feature",
    action="append",
    default=[],
    choices=[
        "legacy-resolver",
    ],
    help=("Enable deprecated functionality, that will be removed in the future."),
)
1029
+
1030
+
1031
##########
# groups #
##########

# Group descriptions consumed by make_option_group(); list order is the
# order options appear in --help output.
general_group: Dict[str, Any] = {
    "name": "General Options",
    "options": [
        help_,
        debug_mode,
        isolated_mode,
        require_virtualenv,
        python,
        verbose,
        version,
        quiet,
        log,
        no_input,
        keyring_provider,
        proxy,
        retries,
        timeout,
        exists_action,
        trusted_host,
        cert,
        client_cert,
        cache_dir,
        no_cache,
        disable_pip_version_check,
        no_color,
        no_python_version_warning,
        use_new_feature,
        use_deprecated_feature,
    ],
}

index_group: Dict[str, Any] = {
    "name": "Package Index Options",
    "options": [
        index_url,
        extra_index_url,
        no_index,
        find_links,
    ],
}
myenv/Lib/site-packages/pip/_internal/cli/command_context.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from contextlib import ExitStack, contextmanager
2
+ from typing import ContextManager, Generator, TypeVar
3
+
4
_T = TypeVar("_T", covariant=True)


class CommandContextMixIn:
    """Mixin giving a command a single ExitStack-backed resource scope.

    Resources registered via enter_context() stay open until
    main_context() exits, tying their lifetime to one top-level
    command invocation.
    """

    def __init__(self) -> None:
        super().__init__()
        self._in_main_context = False
        self._main_context = ExitStack()

    @contextmanager
    def main_context(self) -> Generator[None, None, None]:
        # Re-entering would reuse (and prematurely close) the stack.
        assert not self._in_main_context

        self._in_main_context = True
        try:
            with self._main_context:
                yield
        finally:
            self._in_main_context = False

    def enter_context(self, context_provider: ContextManager[_T]) -> _T:
        # Only valid while main_context() is active.
        assert self._in_main_context

        return self._main_context.enter_context(context_provider)
myenv/Lib/site-packages/pip/_internal/cli/main.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Primary application entrypoint.
2
+ """
3
+ import locale
4
+ import logging
5
+ import os
6
+ import sys
7
+ import warnings
8
+ from typing import List, Optional
9
+
10
+ from pip._internal.cli.autocompletion import autocomplete
11
+ from pip._internal.cli.main_parser import parse_command
12
+ from pip._internal.commands import create_command
13
+ from pip._internal.exceptions import PipError
14
+ from pip._internal.utils import deprecation
15
+
16
+ logger = logging.getLogger(__name__)
17
+
18
+
19
+ # Do not import and use main() directly! Using it directly is actively
20
+ # discouraged by pip's maintainers. The name, location and behavior of
21
+ # this function is subject to change, so calling it directly is not
22
+ # portable across different pip versions.
23
+
24
+ # In addition, running pip in-process is unsupported and unsafe. This is
25
+ # elaborated in detail at
26
+ # https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
27
+ # That document also provides suggestions that should work for nearly
28
+ # all users that are considering importing and using main() directly.
29
+
30
+ # However, we know that certain users will still want to invoke pip
31
+ # in-process. If you understand and accept the implications of using pip
32
+ # in an unsupported manner, the best approach is to use runpy to avoid
33
+ # depending on the exact location of this entry point.
34
+
35
+ # The following example shows how to use runpy to invoke pip in that
36
+ # case:
37
+ #
38
+ # sys.argv = ["pip", your, args, here]
39
+ # runpy.run_module("pip", run_name="__main__")
40
+ #
41
+ # Note that this will exit the process after running, unlike a direct
42
+ # call to main. As it is not safe to do any processing after calling
43
+ # main, this should not be an issue in practice.
44
+
45
+
46
def main(args: Optional[List[str]] = None) -> int:
    """Run pip's CLI: parse the command line and dispatch to a command."""
    if args is None:
        args = sys.argv[1:]

    # Suppress the pkg_resources deprecation warning
    # Note - we use a module of .*pkg_resources to cover
    # the normal case (pip._vendor.pkg_resources) and the
    # devendored case (a bare pkg_resources)
    warnings.filterwarnings(
        action="ignore", category=DeprecationWarning, module=".*pkg_resources"
    )

    # Route DeprecationWarnings through pip's own loggers.
    deprecation.install_warning_logger()

    autocomplete()

    try:
        cmd_name, cmd_args = parse_command(args)
    except PipError as exc:
        sys.stderr.write(f"ERROR: {exc}")
        sys.stderr.write(os.linesep)
        sys.exit(1)

    # Needed for locale.getpreferredencoding(False) to work
    # in pip._internal.utils.encoding.auto_decode
    try:
        locale.setlocale(locale.LC_ALL, "")
    except locale.Error as e:
        # setlocale can apparently crash if locale are uninitialized
        logger.debug("Ignoring error %s when setting locale", e)

    command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
    return command.main(cmd_args)
myenv/Lib/site-packages/pip/_internal/cli/main_parser.py ADDED
@@ -0,0 +1,134 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """A single place for constructing and exposing the main parser
2
+ """
3
+
4
+ import os
5
+ import subprocess
6
+ import sys
7
+ from typing import List, Optional, Tuple
8
+
9
+ from pip._internal.build_env import get_runnable_pip
10
+ from pip._internal.cli import cmdoptions
11
+ from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
12
+ from pip._internal.commands import commands_dict, get_similar_commands
13
+ from pip._internal.exceptions import CommandError
14
+ from pip._internal.utils.misc import get_pip_version, get_prog
15
+
16
+ __all__ = ["create_main_parser", "parse_command"]
17
+
18
+
19
def create_main_parser() -> ConfigOptionParser:
    """Build and return the top-level option parser for pip's CLI."""

    main_parser = ConfigOptionParser(
        usage="\n%prog <command> [options]",
        add_help_option=False,
        formatter=UpdatingDefaultsHelpFormatter(),
        name="global",
        prog=get_prog(),
    )
    # Stop option parsing at the first positional argument so everything
    # after the subcommand name is handed to the subcommand untouched.
    main_parser.disable_interspersed_args()

    main_parser.version = get_pip_version()

    # Attach the shared "general" option group (verbosity, proxies, etc.).
    main_parser.add_option_group(
        cmdoptions.make_option_group(cmdoptions.general_group, main_parser)
    )

    # Marker attribute checked by the help formatter so it labels the
    # description block "Commands" instead of "Description".
    main_parser.main = True  # type: ignore

    # One summary line per registered subcommand, preceded by a blank line.
    listing = [""]
    for name, command_info in commands_dict.items():
        listing.append(f"{name:27} {command_info.summary}")
    main_parser.description = "\n".join(listing)

    return main_parser
48
+
49
+
50
def identify_python_interpreter(python: str) -> Optional[str]:
    """Resolve *python* to the path of a Python interpreter.

    An existing regular file is returned unchanged. An existing directory
    is treated as a virtual environment and searched for its interpreter.
    Returns ``None`` when nothing usable is found.
    """
    if not os.path.exists(python):
        # Could not find the interpreter specified
        return None

    if not os.path.isdir(python):
        # An existing non-directory path is assumed to be the interpreter.
        return python

    # bin/python for Unix, Scripts/python.exe for Windows.
    # Try both in case of odd cases like cygwin.
    for candidate in ("bin/python", "Scripts/python.exe"):
        exe_path = os.path.join(python, candidate)
        if os.path.exists(exe_path):
            return exe_path

    # Directory exists but contains no recognisable interpreter.
    return None
67
+
68
+
69
def parse_command(args: List[str]) -> Tuple[str, List[str]]:
    """Split *args* into the subcommand name and its remaining arguments.

    Also handles the global ``--python``, ``--version`` and bare
    ``help`` cases before dispatch; each of those exits the process.

    :param args: full argument list (general options + subcommand + rest).
    :return: ``(cmd_name, cmd_args)`` where ``cmd_args`` is *args* with the
        first occurrence of the subcommand name removed.
    :raises CommandError: if the subcommand is unknown, or re-invocation
        under ``--python`` fails to start.
    """
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this
    # call is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout==5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --python
    if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
        # Re-invoke pip using the specified Python interpreter
        interpreter = identify_python_interpreter(general_options.python)
        if interpreter is None:
            raise CommandError(
                f"Could not locate Python interpreter {general_options.python}"
            )

        pip_cmd = [
            interpreter,
            get_runnable_pip(),
        ]
        pip_cmd.extend(args)

        # Set a flag so the child doesn't re-invoke itself, causing
        # an infinite loop.
        os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"
        returncode = 0
        try:
            proc = subprocess.run(pip_cmd)
            returncode = proc.returncode
        except (subprocess.SubprocessError, OSError) as exc:
            # Chain the original error (PEP 3134) so the root cause stays
            # visible in the traceback instead of being silently dropped.
            raise CommandError(
                f"Failed to run pip under {interpreter}: {exc}"
            ) from exc
        sys.exit(returncode)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == "help" and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0]

    if cmd_name not in commands_dict:
        guess = get_similar_commands(cmd_name)

        msg = [f'unknown command "{cmd_name}"']
        if guess:
            msg.append(f'maybe you meant "{guess}"')

        raise CommandError(" - ".join(msg))

    # all the args without the subcommand; list.remove drops only the
    # first occurrence, so a later identical token is preserved
    cmd_args = args[:]
    cmd_args.remove(cmd_name)

    return cmd_name, cmd_args
myenv/Lib/site-packages/pip/_internal/cli/parser.py ADDED
@@ -0,0 +1,294 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Base option parser setup"""
2
+
3
+ import logging
4
+ import optparse
5
+ import shutil
6
+ import sys
7
+ import textwrap
8
+ from contextlib import suppress
9
+ from typing import Any, Dict, Generator, List, Tuple
10
+
11
+ from pip._internal.cli.status_codes import UNKNOWN_ERROR
12
+ from pip._internal.configuration import Configuration, ConfigurationError
13
+ from pip._internal.utils.misc import redact_auth_from_url, strtobool
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Help position must be aligned with __init__.parseopts.description;
        # width tracks the terminal size minus a small margin.
        kwargs.update(
            max_help_position=30,
            indent_increment=1,
            width=shutil.get_terminal_size()[0] - 2,
        )
        super().__init__(*args, **kwargs)

    def format_option_strings(self, option: optparse.Option) -> str:
        return self._format_option_strings(option)

    def _format_option_strings(
        self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
    ) -> str:
        """Return a comma-separated list of option strings and metavars.

        :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string
        :param optsep: separator
        """
        pieces: List[str] = []

        if option._short_opts:
            pieces.append(option._short_opts[0])
        if option._long_opts:
            pieces.append(option._long_opts[0])
        # Separate short and long forms when both are present.
        if len(pieces) > 1:
            pieces.insert(1, optsep)

        if option.takes_value():
            assert option.dest is not None
            metavar = option.metavar or option.dest.lower()
            pieces.append(mvarfmt.format(metavar.lower()))

        return "".join(pieces)

    def format_heading(self, heading: str) -> str:
        # Drop optparse's default "Options" heading entirely.
        return "" if heading == "Options" else f"{heading}:\n"

    def format_usage(self, usage: str) -> str:
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        indented = self.indent_lines(textwrap.dedent(usage), "  ")
        return f"\nUsage: {indented}\n"

    def format_description(self, description: str) -> str:
        # leave full control over description to us
        if not description:
            return ""
        # The main parser flags itself with a ``main`` attribute so its
        # description is labelled as the command listing.
        label = "Commands" if hasattr(self.parser, "main") else "Description"
        # some doc strings have initial newlines / final whitespace, some
        # don't — normalise, dedent, then reindent uniformly.
        cleaned = description.lstrip("\n").rstrip()
        body = self.indent_lines(textwrap.dedent(cleaned), "  ")
        return f"{label}:\n{body}\n"

    def format_epilog(self, epilog: str) -> str:
        # leave full control over epilog to us
        return epilog if epilog else ""

    def indent_lines(self, text: str, indent: str) -> str:
        return "\n".join(indent + line for line in text.split("\n"))
98
+
99
+
100
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.

    This updates the defaults before expanding them, allowing
    them to show up correctly in the help listing.

    Also redacts auth credentials from URL-type options.
    """

    def expand_default(self, option: optparse.Option) -> str:
        default_values = None
        if self.parser is not None:
            assert isinstance(self.parser, ConfigOptionParser)
            # Pull config-file/environment values into the parser defaults
            # first, so "%default" in help text expands to the effective
            # value rather than the hard-coded one.
            self.parser._update_defaults(self.parser.defaults)
            assert option.dest is not None
            default_values = self.parser.defaults.get(option.dest)
        help_text = super().expand_default(option)

        # Only URL-metavar options may embed credentials worth redacting.
        if default_values and option.metavar == "URL":
            if isinstance(default_values, str):
                default_values = [default_values]

            # If its not a list, we should abort and just return the help text
            if not isinstance(default_values, list):
                default_values = []

            for val in default_values:
                # Replace each default URL with a credential-stripped copy.
                help_text = help_text.replace(val, redact_auth_from_url(val))

        return help_text
130
+
131
+
132
class CustomOptionParser(optparse.OptionParser):
    """OptionParser extended with positional group insertion and a
    flattened view over all options."""

    def insert_option_group(
        self, idx: int, *args: Any, **kwargs: Any
    ) -> optparse.OptionGroup:
        """Insert an OptionGroup at a given position."""
        # add_option_group always appends, so relocate the freshly added
        # group from the end of the list to the requested slot.
        group = self.add_option_group(*args, **kwargs)
        self.option_groups.insert(idx, self.option_groups.pop())
        return group

    @property
    def option_list_all(self) -> List[optparse.Option]:
        """Get a list of all options, including those in option groups."""
        flattened = list(self.option_list)
        for group in self.option_groups:
            flattened.extend(group.option_list)
        return flattened
152
+
153
+
154
class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""

    def __init__(
        self,
        *args: Any,
        name: str,
        isolated: bool = False,
        **kwargs: Any,
    ) -> None:
        # ``name`` selects which config section (besides "global") applies
        # to this parser; ``isolated`` is forwarded to Configuration.
        self.name = name
        self.config = Configuration(isolated)

        assert self.name
        super().__init__(*args, **kwargs)

    def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
        """Validate a config-supplied value; exit with status 3 on failure."""
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print(f"An error occurred during configuration: {exc}")
            sys.exit(3)

    def _get_ordered_configuration_items(
        self,
    ) -> Generator[Tuple[str, Any], None, None]:
        """Yield (key, value) config items in override order:
        global section first, then this parser's section, then environment.
        """
        # Configuration gives keys in an unordered manner. Order them.
        override_order = ["global", self.name, ":env:"]

        # Pool the options into different groups
        section_items: Dict[str, List[Tuple[str, Any]]] = {
            name: [] for name in override_order
        }
        for section_key, val in self.config.items():
            # ignore empty values
            if not val:
                logger.debug(
                    "Ignoring configuration key '%s' as it's value is empty.",
                    section_key,
                )
                continue

            # Keys arrive as "section.option"; split only on the first dot
            # so option names containing dots survive intact.
            section, key = section_key.split(".", 1)
            if section in override_order:
                section_items[section].append((key, val))

        # Yield each group in their override order
        for section in override_order:
            for key, val in section_items[section]:
                yield key, val

    def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""

        # Accumulate complex default state.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in self._get_ordered_configuration_items():
            # '--' because configuration supports only long names
            option = self.get_option("--" + key)

            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue

            assert option.dest is not None

            if option.action in ("store_true", "store_false"):
                try:
                    val = strtobool(val)
                except ValueError:
                    self.error(
                        "{} is not a valid value for {} option, "  # noqa
                        "please specify a boolean value like yes/no, "
                        "true/false or 1/0 instead.".format(val, key)
                    )
            elif option.action == "count":
                # Accept either a boolean-ish string or a non-negative int;
                # each conversion is attempted independently.
                with suppress(ValueError):
                    val = strtobool(val)
                with suppress(ValueError):
                    val = int(val)
                if not isinstance(val, int) or val < 0:
                    self.error(
                        "{} is not a valid value for {} option, "  # noqa
                        "please instead specify either a non-negative integer "
                        "or a boolean value like yes/no or false/true "
                        "which is equivalent to 1/0.".format(val, key)
                    )
            elif option.action == "append":
                # Config values for append options are whitespace-separated.
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == "callback":
                # Callback options are invoked now; their final value is
                # read back off self.values after the loop (late_eval).
                assert option.callback is not None
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)

            defaults[option.dest] = val

        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults

    def get_default_values(self) -> optparse.Values:
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)

        # Load the configuration, or error out in case of an error
        try:
            self.config.load()
        except ConfigurationError as err:
            self.exit(UNKNOWN_ERROR, str(err))

        defaults = self._update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            assert option.dest is not None
            default = defaults.get(option.dest)
            # Strings from config still need the option's own validation.
            if isinstance(default, str):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)

    def error(self, msg: str) -> None:
        # Print usage to stderr, then exit with the generic UNKNOWN_ERROR
        # status (overrides optparse's default exit code of 2).
        self.print_usage(sys.stderr)
        self.exit(UNKNOWN_ERROR, f"{msg}\n")