Compare commits
310 Commits
add-licens
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8b0cb0cbc1 | ||
|
|
f8f9882fe0 | ||
|
|
e47236b137 | ||
|
|
501e7bcb42 | ||
|
|
d5f5a09ec9 | ||
|
|
408a169c41 | ||
|
|
6e3cfe41c7 | ||
|
|
b4aadbbac5 | ||
|
|
f710441a6d | ||
|
|
08abfe0ebb | ||
|
|
5867f14aa4 | ||
|
|
21c82d873a | ||
|
|
e02094de5b | ||
|
|
70395755f8 | ||
|
|
ed196d8aba | ||
|
|
22b9b1fc69 | ||
|
|
dfc9e01889 | ||
|
|
c6619ffc3e | ||
|
|
d1ac26833a | ||
|
|
6575871c32 | ||
|
|
b8802544b9 | ||
|
|
e3958951e5 | ||
|
|
566ae09bb7 | ||
|
|
ef9127d876 | ||
|
|
f7dab64069 | ||
|
|
ea9ff431d2 | ||
|
|
2af20fd6bf | ||
|
|
3b13c3df92 | ||
|
|
8a76728343 | ||
|
|
76e15efe75 | ||
|
|
5bbd0f5a6b | ||
|
|
0953981b53 | ||
|
|
70acea30a5 | ||
|
|
dab9b75ef3 | ||
|
|
a86ddffb9f | ||
|
|
dece50601f | ||
|
|
55990be732 | ||
|
|
9489dda310 | ||
|
|
d6819fd631 | ||
|
|
27add5f4b2 | ||
|
|
656f2ff262 | ||
|
|
e34f4bab61 | ||
|
|
60d4ca8e24 | ||
|
|
de2bf42f92 | ||
|
|
ea88a3a650 | ||
|
|
b82e787d4f | ||
|
|
afae71255f | ||
|
|
8847f0a4fd | ||
|
|
3af895a0b6 | ||
|
|
604c40928a | ||
|
|
2e6b523a70 | ||
|
|
786dc89ee8 | ||
|
|
e4aac28186 | ||
|
|
d555d0c247 | ||
|
|
1981d379d5 | ||
|
|
c7f9f44f99 | ||
|
|
afcaf7b70c | ||
|
|
54b4c789a7 | ||
|
|
94b30306f0 | ||
|
|
55217bb628 | ||
|
|
c99031acbe | ||
|
|
7bfb7fcb3c | ||
|
|
22bd61e56c | ||
|
|
698b8fddde | ||
|
|
49e8f297dd | ||
|
|
224a9a5871 | ||
|
|
c65875f3b0 | ||
|
|
b2d5309ce6 | ||
|
|
5c2896ed5e | ||
|
|
dc912e0fc9 | ||
|
|
d304de513d | ||
|
|
7542aac4c1 | ||
|
|
eb434543e9 | ||
|
|
9fe270819a | ||
|
|
aaad13f6d5 | ||
|
|
c63d291f22 | ||
|
|
06d20cbb42 | ||
|
|
b3d964b59e | ||
|
|
99c34046a3 | ||
|
|
9b67c45d15 | ||
|
|
281fbdd845 | ||
|
|
c00b21e0a3 | ||
|
|
19a09d76a2 | ||
|
|
88b4ebba3d | ||
|
|
fdb6fbdcc4 | ||
|
|
1941bffc1b | ||
|
|
3630149afa | ||
|
|
144143c84e | ||
|
|
3c5baa488a | ||
|
|
d8961b97d2 | ||
|
|
5b0fa35185 | ||
|
|
396d7e11b2 | ||
|
|
b136f556e7 | ||
|
|
7ef85f4671 | ||
|
|
61d6d03453 | ||
|
|
be04fcd7f3 | ||
|
|
5b38bcf8e6 | ||
|
|
f95ace3870 | ||
|
|
8fe493a38a | ||
|
|
99550a44f2 | ||
|
|
3307675844 | ||
|
|
62014fa85f | ||
|
|
d717511236 | ||
|
|
19769b8982 | ||
|
|
c5cdbaf6e1 | ||
|
|
5e8870ec1b | ||
|
|
ec33603bb7 | ||
|
|
6719bee133 | ||
|
|
51697d3216 | ||
|
|
210f8958ce | ||
|
|
81480d0c7e | ||
|
|
efcab48689 | ||
|
|
2c5b29260b | ||
|
|
15d0150cee | ||
|
|
104e58a342 | ||
|
|
6f6a739872 | ||
|
|
f34f561617 | ||
|
|
355dee533b | ||
|
|
9fc341a6c8 | ||
|
|
78e338b5de | ||
|
|
194acff111 | ||
|
|
952f7dbee9 | ||
|
|
7c5398e6e1 | ||
|
|
ac92b8a2ad | ||
|
|
789d60ef1c | ||
|
|
e2de6a01fe | ||
|
|
bca970ebc7 | ||
|
|
479b8d6d10 | ||
|
|
dc1d9834f9 | ||
|
|
b206a1eb63 | ||
|
|
2b4079f8a9 | ||
|
|
920fe19608 | ||
|
|
25428d58d5 | ||
|
|
c9d0e6dda7 | ||
|
|
c9c2c630de | ||
|
|
c855c6f06e | ||
|
|
ac96278d74 | ||
|
|
011d0babc2 | ||
|
|
10104774c1 | ||
|
|
1704cbebc8 | ||
|
|
bbb4d69a93 | ||
|
|
f1594cfdde | ||
|
|
86e0fbe556 | ||
|
|
814c960998 | ||
|
|
f1a2060bfb | ||
|
|
99ccfc24e3 | ||
|
|
a5b4d8b10c | ||
|
|
d8ec09cebb | ||
|
|
d2d81b7648 | ||
|
|
b2037302e9 | ||
|
|
e4aefb08db | ||
|
|
28a820a8bc | ||
|
|
4260c23a23 | ||
|
|
1829e536d5 | ||
|
|
de2a29bceb | ||
|
|
f42ed825a0 | ||
|
|
355b2979d8 | ||
|
|
846ba8c135 | ||
|
|
d9e6cbc1df | ||
|
|
992caddc30 | ||
|
|
9f1a159c57 | ||
|
|
3e48284e40 | ||
|
|
98d2b4109e | ||
|
|
d5d5d8b1d0 | ||
|
|
16e85fff4e | ||
|
|
aacd2cf5f4 | ||
|
|
bf5d773e52 | ||
|
|
d72c33b680 | ||
|
|
4b9875b99f | ||
|
|
68359d9a9a | ||
|
|
7ad5b95842 | ||
|
|
90e7797a4c | ||
|
|
a6e2d65a15 | ||
|
|
75e97d01b0 | ||
|
|
a9581e098f | ||
|
|
e53697f082 | ||
|
|
f43107aa76 | ||
|
|
f028d1d6a4 | ||
|
|
9f09650780 | ||
|
|
42ed99472d | ||
|
|
3f067ac81a | ||
|
|
097faaca1e | ||
|
|
96eab8ed1d | ||
|
|
db9f3a5fa7 | ||
|
|
3bacb31832 | ||
|
|
73618a9161 | ||
|
|
d0e9b9de97 | ||
|
|
d60d6fa355 | ||
|
|
0def87377b | ||
|
|
afcfb8e959 | ||
|
|
e91e1fbaff | ||
|
|
71f507ee94 | ||
|
|
0265144b38 | ||
|
|
592bbaab86 | ||
|
|
89cbaf7b61 | ||
|
|
670b2b664e | ||
|
|
b0bfbf8fdc | ||
|
|
06b89317ae | ||
|
|
54d343aa71 | ||
|
|
81e1c5d5a8 | ||
|
|
0380c8508b | ||
|
|
b8bfe308e6 | ||
|
|
2213246de9 | ||
|
|
56813e0ebc | ||
|
|
20abe04049 | ||
|
|
f57b949d30 | ||
|
|
12da6922dc | ||
|
|
bede76377b | ||
|
|
7700f92840 | ||
|
|
7e858de0d4 | ||
|
|
af6dcde39b | ||
|
|
197391d3e8 | ||
|
|
e6e3377437 | ||
|
|
c50cca3870 | ||
|
|
8c2e8a233f | ||
|
|
e341c75e5e | ||
|
|
c7f00a9dac | ||
|
|
d308f753be | ||
|
|
733395cc0a | ||
|
|
cfce83bb84 | ||
|
|
f236f0764e | ||
|
|
a2ed9c690d | ||
|
|
746dd870b2 | ||
|
|
7e27bfff71 | ||
|
|
a0c518896e | ||
|
|
c02eb6c71d | ||
|
|
8350bc6842 | ||
|
|
600525e550 | ||
|
|
1520d8c484 | ||
|
|
0f5be8831a | ||
|
|
f48a1b7aa9 | ||
|
|
c85ee31963 | ||
|
|
304d149ba6 | ||
|
|
640a542d1f | ||
|
|
c08375d0bb | ||
|
|
8332af1181 | ||
|
|
a34566b55b | ||
|
|
310ae52d3f | ||
|
|
77006dd9e0 | ||
|
|
98afd5b985 | ||
|
|
4f94ba1604 | ||
|
|
878a2100bc | ||
|
|
40746635be | ||
|
|
78662c779a | ||
|
|
e430481aa7 | ||
|
|
da890d8599 | ||
|
|
2c35840190 | ||
|
|
2c720f83a8 | ||
|
|
94282777ea | ||
|
|
2200e777c1 | ||
|
|
95fa113a60 | ||
|
|
d97cbceb0d | ||
|
|
5f93e91c35 | ||
|
|
ceaa47b9b2 | ||
|
|
a37920b27b | ||
|
|
027cc87505 | ||
|
|
8bc0d254aa | ||
|
|
a9d4b38a7e | ||
|
|
20af89c0c2 | ||
|
|
99d36a7753 | ||
|
|
1be1e440fd | ||
|
|
8076fa5e02 | ||
|
|
dfeeac4943 | ||
|
|
66811991ad | ||
|
|
a03827c753 | ||
|
|
fce2dfe387 | ||
|
|
0aa328dd9f | ||
|
|
b42264437a | ||
|
|
d816f87410 | ||
|
|
aebb88da37 | ||
|
|
0d252005c8 | ||
|
|
8f76adbf60 | ||
|
|
a67ffd8d8b | ||
|
|
d3eebd2527 | ||
|
|
85a1961b64 | ||
|
|
121976b3b7 | ||
|
|
261b39eebe | ||
|
|
789b209d03 | ||
|
|
0ed755680e | ||
|
|
dbfeab7242 | ||
|
|
68fe711061 | ||
|
|
de1a201c61 | ||
|
|
c70a257baa | ||
|
|
2b30701b2e | ||
|
|
f8780b03b2 | ||
|
|
aefb74822a | ||
|
|
0ae0268d54 | ||
|
|
c7a7c7bced | ||
|
|
e6fc0dc96b | ||
|
|
3c579964ad | ||
|
|
b9cafe8c23 | ||
|
|
a485cfb180 | ||
|
|
fce0656ab1 | ||
|
|
fa113e6fa7 | ||
|
|
b2459a5897 | ||
|
|
b31d053191 | ||
|
|
aa387bd4af | ||
|
|
b88dbfe4d3 | ||
|
|
17cc9ea06c | ||
|
|
bfd2259684 | ||
|
|
e6289bf53a | ||
|
|
9f5552b1e3 | ||
|
|
05f3695e27 | ||
|
|
3388d70696 | ||
|
|
2bcafcd27c | ||
|
|
001768b77d | ||
|
|
ab538ac3e0 | ||
|
|
cad2b5a9da | ||
|
|
63a1ee45b5 | ||
|
|
ba48140de9 |
13
.github/FUNDING.yml
vendored
Normal file
13
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
github: [onlp]
|
||||
patreon: xtekky
|
||||
open_collective: # Replace with a single Open Collective username
|
||||
ko_fi: xtekky
|
||||
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||
liberapay: tekky
|
||||
issuehunt: xtekky
|
||||
otechie: # Replace with a single Otechie username
|
||||
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
|
||||
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
|
||||
32
.gitignore
vendored
Normal file
32
.gitignore
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
# Default ignored files
|
||||
/shelf/
|
||||
/workspace.xml
|
||||
# Editor-based HTTP Client requests
|
||||
/httpRequests/
|
||||
# Datasource local storage ignored files
|
||||
/dataSources/
|
||||
/dataSources.local.xml
|
||||
|
||||
# Ignore local python virtual environment
|
||||
venv/
|
||||
|
||||
# Ignore streamlit_chat_app.py conversations pickle
|
||||
conversations.pkl
|
||||
*.pkl
|
||||
|
||||
# Ignore accounts created by api's
|
||||
accounts.txt
|
||||
|
||||
.idea/
|
||||
|
||||
**/__pycache__/
|
||||
|
||||
__pycache__/
|
||||
|
||||
*.log
|
||||
|
||||
cookie.json
|
||||
|
||||
*.pyc
|
||||
|
||||
dist/
|
||||
18
Dockerfile
Normal file
18
Dockerfile
Normal file
@@ -0,0 +1,18 @@
|
||||
FROM python:3.10
|
||||
|
||||
RUN apt-get update && apt-get install -y git
|
||||
|
||||
RUN mkdir -p /usr/src/gpt4free
|
||||
WORKDIR /usr/src/gpt4free
|
||||
|
||||
# RUN pip config set global.index-url https://mirrors.aliyun.com/pypi/simple/
|
||||
# RUN pip config set global.trusted-host mirrors.aliyun.com
|
||||
|
||||
COPY requirements.txt /usr/src/gpt4free/
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
COPY . /usr/src/gpt4free
|
||||
RUN cp gui/streamlit_app.py .
|
||||
|
||||
EXPOSE 8501
|
||||
|
||||
CMD ["streamlit", "run", "streamlit_app.py"]
|
||||
423
README.md
423
README.md
@@ -1,316 +1,193 @@
|
||||
# Free LLM APIs
|
||||
<img alt="gpt4free logo" src="https://user-images.githubusercontent.com/98614666/233799515-1a7cb6a3-b17f-42c4-956d-8d2a0664466f.png">
|
||||
<img src="https://media.giphy.com/media/LnQjpWaON8nhr21vNW/giphy.gif" width="100" align="left">
|
||||
Just API's from some language model sites.
|
||||
<p>Join our <a href="https://discord.com/invite/gpt4free">discord.gg/gpt4free<a> Discord community! <a href="https://discord.gg/gpt4free"><img align="center" alt="gpt4free Discord" width="22px" src="https://raw.githubusercontent.com/peterthehan/peterthehan/master/assets/discord.svg" /></a></p>
|
||||
|
||||
This repository provides reverse-engineered language models from various sources. Some of these models are already available in the repo, while others are currently being worked on.
|
||||
|
||||
> **Important:** If you come across any website offering free language models, please create an issue or submit a pull request with the details. We will reverse engineer it and add it to this repository.
|
||||
# Related gpt4free projects
|
||||
|
||||
## Best Chatgpt site
|
||||
> https://chat.chatbot.sex/chat
|
||||
> This site was developed by me and includes **gpt-4**, **internet access** and **gpt-jailbreak's** like DAN
|
||||
|
||||
## To-Do List
|
||||
|
||||
- [x] implement poe.com create bot feature | AVAILABLE NOW
|
||||
- [x] renaming the 'poe' module to 'quora'
|
||||
- [x] add you.com api
|
||||
<table>
|
||||
<thead align="center">
|
||||
<tr border: none;>
|
||||
<td><b>🎁 Projects</b></td>
|
||||
<td><b>⭐ Stars</b></td>
|
||||
<td><b>📚 Forks</b></td>
|
||||
<td><b>🛎 Issues</b></td>
|
||||
<td><b>📬 Pull requests</b></td>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td><a href="https://github.com/xtekky/gpt4free"><b>gpt4free</b></a></td>
|
||||
<td><a href="https://github.com/xtekky/gpt4free/stargazers"><img alt="Stars" src="https://img.shields.io/github/stars/xtekky/gpt4free?style=flat-square&labelColor=343b41"/></a></td>
|
||||
<td><a href="https://github.com/xtekky/gpt4free/network/members"><img alt="Forks" src="https://img.shields.io/github/forks/xtekky/gpt4free?style=flat-square&labelColor=343b41"/></a></td>
|
||||
<td><a href="https://github.com/xtekky/gpt4free/issues"><img alt="Issues" src="https://img.shields.io/github/issues/xtekky/gpt4free?style=flat-square&labelColor=343b41"/></a></td>
|
||||
<td><a href="https://github.com/xtekky/gpt4free/pulls"><img alt="Pull Requests" src="https://img.shields.io/github/issues-pr/xtekky/gpt4free?style=flat-square&labelColor=343b41"/></a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a href="https://github.com/xtekky/chatgpt-clone"><b>ChatGPT-Clone</b></a></td>
|
||||
<td><a href="https://github.com/xtekky/chatgpt-clone/stargazers"><img alt="Stars" src="https://img.shields.io/github/stars/xtekky/chatgpt-clone?style=flat-square&labelColor=343b41"/></a></td>
|
||||
<td><a href="https://github.com/xtekky/chatgpt-clone/network/members"><img alt="Forks" src="https://img.shields.io/github/forks/xtekky/chatgpt-clone?style=flat-square&labelColor=343b41"/></a></td>
|
||||
<td><a href="https://github.com/xtekky/chatgpt-clone/issues"><img alt="Issues" src="https://img.shields.io/github/issues/xtekky/chatgpt-clone?style=flat-square&labelColor=343b41"/></a></td>
|
||||
<td><a href="https://github.com/xtekky/chatgpt-clone/pulls"><img alt="Pull Requests" src="https://img.shields.io/github/issues-pr/xtekky/chatgpt-clone?style=flat-square&labelColor=343b41"/></a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free"><b>ChatGpt Discord Bot</b></a></td>
|
||||
<td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free/stargazers"><img alt="Stars" src="https://img.shields.io/github/stars/mishalhossin/Coding-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
|
||||
<td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free/network/members"><img alt="Forks" src="https://img.shields.io/github/forks/mishalhossin/Coding-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
|
||||
<td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free/issues"><img alt="Issues" src="https://img.shields.io/github/issues/mishalhossin/Coding-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
|
||||
<td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free/pulls"><img alt="Pull Requests" src="https://img.shields.io/github/issues-pr/mishalhossin/Coding-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
|
||||
|
||||
## Table of Contents
|
||||
| Section | Description | Link | Status |
|
||||
| ------- | ----------- | ---- | ------ |
|
||||
| **To do list** | List of tasks to be done | [](#todo) | - |
|
||||
| **Current Sites** | Current websites or platforms that can be used as APIs | [](#current-sites) | - |
|
||||
| **Best Sites for gpt4** | Recommended websites or platforms for gpt4 | [](#best-sites) | - |
|
||||
| **Streamlit GPT4Free GUI** | Web-based graphical user interface for interacting with gpt4free | [](#streamlit-gpt4free-gui) | - |
|
||||
| **Docker** | Instructions on how to run gpt4free in a Docker container | [](#docker-instructions) | - |
|
||||
| **ChatGPT clone** | A ChatGPT clone with new features and scalability | [](https://chat.chatbot.sex/chat) | - |
|
||||
| **How to install** | Instructions on how to install gpt4free | [](#install) | - |
|
||||
| **Usage Examples** | | | |
|
||||
| `theb` | Example usage for theb (gpt-3.5) | [](gpt4free/theb/README.md) |  |
|
||||
| `forefront` | Example usage for forefront (gpt-4) | [](gpt4free/forefront/README.md) |  | ||
|
||||
| `quora (poe)` | Example usage for quora | [](gpt4free/quora/README.md) |  |
|
||||
| `you` | Example usage for you | [](gpt4free/you/README.md) |  |
|
||||
| **Try it Out** | | | |
|
||||
| Google Colab Jupyter Notebook | Example usage for gpt4free | [](https://colab.research.google.com/github/DanielShemesh/gpt4free-colab/blob/main/gpt4free.ipynb) | - |
|
||||
| replit Example (feel free to fork this repl) | Example usage for gpt4free | [](https://replit.com/@gpt4free/gpt4free-webui) | - |
|
||||
| **Legal Notice** | Legal notice or disclaimer | [](#legal-notice) | - |
|
||||
| **Copyright** | Copyright information | [](#copyright) | - |
|
||||
| **Star History** | Star History | [](#star-history) | - |
|
||||
|
||||
- [Current Sites (No Authentication / Easy Account Creation)](#current-sites)
|
||||
- [Sites with Authentication (Will Reverse Engineer but Need Account Access)](#sites-with-authentication)
|
||||
- [Usage Examples](#usage-examples)
|
||||
- [`quora (poe)`](#example-poe)
|
||||
- [`phind`](#example-phind)
|
||||
- [`t3nsor`](#example-t3nsor)
|
||||
- [`ora`](#example-ora)
|
||||
- [`writesonic`](#example-writesonic)
|
||||
- [`you`](#example-you)
|
||||
|
||||
## To do list <a name="todo"></a>
|
||||
|
||||
- [x] Add a GUI for the repo
|
||||
- [ ] Make a general package named `gpt4free`, instead of different folders
|
||||
- [ ] Live api status to know which are down and which can be used
|
||||
- [ ] Integrate more API's in `./unfinished` as well as other ones in the lists
|
||||
- [ ] Make an API to use as proxy for other projects
|
||||
- [ ] Make a pypi package
|
||||
|
||||
## Current Sites <a name="current-sites"></a>
|
||||
|
||||
| Website | Model(s) |
|
||||
| -------------------------- | -------------------- |
|
||||
| [ora.sh](https://ora.sh) | GPT-3.5 / 4 |
|
||||
| [poe.com](https://poe.com) | GPT-4/3.5 |
|
||||
| [writesonic.com](https://writesonic.com)|GPT-3.5 / Internet|
|
||||
| [t3nsor.com](https://t3nsor.com)|GPT-3.5|
|
||||
| [you.com](https://you.com)|GPT-3.5 / Internet / good search|
|
||||
| [phind.com](https://phind.com)|GPT-4 / Internet / good search|
|
||||
| Website s | Model(s) |
|
||||
| ------------------------------------------------ | -------------------------------- |
|
||||
| [forefront.ai](https://chat.forefront.ai) | GPT-4/3.5 |
|
||||
| [poe.com](https://poe.com) | GPT-4/3.5 |
|
||||
| [writesonic.com](https://writesonic.com) | GPT-3.5 / Internet |
|
||||
| [t3nsor.com](https://t3nsor.com) | GPT-3.5 |
|
||||
| [you.com](https://you.com) | GPT-3.5 / Internet / good search |
|
||||
| [sqlchat.ai](https://sqlchat.ai) | GPT-3.5 |
|
||||
| [bard.google.com](https://bard.google.com) | custom / search |
|
||||
| [bing.com/chat](https://bing.com/chat) | GPT-4/3.5 |
|
||||
| [chat.forefront.ai/](https://chat.forefront.ai/) | GPT-4/3.5 |
|
||||
|
||||
## Sites with Authentication <a name="sites-with-authentication"></a>
|
||||
## Best sites <a name="best-sites"></a>
|
||||
|
||||
These sites will be reverse engineered but need account access:
|
||||
#### gpt-4
|
||||
|
||||
* [chat.openai.com/chat](https://chat.openai.com/chat)
|
||||
* [bard.google.com](https://bard.google.com)
|
||||
* [bing.com/chat](https://bing.com/chat)
|
||||
- [`/forefront`](gpt4free/forefront/README.md)
|
||||
|
||||
## Usage Examples <a name="usage-examples"></a>
|
||||
#### gpt-3.5
|
||||
|
||||
### Example: `quora (poe)` (use like openai pypi package) - GPT-4 <a name="example-poe"></a>
|
||||
- [`/you`](gpt4free/you/README.md)
|
||||
|
||||
```python
|
||||
# quora model names: (use left key as argument)
|
||||
models = {
|
||||
'sage' : 'capybara',
|
||||
'gpt-4' : 'beaver',
|
||||
'claude-v1.2' : 'a2_2',
|
||||
'claude-instant-v1.0' : 'a2',
|
||||
'gpt-3.5-turbo' : 'chinchilla'
|
||||
}
|
||||
## Install <a name="install"></a>
|
||||
|
||||
Download or clone this GitHub repo
|
||||
install requirements with:
|
||||
|
||||
```sh
|
||||
pip3 install -r requirements.txt
|
||||
```
|
||||
|
||||
#### !! new: bot creation
|
||||
|
||||
```python
|
||||
# import quora (poe) package
|
||||
import quora
|
||||
## To start gpt4free GUI <a name="streamlit-gpt4free-gui"></a>
|
||||
|
||||
# create account
|
||||
# make shure to set enable_bot_creation to True
|
||||
token = quora.Account.create(logging = True, enable_bot_creation=True)
|
||||
Move `streamlit_app.py` from `./gui` to the base folder
|
||||
then run:
|
||||
`streamlit run streamlit_app.py` or `python3 -m streamlit run streamlit_app.py`
|
||||
|
||||
model = quora.Model.create(
|
||||
token = token,
|
||||
model = 'gpt-3.5-turbo', # or claude-instant-v1.0
|
||||
system_prompt = 'you are ChatGPT a large language model ...'
|
||||
)
|
||||
## Docker <a name="docker-instructions"></a>
|
||||
|
||||
print(model.name) # gptx....
|
||||
|
||||
# streaming response
|
||||
for response in quora.StreamingCompletion.create(
|
||||
custom_model = model.name,
|
||||
prompt ='hello world',
|
||||
token = token):
|
||||
|
||||
print(response.completion.choices[0].text)
|
||||
```
|
||||
|
||||
#### Normal Response:
|
||||
```python
|
||||
|
||||
response = quora.Completion.create(model = 'gpt-4',
|
||||
prompt = 'hello world',
|
||||
token = token)
|
||||
|
||||
print(response.completion.choices[0].text)
|
||||
```
|
||||
|
||||
### Example: `phind` (use like openai pypi package) <a name="example-phind"></a>
|
||||
|
||||
```python
|
||||
# HELP WANTED: tls_client does not accept stream and timeout gets hit with long responses
|
||||
|
||||
import phind
|
||||
|
||||
prompt = 'hello world'
|
||||
|
||||
result = phind.Completion.create(
|
||||
model = 'gpt-4',
|
||||
prompt = prompt,
|
||||
results = phind.Search.create(prompt, actualSearch = False), # create search (set actualSearch to False to disable internet)
|
||||
creative = False,
|
||||
detailed = False,
|
||||
codeContext = '') # up to 3000 chars of code
|
||||
|
||||
print(result.completion.choices[0].text)
|
||||
```
|
||||
|
||||
### Example: `t3nsor` (use like openai pypi package) <a name="example-t3nsor"></a>
|
||||
|
||||
```python
|
||||
# Import t3nsor
|
||||
import t3nsor
|
||||
|
||||
# t3nsor.Completion.create
|
||||
# t3nsor.StreamCompletion.create
|
||||
|
||||
[...]
|
||||
Build
|
||||
|
||||
```
|
||||
|
||||
#### Example Chatbot
|
||||
```python
|
||||
messages = []
|
||||
|
||||
while True:
|
||||
user = input('you: ')
|
||||
|
||||
t3nsor_cmpl = t3nsor.Completion.create(
|
||||
prompt = user,
|
||||
messages = messages
|
||||
)
|
||||
|
||||
print('gpt:', t3nsor_cmpl.completion.choices[0].text)
|
||||
|
||||
messages.extend([
|
||||
{'role': 'user', 'content': user },
|
||||
{'role': 'assistant', 'content': t3nsor_cmpl.completion.choices[0].text}
|
||||
])
|
||||
docker build -t gpt4free:latest -f Docker/Dockerfile .
|
||||
```
|
||||
|
||||
#### Streaming Response:
|
||||
Run
|
||||
|
||||
```python
|
||||
for response in t3nsor.StreamCompletion.create(
|
||||
prompt = 'write python code to reverse a string',
|
||||
messages = []):
|
||||
|
||||
print(response.completion.choices[0].text)
|
||||
```
|
||||
docker run -p 8501:8501 gpt4free:latest
|
||||
```
|
||||
Another way - docker-compose (no docker build/run needed)
|
||||
```
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
### Example: `ora` (use like openai pypi package) <a name="example-ora"></a>
|
||||
## Deploy using docker-compose
|
||||
|
||||
### load model (new)
|
||||
Run the following:
|
||||
|
||||
more gpt4 models in `/testing/ora_gpt4.py`
|
||||
|
||||
```python
|
||||
# normal gpt-4: b8b12eaa-5d47-44d3-92a6-4d706f2bcacf
|
||||
model = ora.CompletionModel.load(chatbot_id, 'gpt-4') # or gpt-3.5
|
||||
```
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
#### create model / chatbot:
|
||||
```python
|
||||
# inport ora
|
||||
import ora
|
||||
## ChatGPT clone
|
||||
|
||||
# create model
|
||||
model = ora.CompletionModel.create(
|
||||
system_prompt = 'You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible',
|
||||
description = 'ChatGPT Openai Language Model',
|
||||
name = 'gpt-3.5')
|
||||
> currently implementing new features and trying to scale it, please be patient it may be unstable
|
||||
> https://chat.chatbot.sex/chat
|
||||
> This site was developed by me and includes **gpt-4/3.5**, **internet access** and **gpt-jailbreak's** like DAN
|
||||
> run locally here: https://github.com/xtekky/chatgpt-clone
|
||||
|
||||
# init conversation (will give you a conversationId)
|
||||
init = ora.Completion.create(
|
||||
model = model,
|
||||
prompt = 'hello world')
|
||||
## Legal Notice <a name="legal-notice"></a>
|
||||
|
||||
print(init.completion.choices[0].text)
|
||||
This repository uses third-party APIs and is _not_ associated with or endorsed by the API providers. This project is intended **for educational purposes only**. This is just a little personal project. Sites may contact me to improve their security.
|
||||
|
||||
while True:
|
||||
# pass in conversationId to continue conversation
|
||||
Please note the following:
|
||||
|
||||
prompt = input('>>> ')
|
||||
response = ora.Completion.create(
|
||||
model = model,
|
||||
prompt = prompt,
|
||||
includeHistory = True, # remember history
|
||||
conversationId = init.id)
|
||||
1. **Disclaimer**: The APIs, services, and trademarks mentioned in this repository belong to their respective owners. This project is _not_ claiming any right over them.
|
||||
|
||||
print(response.completion.choices[0].text)
|
||||
2. **Responsibility**: The author of this repository is _not_ responsible for any consequences arising from the use or misuse of this repository or the content provided by the third-party APIs and any damage or losses caused by users' actions.
|
||||
|
||||
3. **Educational Purposes Only**: This repository and its content are provided strictly for educational purposes. By using the information and code provided, users acknowledge that they are using the APIs and models at their own risk and agree to comply with any applicable laws and regulations.
|
||||
|
||||
## Copyright:
|
||||
|
||||
This program is licensed under the [GNU GPL v3](https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
Most code, with the exception of `quora/api.py` (by [ading2210](https://github.com/ading2210)), has been written by me, [xtekky](https://github.com/xtekky).
|
||||
|
||||
### Copyright Notice: <a name="copyright"></a>
|
||||
|
||||
```
|
||||
xtekky/gpt4free: multiple reverse engineered language-model api's to decentralise the ai industry.
|
||||
Copyright (C) 2023 xtekky
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
```
|
||||
|
||||
### Example: `writesonic` (use like openai pypi package) <a name="example-writesonic"></a>
|
||||
|
||||
```python
|
||||
# import writesonic
|
||||
import writesonic
|
||||
## Star History <a name="star-history"></a>
|
||||
|
||||
# create account (3-4s)
|
||||
account = writesonic.Account.create(logging = True)
|
||||
|
||||
# with loging:
|
||||
# 2023-04-06 21:50:25 INFO __main__ -> register success : '{"id":"51aa0809-3053-44f7-922a...' (2s)
|
||||
# 2023-04-06 21:50:25 INFO __main__ -> id : '51aa0809-3053-44f7-922a-2b85d8d07edf'
|
||||
# 2023-04-06 21:50:25 INFO __main__ -> token : 'eyJhbGciOiJIUzI1NiIsInR5cCI6Ik...'
|
||||
# 2023-04-06 21:50:28 INFO __main__ -> got key : '194158c4-d249-4be0-82c6-5049e869533c' (2s)
|
||||
|
||||
# simple completion
|
||||
response = writesonic.Completion.create(
|
||||
api_key = account.key,
|
||||
prompt = 'hello world'
|
||||
)
|
||||
|
||||
print(response.completion.choices[0].text) # Hello! How may I assist you today?
|
||||
|
||||
# conversation
|
||||
|
||||
response = writesonic.Completion.create(
|
||||
api_key = account.key,
|
||||
prompt = 'what is my name ?',
|
||||
enable_memory = True,
|
||||
history_data = [
|
||||
{
|
||||
'is_sent': True,
|
||||
'message': 'my name is Tekky'
|
||||
},
|
||||
{
|
||||
'is_sent': False,
|
||||
'message': 'hello Tekky'
|
||||
}
|
||||
]
|
||||
)
|
||||
|
||||
print(response.completion.choices[0].text) # Your name is Tekky.
|
||||
|
||||
# enable internet
|
||||
|
||||
response = writesonic.Completion.create(
|
||||
api_key = account.key,
|
||||
prompt = 'who won the quatar world cup ?',
|
||||
enable_google_results = True
|
||||
)
|
||||
|
||||
print(response.completion.choices[0].text) # Argentina won the 2022 FIFA World Cup tournament held in Qatar ...
|
||||
```
|
||||
|
||||
### Example: `you` (use like openai pypi package) <a name="example-you"></a>
|
||||
|
||||
```python
|
||||
import you
|
||||
|
||||
# simple request with links and details
|
||||
response = you.Completion.create(
|
||||
prompt = "hello world",
|
||||
detailed = True,
|
||||
includelinks = True,)
|
||||
|
||||
print(response)
|
||||
|
||||
# {
|
||||
# "response": "...",
|
||||
# "links": [...],
|
||||
# "extra": {...},
|
||||
# "slots": {...}
|
||||
# }
|
||||
# }
|
||||
|
||||
#chatbot
|
||||
|
||||
chat = []
|
||||
|
||||
while True:
|
||||
prompt = input("You: ")
|
||||
|
||||
response = you.Completion.create(
|
||||
prompt = prompt,
|
||||
chat = chat)
|
||||
|
||||
print("Bot:", response["response"])
|
||||
|
||||
chat.append({"question": prompt, "answer": response["response"]})
|
||||
```
|
||||
|
||||
## Dependencies
|
||||
|
||||
The repository is written in Python and requires the following packages:
|
||||
|
||||
* websocket-client
|
||||
* requests
|
||||
* tls-client
|
||||
|
||||
You can install these packages using the provided `requirements.txt` file.
|
||||
|
||||
## Repository structure:
|
||||
.
|
||||
├── ora/
|
||||
├── quora/ (/poe)
|
||||
├── t3nsor/
|
||||
├── testing/
|
||||
├── writesonic/
|
||||
├── you/
|
||||
├── README.md <-- this file.
|
||||
└── requirements.txt
|
||||
<a href="https://github.com/xtekky/gpt4free/stargazers">
|
||||
<img width="500" alt="Star History Chart" src="https://api.star-history.com/svg?repos=xtekky/gpt4free&type=Date">
|
||||
</a>
|
||||
|
||||
15
Singularity/gpt4free.sif
Normal file
15
Singularity/gpt4free.sif
Normal file
@@ -0,0 +1,15 @@
|
||||
Bootstrap: docker
|
||||
From: python:3.10-slim
|
||||
|
||||
%post
|
||||
apt-get update && apt-get install -y git
|
||||
git clone https://github.com/xtekky/gpt4free.git
|
||||
cd gpt4free
|
||||
pip install --no-cache-dir -r requirements.txt
|
||||
cp gui/streamlit_app.py .
|
||||
|
||||
%expose
|
||||
8501
|
||||
|
||||
%startscript
|
||||
exec streamlit run streamlit_app.py
|
||||
9
docker-compose.yaml
Normal file
9
docker-compose.yaml
Normal file
@@ -0,0 +1,9 @@
|
||||
version: "3.9"
|
||||
|
||||
services:
|
||||
gpt4free:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
ports:
|
||||
- "8501:8501"
|
||||
12
docker-compose.yml
Normal file
12
docker-compose.yml
Normal file
@@ -0,0 +1,12 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
gpt4:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
image: gpt4free:latest
|
||||
container_name: gpt4
|
||||
ports:
|
||||
- 8501:8501
|
||||
restart: unless-stopped
|
||||
@@ -1,115 +0,0 @@
|
||||
from requests import Session
|
||||
from re import search
|
||||
from random import randint
|
||||
from json import dumps, loads
|
||||
from random import randint
|
||||
from urllib.parse import urlencode
|
||||
from dotenv import load_dotenv; load_dotenv()
|
||||
from os import getenv
|
||||
|
||||
from bard.typings import BardResponse
|
||||
|
||||
token = getenv('1psid')
|
||||
proxy = getenv('proxy')
|
||||
|
||||
temperatures = {
|
||||
0 : "Generate text strictly following known patterns, with no creativity.",
|
||||
0.1: "Produce text adhering closely to established patterns, allowing minimal creativity.",
|
||||
0.2: "Create text with modest deviations from familiar patterns, injecting a slight creative touch.",
|
||||
0.3: "Craft text with a mild level of creativity, deviating somewhat from common patterns.",
|
||||
0.4: "Formulate text balancing creativity and recognizable patterns for coherent results.",
|
||||
0.5: "Generate text with a moderate level of creativity, allowing for a mix of familiarity and novelty.",
|
||||
0.6: "Compose text with an increased emphasis on creativity, while partially maintaining familiar patterns.",
|
||||
0.7: "Produce text favoring creativity over typical patterns for more original results.",
|
||||
0.8: "Create text heavily focused on creativity, with limited concern for familiar patterns.",
|
||||
0.9: "Craft text with a strong emphasis on unique and inventive ideas, largely ignoring established patterns.",
|
||||
1 : "Generate text with maximum creativity, disregarding any constraints of known patterns or structures."
|
||||
}
|
||||
|
||||
class Completion:
    """Stateless client for Bard's internal StreamGenerate endpoint.

    Relies on the module-level ``token`` (the __Secure-1PSID cookie) and
    ``proxy`` values read from the environment at import time.
    """

    @staticmethod
    def create(
            prompt: str = 'hello world',
            temperature: float = None,
            conversation_id: str = '',
            response_id: str = '',
            choice_id: str = '') -> BardResponse:
        """Send *prompt* to Bard and return the parsed response.

        :param prompt: user input forwarded to Bard
        :param temperature: optional key of the module-level ``temperatures``
            table (0, 0.1 … 1); when set, a creativity instruction is
            prepended to the prompt. (Annotated ``float``: the table keys are
            floats, not ints as previously annotated.)
        :param conversation_id: id from a previous response; '' starts fresh
        :param response_id: id from a previous response; '' starts fresh
        :param choice_id: id from a previous response; '' starts fresh
        :return: a BardResponse wrapping the decoded payload
        :raises KeyError: if *temperature* is not a key of ``temperatures``
        """
        if temperature:
            prompt = f'''settings: follow these settings for your response: [temperature: {temperature} - {temperatures[temperature]}] | prompt : {prompt}'''

        client = Session()
        client.proxies = {
            'http': f'http://{proxy}',
            'https': f'http://{proxy}'} if proxy else None

        client.headers = {
            'authority': 'bard.google.com',
            'content-type': 'application/x-www-form-urlencoded;charset=UTF-8',
            'origin': 'https://bard.google.com',
            'referer': 'https://bard.google.com/',
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
            'x-same-domain': '1',
            'cookie': f'__Secure-1PSID={token}'
        }

        # SNlM0e is a per-session anti-CSRF value embedded in the HTML page.
        snlm0e = search(r'SNlM0e\":\"(.*?)\"', client.get('https://bard.google.com/').text).group(1)

        params = urlencode({
            'bl': 'boq_assistant-bard-web-server_20230326.21_p0',
            '_reqid': randint(1111, 9999),
            'rt': 'c',
        })

        response = client.post(
            f'https://bard.google.com/_/BardChatUi/data/assistant.lamda.BardFrontendService/StreamGenerate?{params}',
            data={
                'at': snlm0e,
                'f.req': dumps([None, dumps([
                    [prompt],
                    None,
                    [conversation_id, response_id, choice_id],
                ])
                ])
            }
        )

        chat_data = loads(response.content.splitlines()[3])[0][2]
        if not chat_data:
            # BUG FIX: the recursive retry's result was previously discarded,
            # so callers received None; return it instead.
            print('error, retrying')
            return Completion.create(prompt, temperature, conversation_id, response_id, choice_id)

        json_chat_data = loads(chat_data)
        results = {
            'content': json_chat_data[0][0],
            'conversation_id': json_chat_data[1][0],
            'response_id': json_chat_data[1][1],
            'factualityQueries': json_chat_data[3],
            'textQuery': json_chat_data[2][0] if json_chat_data[2] is not None else '',
            'choices': [{'id': i[0], 'content': i[1]} for i in json_chat_data[4]],
        }

        return BardResponse(results)
|
||||
@@ -1,15 +0,0 @@
|
||||
class BardResponse:
    """Attribute-style wrapper around the decoded Bard result dictionary."""

    class BardChoice:
        """A single alternative answer offered by Bard."""

        def __init__(self, choice_dict):
            self.id = choice_dict.get('id')
            # 'content' arrives as a list of strings; keep the first variant.
            self.content = choice_dict.get('content')[0]

    def __init__(self, json_dict):
        # Keep the raw payload for callers that need fields not mapped below.
        self.json = json_dict

        self.content = json_dict.get('content')
        self.conversation_id = json_dict.get('conversation_id')
        self.response_id = json_dict.get('response_id')
        self.factuality_queries = json_dict.get('factualityQueries', [])
        self.text_query = json_dict.get('textQuery', [])
        self.choices = [self.BardChoice(entry) for entry in json_dict.get('choices', [])]
|
||||
@@ -1,151 +0,0 @@
|
||||
from requests import get
|
||||
from browser_cookie3 import edge, chrome
|
||||
from ssl import create_default_context
|
||||
from certifi import where
|
||||
from uuid import uuid4
|
||||
from random import randint
|
||||
from json import dumps, loads
|
||||
|
||||
import asyncio
|
||||
import websockets
|
||||
|
||||
# TLS context trusting the certifi CA bundle; passed to the websocket connect.
ssl_context = create_default_context()
ssl_context.load_verify_locations(where())
|
||||
|
||||
def format(msg: dict) -> str:
    """Serialize *msg* as JSON and append the SignalR record separator (0x1e)."""
    payload = dumps(msg)
    return payload + '\x1e'
|
||||
|
||||
def get_token():
    """Read the Bing auth cookie ('_U') from the local Edge browser profile.

    Raises KeyError if no '_U' cookie exists (user not signed in to bing.com).
    """

    cookies = {c.name: c.value for c in edge(domain_name='bing.com')}
    return cookies['_U']
|
||||
|
||||
|
||||
|
||||
class AsyncCompletion:
    """Async client for Bing Chat over the Sydney ChatHub websocket."""

    async def create(
            prompt: str = 'hello world',
            optionSets: list = None,
            token: str = None):
        """Yield response-text deltas for *prompt* as an async generator.

        :param prompt: user message to send
        :param optionSets: feature flags for the request; ``None`` selects the
            default set (fixes the previous mutable default list argument)
        :param token: Bing '_U' cookie; fetched lazily from the local browser
            when omitted (previously the default was evaluated at
            class-definition time, reading the browser on import)
        """
        if optionSets is None:
            optionSets = [
                'deepleo',
                'enable_debug_commands',
                'disable_emoji_spoken_text',
                'enablemm',
                'h3relaxedimg'
            ]
        if token is None:
            token = get_token()

        # Create a conversation to obtain the ids and signature required below.
        create = get('https://edgeservices.bing.com/edgesvc/turing/conversation/create',
            headers={
                'host': 'edgeservices.bing.com',
                'authority': 'edgeservices.bing.com',
                'cookie': f'_U={token}',
                'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.69',
            }
        )

        conversationId = create.json()['conversationId']
        clientId = create.json()['clientId']
        conversationSignature = create.json()['conversationSignature']

        wss = await websockets.connect('wss://sydney.bing.com/sydney/ChatHub', max_size=None, ssl=ssl_context,
            extra_headers={
                'accept': 'application/json',
                'accept-language': 'en-US,en;q=0.9',
                'content-type': 'application/json',
                'sec-ch-ua': '"Not_A Brand";v="99", Microsoft Edge";v="110", "Chromium";v="110"',
                'sec-ch-ua-arch': '"x86"',
                'sec-ch-ua-bitness': '"64"',
                'sec-ch-ua-full-version': '"109.0.1518.78"',
                'sec-ch-ua-full-version-list': '"Chromium";v="110.0.5481.192", "Not A(Brand";v="24.0.0.0", "Microsoft Edge";v="110.0.1587.69"',
                'sec-ch-ua-mobile': '?0',
                'sec-ch-ua-model': "",
                'sec-ch-ua-platform': '"Windows"',
                'sec-ch-ua-platform-version': '"15.0.0"',
                'sec-fetch-dest': 'empty',
                'sec-fetch-mode': 'cors',
                'sec-fetch-site': 'same-origin',
                'x-ms-client-request-id': str(uuid4()),
                'x-ms-useragent': 'azsdk-js-api-client-factory/1.0.0-beta.1 core-rest-pipeline/1.10.0 OS/Win32',
                'Referer': 'https://www.bing.com/search?q=Bing+AI&showconv=1&FORM=hpcodx',
                'Referrer-Policy': 'origin-when-cross-origin',
                'x-forwarded-for': f'13.{randint(104, 107)}.{randint(0, 255)}.{randint(0, 255)}'
            }
        )

        # SignalR protocol handshake.
        await wss.send(format({'protocol': 'json', 'version': 1}))
        await wss.recv()

        struct = {
            'arguments': [
                {
                    'source': 'cib',
                    'optionsSets': optionSets,
                    'isStartOfSession': True,
                    'message': {
                        'author': 'user',
                        'inputMethod': 'Keyboard',
                        'text': prompt,
                        'messageType': 'Chat'
                    },
                    'conversationSignature': conversationSignature,
                    'participant': {
                        'id': clientId
                    },
                    'conversationId': conversationId
                }
            ],
            'invocationId': '0',
            'target': 'chat',
            'type': 4
        }

        await wss.send(format(struct))

        base_string = ''

        final = False
        while not final:
            # Messages are 0x1e-separated JSON records.
            objects = str(await wss.recv()).split('\x1e')
            for obj in objects:
                if obj is None or obj == '':
                    continue

                response = loads(obj)
                if response.get('type') == 1 and response['arguments'][0].get('messages',):
                    response_text = response['arguments'][0]['messages'][0]['adaptiveCards'][0]['body'][0].get('text')

                    # The server resends the whole running text; yield only the
                    # newly appended suffix.
                    yield (response_text.replace(base_string, ''))
                    base_string = response_text

                elif response.get('type') == 2:
                    # Type 2 marks the final, complete answer.
                    final = True

        await wss.close()
|
||||
|
||||
async def run():
    """Demo driver: stream one completion and echo the chunks to stdout."""
    demo_prompt = 'summarize cinderella with each word beginning with a consecutive letter of the alphabet, a-z'
    flags = [
        "galileo",
    ]
    async for value in AsyncCompletion.create(prompt=demo_prompt, optionSets=flags):
        print(value, end='', flush=True)
|
||||
|
||||
if __name__ == '__main__':
    # Run the demo only when executed as a script, not when imported.
    asyncio.run(run())
|
||||
@@ -1,72 +0,0 @@
|
||||
# experimental, needs chat.openai.com to be loaded with cf_clearance on browser ( can be closed after )
|
||||
|
||||
from tls_client import Session
|
||||
from uuid import uuid4
|
||||
|
||||
from browser_cookie3 import chrome
|
||||
|
||||
def session_auth(client):
    """Fetch the current chat.openai.com session object (incl. accessToken).

    *client* must already carry valid chat.openai.com cookies.
    """
    request_headers = {
        'authority': 'chat.openai.com',
        'accept': '*/*',
        'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
        'cache-control': 'no-cache',
        'pragma': 'no-cache',
        'referer': 'https://chat.openai.com/chat',
        'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
        'sec-ch-ua-mobile': '?0',
        'sec-ch-ua-platform': '"macOS"',
        'sec-fetch-dest': 'empty',
        'sec-fetch-mode': 'cors',
        'sec-fetch-site': 'same-origin',
        'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
    }
    session_url = 'https://chat.openai.com/api/auth/session'
    return client.get(session_url, headers=request_headers).json()
|
||||
|
||||
# TLS session impersonating Chrome 110 (needed to pass Cloudflare checks).
client = Session(client_identifier='chrome110')

# Reuse the local Chrome profile's chat.openai.com cookies — this is where the
# required cf_clearance cookie comes from (see the note at the top of the file).
for cookie in chrome(domain_name='chat.openai.com'):
    client.cookies[cookie.name] = cookie.value

# Default headers for the backend API; the bearer token is obtained from the
# session endpoint using the cookies loaded above.
client.headers = {
    'authority': 'chat.openai.com',
    'accept': 'text/event-stream',
    'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
    'authorization': 'Bearer ' + session_auth(client)['accessToken'],
    'cache-control': 'no-cache',
    'content-type': 'application/json',
    'origin': 'https://chat.openai.com',
    'pragma': 'no-cache',
    'referer': 'https://chat.openai.com/chat',
    'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"macOS"',
    'sec-fetch-dest': 'empty',
    'sec-fetch-mode': 'cors',
    'sec-fetch-site': 'same-origin',
    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
}

# Ask for the next completion; the fixed parent_message_id plus a fresh message
# uuid starts the exchange.
response = client.post('https://chat.openai.com/backend-api/conversation', json = {
    'action': 'next',
    'messages': [
        {
            'id': str(uuid4()),
            'author': {
                'role': 'user',
            },
            'content': {
                'content_type': 'text',
                'parts': [
                    'hello world',
                ],
            },
        },
    ],
    'parent_message_id': '9b4682f7-977c-4c8a-b5e6-9713e73dfe01',
    'model': 'text-davinci-002-render-sha',
    'timezone_offset_min': -120,
})

# Raw server-sent-events stream; not parsed here.
print(response.text)
|
||||
115
gpt4free/README.md
Normal file
115
gpt4free/README.md
Normal file
@@ -0,0 +1,115 @@
|
||||
# gpt4free package
|
||||
|
||||
### What is it?
|
||||
|
||||
gpt4free is a Python package that provides access to several language-model APIs
|
||||
|
||||
### Main Features
|
||||
|
||||
- It's free to use
|
||||
- Easy access
|
||||
|
||||
### Installation:
|
||||
|
||||
```bash
|
||||
pip install gpt4free
|
||||
```
|
||||
|
||||
#### Usage:
|
||||
|
||||
```python
|
||||
import gpt4free
|
||||
from gpt4free import Provider, quora, forefront
|
||||
|
||||
# usage You
|
||||
response = gpt4free.Completion.create(Provider.You, prompt='Write a poem on Lionel Messi')
|
||||
print(response)
|
||||
|
||||
# usage Poe
|
||||
token = quora.Account.create(logging=False)
|
||||
response = gpt4free.Completion.create(Provider.Poe, prompt='Write a poem on Lionel Messi', token=token, model='ChatGPT')
|
||||
print(response)
|
||||
|
||||
# usage forefront
|
||||
token = forefront.Account.create(logging=False)
|
||||
response = gpt4free.Completion.create(
|
||||
Provider.ForeFront, prompt='Write a poem on Lionel Messi', model='gpt-4', token=token
|
||||
)
|
||||
print(response)
|
||||
print(f'END')
|
||||
|
||||
# usage theb
|
||||
response = gpt4free.Completion.create(Provider.Theb, prompt='Write a poem on Lionel Messi')
|
||||
print(response)
|
||||
|
||||
# usage cocalc
|
||||
response = gpt4free.Completion.create(Provider.CoCalc, prompt='Write a poem on Lionel Messi', cookie_input='')
|
||||
print(response)
|
||||
|
||||
```
|
||||
|
||||
### Invocation Arguments
|
||||
|
||||
`gpt4free.Completion.create()` method has two required arguments
|
||||
|
||||
1. Provider: This is an enum representing the different providers
|
||||
2. prompt: This is the user input
|
||||
|
||||
#### Keyword Arguments
|
||||
|
||||
Some of the keyword arguments are optional, while others are required.
|
||||
|
||||
- You:
|
||||
- `safe_search`: boolean - default value is `False`
|
||||
- `include_links`: boolean - default value is `False`
|
||||
- `detailed`: boolean - default value is `False`
|
||||
- Quora:
|
||||
- `token`: str - this needs to be provided by the user
|
||||
- `model`: str - default value is `gpt-4`.
|
||||
|
||||
(Available models: `['Sage', 'GPT-4', 'Claude+', 'Claude-instant', 'ChatGPT', 'Dragonfly', 'NeevaAI']`)
|
||||
- ForeFront:
|
||||
- `token`: str - this needs to be provided by the user
|
||||
|
||||
- Theb:
|
||||
(no keyword arguments required)
|
||||
- CoCalc:
|
||||
- `cookie_input`: str - this needs to be provided by user
|
||||
|
||||
#### Token generation of quora
|
||||
```python
|
||||
from gpt4free import quora
|
||||
|
||||
token = quora.Account.create(logging=False)
|
||||
```
|
||||
|
||||
### Token generation of ForeFront
|
||||
```python
|
||||
from gpt4free import forefront
|
||||
|
||||
token = forefront.Account.create(logging=False)
|
||||
```
|
||||
|
||||
## Copyright:
|
||||
|
||||
This program is licensed under the [GNU GPL v3](https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
### Copyright Notice: <a name="copyright"></a>
|
||||
|
||||
```
|
||||
xtekky/gpt4free: multiple reverse engineered language-model api's to decentralise the ai industry.
|
||||
Copyright (C) 2023 xtekky
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
```
|
||||
73
gpt4free/__init__.py
Normal file
73
gpt4free/__init__.py
Normal file
@@ -0,0 +1,73 @@
|
||||
from enum import Enum
|
||||
|
||||
from gpt4free import cocalc
|
||||
from gpt4free import forefront
|
||||
from gpt4free import quora
|
||||
from gpt4free import theb
|
||||
from gpt4free import you
|
||||
from gpt4free import usesless
|
||||
|
||||
|
||||
class Provider(Enum):
    """An enum representing different providers."""

    You = 'you'  # handled by the `you` module
    Poe = 'poe'  # handled by the `quora` module
    ForeFront = 'fore_front'  # handled by the `forefront` module
    Theb = 'theb'  # handled by the `theb` module
    CoCalc = 'cocalc'  # handled by the `cocalc` module
    UseLess = 'useless'  # handled by the `usesless` module
|
||||
|
||||
|
||||
class Completion:
    """This class will be used for invoking the given provider"""

    @staticmethod
    def create(provider: Provider, prompt: str, **kwargs) -> str:
        """
        Invokes the given provider with given prompt and addition arguments and returns the string response

        :param provider: an enum representing the provider to use while invoking
        :param prompt: input provided by the user
        :param kwargs: Additional keyword arguments to pass to the provider while invoking
        :return: A string representing the response from the provider
        :raises Exception: if the provider is not one of the known enum members
        """
        # Dispatch table instead of a long if/elif chain: adding a provider is
        # a one-line change and the unknown-provider case is handled in one place.
        dispatch = {
            Provider.Poe: Completion.__poe_service,
            Provider.You: Completion.__you_service,
            Provider.ForeFront: Completion.__fore_front_service,
            Provider.Theb: Completion.__theb_service,
            Provider.CoCalc: Completion.__cocalc_service,
            Provider.UseLess: Completion.__useless_service,
        }
        try:
            service = dispatch[provider]
        except KeyError:
            raise Exception('Provider not exist, Please try again')
        return service(prompt, **kwargs)

    @staticmethod
    def __useless_service(prompt: str, **kwargs) -> str:
        # usesless already returns plain text.
        return usesless.Completion.create(prompt=prompt, **kwargs)

    @staticmethod
    def __you_service(prompt: str, **kwargs) -> str:
        return you.Completion.create(prompt, **kwargs).text

    @staticmethod
    def __poe_service(prompt: str, **kwargs) -> str:
        return quora.Completion.create(prompt=prompt, **kwargs).text

    @staticmethod
    def __fore_front_service(prompt: str, **kwargs) -> str:
        return forefront.Completion.create(prompt=prompt, **kwargs).text

    @staticmethod
    def __theb_service(prompt: str, **kwargs):
        # theb streams chunks; join them into a single string.
        # NOTE(review): kwargs are accepted for interface symmetry but are not
        # forwarded to theb, matching the original behavior.
        return ''.join(theb.Completion.create(prompt=prompt))

    @staticmethod
    def __cocalc_service(prompt: str, **kwargs):
        return cocalc.Completion.create(prompt, cookie_input=kwargs.get('cookie_input', '')).text
|
||||
67
gpt4free/cocalc/__init__.py
Normal file
67
gpt4free/cocalc/__init__.py
Normal file
@@ -0,0 +1,67 @@
|
||||
import requests
|
||||
from fake_useragent import UserAgent
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class CoCalcResponse(BaseModel):
    """Result of a CoCalc chatbot call."""

    # Completion text from the endpoint (empty string on failure).
    text: str
    # True when the API reported success, False otherwise.
    status: bool
|
||||
|
||||
|
||||
class Completion:
    """A class for generating text completions using CoCalc's GPT-based chatbot."""

    API_ENDPOINT = "https://cocalc.com/api/v2/openai/chatgpt"
    DEFAULT_SYSTEM_PROMPT = "ASSUME I HAVE FULL ACCESS TO COCALC. "

    @staticmethod
    def create(prompt: str, cookie_input: str) -> CoCalcResponse:
        """
        Generate a text completion for the given prompt using CoCalc's GPT-based chatbot.

        Args:
            prompt: The text prompt to complete.
            cookie_input: The cookie required to authenticate the chatbot API request.

        Returns:
            A CoCalcResponse object containing the text completion and a boolean indicating
            whether the request was successful.
        """

        # Initialize a session with custom headers
        session = Completion._initialize_session(cookie_input)

        # Set the data that will be submitted
        payload = Completion._create_payload(prompt, Completion.DEFAULT_SYSTEM_PROMPT)

        try:
            # Submit the request and return the results
            response = session.post(Completion.API_ENDPOINT, json=payload).json()
            return CoCalcResponse(text=response['output'], status=response['success'])
        except (requests.exceptions.RequestException, KeyError) as e:
            # BUG FIX: a payload missing 'output'/'success' previously raised a
            # bare KeyError past this handler; treat it as a failed request too.
            print(f"Error: {e}")
            return CoCalcResponse(text="", status=False)

    @classmethod
    def _initialize_session(cls, conversation_cookie: str) -> requests.Session:
        """Initialize a session with custom headers for the request."""

        session = requests.Session()
        headers = {
            "Accept": "*/*",
            "Accept-Language": "en-US,en;q=0.5",
            "Origin": "https://cocalc.com",
            "Referer": "https://cocalc.com/api/v2/openai/chatgpt",
            "Cookie": conversation_cookie,
            # Randomized UA to vary the request fingerprint.
            "User-Agent": UserAgent().random,
        }
        session.headers.update(headers)

        return session

    @staticmethod
    def _create_payload(prompt: str, system_prompt: str) -> dict:
        """Create the payload for the API request."""

        return {"input": prompt, "system": system_prompt, "tag": "next:index"}
|
||||
19
gpt4free/cocalc/readme.md
Normal file
19
gpt4free/cocalc/readme.md
Normal file
@@ -0,0 +1,19 @@
|
||||
### Example: `cocalc` <a name="example-cocalc"></a>
|
||||
|
||||
```python
|
||||
# import library
|
||||
from gpt4free import cocalc
|
||||
|
||||
cocalc.Completion.create(prompt="How are you!", cookie_input="cookieinput") ## Tutorial
|
||||
```
|
||||
|
||||
### How to grab cookie input
|
||||
```js
|
||||
// input this into ur developer tools console and the exact response u get from this u put into ur cookieInput!
|
||||
var cookies = document.cookie.split("; ");
|
||||
var cookieString = "";
|
||||
for (var i = 0; i < cookies.length; i++) {
|
||||
cookieString += cookies[i] + "; ";
|
||||
}
|
||||
console.log(cookieString);
|
||||
```
|
||||
13
gpt4free/forefront/README.md
Normal file
13
gpt4free/forefront/README.md
Normal file
@@ -0,0 +1,13 @@
|
||||
### Example: `forefront` (use like openai pypi package) <a name="example-forefront"></a>
|
||||
|
||||
```python
|
||||
from gpt4free import forefront
|
||||
# create an account
|
||||
token = forefront.Account.create(logging=False)
|
||||
print(token)
|
||||
# get a response
|
||||
for response in forefront.StreamingCompletion.create(token=token,
|
||||
prompt='hello world', model='gpt-4'):
|
||||
print(response.completion.choices[0].text, end='')
|
||||
print("")
|
||||
```
|
||||
194
gpt4free/forefront/__init__.py
Normal file
194
gpt4free/forefront/__init__.py
Normal file
@@ -0,0 +1,194 @@
|
||||
from json import loads
|
||||
from re import findall
|
||||
from time import time, sleep
|
||||
from typing import Generator, Optional
|
||||
from uuid import uuid4
|
||||
|
||||
from fake_useragent import UserAgent
|
||||
from requests import post
|
||||
from pymailtm import MailTm, Message
|
||||
from tls_client import Session
|
||||
|
||||
from .typing import ForeFrontResponse
|
||||
|
||||
|
||||
class Account:
    """Creates throwaway forefront.ai accounts via a temporary mailbox."""

    @staticmethod
    def create(proxy: Optional[str] = None, logging: bool = False):
        """Register a new account and return its session JWT.

        :param proxy: optional host:port, applied to both http and https
        :param logging: when True, print progress/debug values to stdout
        :return: the JWT string on success, or the string
                 'Failed to create account!' on failure
        """
        proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else False

        start = time()

        # Disposable mailbox that will receive the verification e-mail.
        mail_client = MailTm().get_account()
        mail_address = mail_client.address

        client = Session(client_identifier='chrome110')
        client.proxies = proxies
        client.headers = {
            'origin': 'https://accounts.forefront.ai',
            'user-agent': UserAgent().random,
        }

        # Step 1: start a Clerk sign-up for the temporary address.
        response = client.post(
            'https://clerk.forefront.ai/v1/client/sign_ups?_clerk_js_version=4.38.4',
            data={'email_address': mail_address},
        )

        try:
            trace_token = response.json()['response']['id']
            if logging:
                print(trace_token)
        except KeyError:
            return 'Failed to create account!'

        # Step 2: ask Clerk to send the e-mail verification link.
        response = client.post(
            f'https://clerk.forefront.ai/v1/client/sign_ups/{trace_token}/prepare_verification?_clerk_js_version=4.38.4',
            data={
                'strategy': 'email_link',
                'redirect_url': 'https://accounts.forefront.ai/sign-up/verify'
            },
        )

        if logging:
            print(response.text)

        if 'sign_up_attempt' not in response.text:
            return 'Failed to create account!'

        # Step 3: poll the mailbox until the verification mail arrives and
        # extract the verification URL from its body.
        while True:
            sleep(1)
            new_message: Message = mail_client.wait_for_message()
            if logging:
                print(new_message.data['id'])

            verification_url = findall(r'https:\/\/clerk\.forefront\.ai\/v1\/verify\?token=\w.+', new_message.text)[0]

            if verification_url:
                break

        if logging:
            print(verification_url)

        # Step 4: follow the link, then read the authenticated client state
        # to obtain the session JWT.
        response = client.get(verification_url)

        response = client.get('https://clerk.forefront.ai/v1/client?_clerk_js_version=4.38.4')

        token = response.json()['response']['sessions'][0]['last_active_token']['jwt']

        # Persist the credentials for later reuse.
        with open('accounts.txt', 'a') as f:
            f.write(f'{mail_address}:{token}\n')

        if logging:
            print(time() - start)

        return token
|
||||
|
||||
|
||||
class StreamingCompletion:
    """Streams completion chunks from the forefront chat server."""

    @staticmethod
    def create(
        token=None,
        chat_id=None,
        prompt='',
        action_type='new',
        default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0',  # default
        model='gpt-4',
        proxy=None
    ) -> Generator[ForeFrontResponse, None, None]:
        """Yield one ForeFrontResponse per streamed content delta.

        :param token: session JWT (see Account.create); required
        :param chat_id: conversation/workspace id; a fresh UUID when omitted
        :param prompt: user input
        :param action_type: forefront action type (e.g. 'new')
        :param default_persona: persona UUID attached to the message
        :param model: model name (e.g. 'gpt-4')
        :param proxy: optional host:port proxy for the request
        :raises Exception: when no token is supplied
        """
        if not token:
            raise Exception('Token is required!')
        if not chat_id:
            chat_id = str(uuid4())

        proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None

        headers = {
            'authority': 'chat-server.tenant-forefront-default.knative.chi.coreweave.com',
            'accept': '*/*',
            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'authorization': 'Bearer ' + token,
            'cache-control': 'no-cache',
            'content-type': 'application/json',
            'origin': 'https://chat.forefront.ai',
            'pragma': 'no-cache',
            'referer': 'https://chat.forefront.ai/',
            'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'cross-site',
            'user-agent': UserAgent().random,
        }

        json_data = {
            'text': prompt,
            'action': action_type,
            'parentId': chat_id,
            'workspaceId': chat_id,
            'messagePersona': default_persona,
            'model': model,
        }

        for chunk in post(
            'https://chat-server.tenant-forefront-default.knative.chi.coreweave.com/chat',
            headers=headers,
            proxies=proxies,
            json=json_data,
            stream=True,
        ).iter_lines():
            if b'finish_reason":null' in chunk:
                data = loads(chunk.decode('utf-8').split('data: ')[1])
                # BUG FIX: use a dedicated local for the streamed delta instead
                # of clobbering the `token` auth parameter.
                delta = data['choices'][0]['delta'].get('content')

                if delta is not None:
                    yield ForeFrontResponse(
                        **{
                            'id': chat_id,
                            'object': 'text_completion',
                            'created': int(time()),
                            'text': delta,
                            'model': model,
                            'choices': [{'text': delta, 'index': 0, 'logprobs': None, 'finish_reason': 'stop'}],
                            'usage': {
                                'prompt_tokens': len(prompt),
                                'completion_tokens': len(delta),
                                'total_tokens': len(prompt) + len(delta),
                            },
                        }
                    )
|
||||
|
||||
|
||||
class Completion:
    """Blocking wrapper that collects a StreamingCompletion into one response."""

    @staticmethod
    def create(
        token=None,
        chat_id=None,
        prompt='',
        action_type='new',
        default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0',  # default
        model='gpt-4',
        proxy=None
    ) -> ForeFrontResponse:
        """Consume the streaming API and return one aggregated response."""
        accumulated = ''
        last_chunk = None

        stream = StreamingCompletion.create(
            token=token,
            chat_id=chat_id,
            prompt=prompt,
            action_type=action_type,
            default_persona=default_persona,
            model=model,
            proxy=proxy
        )
        for chunk in stream:
            if not chunk:
                continue
            last_chunk = chunk
            accumulated += chunk.text

        if last_chunk is None:
            raise Exception('Unable to get the response, Please try again')

        # Reuse the final chunk as the envelope, with the full text spliced in.
        last_chunk.text = accumulated
        return last_chunk
|
||||
|
||||
25
gpt4free/forefront/typing.py
Normal file
25
gpt4free/forefront/typing.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from typing import Any, List
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class Choice(BaseModel):
    """One completion alternative (OpenAI-style schema)."""

    # Text content of this choice.
    text: str
    # Position of the choice within the response.
    index: int
    # Log-probability payload; None in this client (see StreamingCompletion).
    logprobs: Any
    # Why generation stopped, e.g. 'stop'.
    finish_reason: str
|
||||
|
||||
|
||||
class Usage(BaseModel):
    """Token accounting for one response (OpenAI-style schema)."""

    # Tokens consumed by the prompt.
    prompt_tokens: int
    # Tokens produced in the completion.
    completion_tokens: int
    # prompt_tokens + completion_tokens.
    total_tokens: int
|
||||
|
||||
|
||||
class ForeFrontResponse(BaseModel):
    """A single forefront completion response (OpenAI-style schema)."""

    # Conversation/workspace id the response belongs to.
    id: str
    # Object type tag, e.g. 'text_completion'.
    object: str
    # Unix timestamp of creation.
    created: int
    # Model that produced the response.
    model: str
    # Completion alternatives.
    choices: List[Choice]
    # Token accounting.
    usage: Usage
    # Convenience copy of the response text.
    text: str
|
||||
68
gpt4free/quora/README.md
Normal file
68
gpt4free/quora/README.md
Normal file
@@ -0,0 +1,68 @@
|
||||
|
||||
> ⚠ Warning !!!
|
||||
poe.com added security and can detect if you are making automated requests. You may get your account banned if you are using this api.
|
||||
The normal non-driver api is also currently not very stable
|
||||
|
||||
|
||||
### Example: `quora (poe)` (use like openai pypi package) - GPT-4 <a name="example-poe"></a>
|
||||
|
||||
```python
|
||||
# quora model names: (use left key as argument)
|
||||
models = {
|
||||
'sage' : 'capybara',
|
||||
'gpt-4' : 'beaver',
|
||||
'claude-v1.2' : 'a2_2',
|
||||
'claude-instant-v1.0' : 'a2',
|
||||
'gpt-3.5-turbo' : 'chinchilla'
|
||||
}
|
||||
```
|
||||
|
||||
### New: bot creation
|
||||
|
||||
```python
|
||||
# import quora (poe) package
|
||||
from gpt4free import quora
|
||||
|
||||
# create account
|
||||
# make sure to set enable_bot_creation to True
|
||||
token = quora.Account.create(logging=True, enable_bot_creation=True)
|
||||
|
||||
model = quora.Model.create(
|
||||
token=token,
|
||||
model='gpt-3.5-turbo', # or claude-instant-v1.0
|
||||
system_prompt='you are ChatGPT a large language model ...'
|
||||
)
|
||||
|
||||
print(model.name) # gptx....
|
||||
|
||||
# streaming response
|
||||
for response in quora.StreamingCompletion.create(
|
||||
custom_model=model.name,
|
||||
prompt='hello world',
|
||||
token=token):
|
||||
print(response.completion.choices[0].text)
|
||||
```
|
||||
|
||||
### Normal Response:
|
||||
```python
|
||||
|
||||
response = quora.Completion.create(model = 'gpt-4',
|
||||
prompt = 'hello world',
|
||||
token = token)
|
||||
|
||||
print(response.completion.choices[0].text)
|
||||
```
|
||||
|
||||
### Update Use This For Poe
|
||||
```python
|
||||
from gpt4free.quora import Poe
|
||||
|
||||
# available models: ['Sage', 'GPT-4', 'Claude+', 'Claude-instant', 'ChatGPT', 'Dragonfly', 'NeevaAI']
|
||||
|
||||
poe = Poe(model='ChatGPT', driver='firefox', cookie_path='cookie.json', driver_path='path_of_driver')
|
||||
poe.chat('who won the football world cup most?')
|
||||
|
||||
# new bot creation
|
||||
poe.create_bot('new_bot_name', prompt='You are new test bot', base_model='gpt-3.5-turbo')
|
||||
|
||||
```
|
||||
477
gpt4free/quora/__init__.py
Normal file
477
gpt4free/quora/__init__.py
Normal file
@@ -0,0 +1,477 @@
|
||||
import json
|
||||
from datetime import datetime
|
||||
from hashlib import md5
|
||||
from json import dumps
|
||||
from pathlib import Path
|
||||
from random import choice, choices, randint
|
||||
from re import search, findall
|
||||
from string import ascii_letters, digits
|
||||
from typing import Optional, Union, List, Any, Generator
|
||||
from urllib.parse import unquote
|
||||
|
||||
import selenium.webdriver.support.expected_conditions as EC
|
||||
from fake_useragent import UserAgent
|
||||
from pydantic import BaseModel
|
||||
from pypasser import reCaptchaV3
|
||||
from requests import Session
|
||||
from selenium.webdriver import Firefox, Chrome, FirefoxOptions, ChromeOptions
|
||||
from selenium.webdriver.common.by import By
|
||||
from selenium.webdriver.support.wait import WebDriverWait
|
||||
from tls_client import Session as TLS
|
||||
|
||||
from .api import Client as PoeClient
|
||||
from .mail import Emailnator
|
||||
|
||||
SELENIUM_WEB_DRIVER_ERROR_MSG = b'''The error message you are receiving is due to the `geckodriver` executable not
|
||||
being found in your system\'s PATH. To resolve this issue, you need to download the geckodriver and add its location
|
||||
to your system\'s PATH.\n\nHere are the steps to resolve the issue:\n\n1. Download the geckodriver for your platform
|
||||
(Windows, macOS, or Linux) from the following link: https://github.com/mozilla/geckodriver/releases\n\n2. Extract the
|
||||
downloaded archive and locate the geckodriver executable.\n\n3. Add the geckodriver executable to your system\'s
|
||||
PATH.\n\nFor macOS and Linux:\n\n- Open a terminal window.\n- Move the geckodriver executable to a directory that is
|
||||
already in your PATH, or create a new directory and add it to your PATH:\n\n```bash\n# Example: Move geckodriver to
|
||||
/usr/local/bin\nmv /path/to/your/geckodriver /usr/local/bin\n```\n\n- If you created a new directory, add it to your
|
||||
PATH:\n\n```bash\n# Example: Add a new directory to PATH\nexport PATH=$PATH:/path/to/your/directory\n```\n\nFor
|
||||
Windows:\n\n- Right-click on "My Computer" or "This PC" and select "Properties".\n- Click on "Advanced system
|
||||
settings".\n- Click on the "Environment Variables" button.\n- In the "System variables" section, find the "Path"
|
||||
variable, select it, and click "Edit".\n- Click "New" and add the path to the directory containing the geckodriver
|
||||
executable.\n\nAfter adding the geckodriver to your PATH, restart your terminal or command prompt and try running
|
||||
your script again. The error should be resolved.'''
|
||||
|
||||
# from twocaptcha import TwoCaptcha
|
||||
# solver = TwoCaptcha('72747bf24a9d89b4dcc1b24875efd358')
|
||||
|
||||
# Maps user-facing model display names (as shown on poe.com) to Poe's internal
# bot codenames, which is what the GraphQL API actually expects.
MODELS = {
    'Sage': 'capybara',
    'GPT-4': 'beaver',
    'Claude+': 'a2_2',
    'Claude-instant': 'a2',
    'ChatGPT': 'chinchilla',
    'Dragonfly': 'nutria',
    'NeevaAI': 'hutia',
}
|
||||
|
||||
|
||||
def extract_formkey(html):
    """Recover the ``poe-formkey`` anti-bot token from a poe.com HTML page.

    The page embeds an obfuscated ``<script>`` that assembles the formkey by
    copying characters of a hex string into positions of a result array
    (``x[i]=y[j]`` assignments). This replays that shuffle and returns the
    assembled key as a string.
    """
    obfuscated = search(r'<script>if\(.+\)throw new Error;(.+)</script>', html).group(1)
    hex_source = search(r'var .="([0-9a-f]+)",', obfuscated).group(1)
    assignments = findall(r'.\[(\d+)\]=.\[(\d+)\]', obfuscated)

    # Replay each x[dst] = y[src] copy; positions never written stay empty,
    # matching the original pre-sized-list behaviour.
    slots = {}
    for dst, src in assignments:
        slots[int(dst)] = hex_source[int(src)]

    return ''.join(slots.get(i, '') for i in range(len(assignments)))
|
||||
|
||||
|
||||
class Choice(BaseModel):
    """One completion choice, mirroring the OpenAI completion response shape."""

    text: str  # generated text for this choice
    index: int  # position in the parent 'choices' list (always 0 here)
    logprobs: Any  # set to None by this module; kept for OpenAI-shape compatibility
    finish_reason: str  # e.g. 'stop'
|
||||
|
||||
|
||||
class Usage(BaseModel):
    """Token-usage block of a response.

    NOTE(review): callers in this module fill these with *character* counts
    (len of the strings), not real token counts — treat as a rough proxy.
    """

    prompt_tokens: int  # size of the prompt
    completion_tokens: int  # size of the generated text
    total_tokens: int  # prompt + completion
|
||||
|
||||
|
||||
class PoeResponse(BaseModel):
    """OpenAI-style completion response assembled from a Poe message chunk."""

    id: int  # Poe messageId
    object: str  # always 'text_completion' in this module
    created: int  # Poe creationTime timestamp
    model: str  # internal bot codename the message came from
    choices: List[Choice]  # single-element list wrapping the text
    usage: Usage  # character-count based usage proxy
    text: str  # convenience copy of the generated text
|
||||
|
||||
|
||||
class ModelResponse:
    """Thin view over the ``poeBotCreate`` GraphQL mutation response."""

    def __init__(self, json_response: dict) -> None:
        # Hoist the shared path once instead of repeating the deep lookup.
        bot = json_response['data']['poeBotCreate']['bot']
        self.id = bot['id']  # opaque bot identifier
        self.name = bot['displayName']  # handle shown on poe.com
        self.limit = bot['messageLimit']['dailyLimit']  # daily message quota
        self.deleted = bot['deletionState']  # deletion status string
|
||||
|
||||
|
||||
class Model:
    """Creates custom Poe bots through the ``poeBotCreate`` GraphQL mutation."""

    @staticmethod
    def create(
        token: str,
        model: str = 'gpt-3.5-turbo',  # claude-instant
        system_prompt: str = 'You are ChatGPT a large language model developed by Openai. Answer as consisely as possible',
        description: str = 'gpt-3.5 language model from openai, skidded by poe.com',
        handle: str = None,
    ) -> ModelResponse:
        """Create a new bot on the account identified by *token*.

        :param token: value of the account's ``p-b`` cookie
        :param model: base model — either a ``MODELS`` display name or one of
            the aliases ``'gpt-3.5-turbo'`` / ``'claude-instant'``
        :param system_prompt: system prompt baked into the bot
        :param description: public bot description
        :param handle: bot handle; a random ``gptx…`` handle is generated when omitted
        :raises ValueError: if *model* is not a known name or alias
        :raises Exception: if Poe rejects the creation (e.g. bot creation disabled)
        :return: :class:`ModelResponse` describing the created bot
        """
        # Fix: the declared default 'gpt-3.5-turbo' is not a key of MODELS and
        # previously raised KeyError. Resolve display names first, then the
        # API-style aliases (chinchilla == ChatGPT/gpt-3.5, a2 == Claude-instant).
        aliases = {'gpt-3.5-turbo': 'chinchilla', 'claude-instant': 'a2'}
        base_model = MODELS.get(model, aliases.get(model))
        if base_model is None:
            raise ValueError(f'Unknown model: {model!r}. Use one of {list(MODELS)} or {list(aliases)}.')

        if not handle:
            handle = f'gptx{randint(1111111, 9999999)}'

        client = Session()
        client.cookies['p-b'] = token

        # The formkey and tchannel are anti-bot values Poe requires on every
        # GraphQL request; both are scraped from the live site.
        formkey = extract_formkey(client.get('https://poe.com').text)
        settings = client.get('https://poe.com/api/settings').json()

        client.headers = {
            'host': 'poe.com',
            'origin': 'https://poe.com',
            'referer': 'https://poe.com/',
            'poe-formkey': formkey,
            'poe-tchannel': settings['tchannelData']['channel'],
            'user-agent': UserAgent().random,
            'connection': 'keep-alive',
            'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'content-type': 'application/json',
            'sec-fetch-site': 'same-origin',
            'sec-fetch-mode': 'cors',
            'sec-fetch-dest': 'empty',
            'accept': '*/*',
            'accept-encoding': 'gzip, deflate, br',
            'accept-language': 'en-GB,en-US;q=0.9,en;q=0.8',
        }

        payload = dumps(
            separators=(',', ':'),
            obj={
                'queryName': 'CreateBotMain_poeBotCreate_Mutation',
                'variables': {
                    'model': base_model,
                    'handle': handle,
                    'prompt': system_prompt,
                    'isPromptPublic': True,
                    'introduction': '',
                    'description': description,
                    'profilePictureUrl': 'https://qph.fs.quoracdn.net/main-qimg-24e0b480dcd946e1cc6728802c5128b6',
                    'apiUrl': None,
                    'apiKey': ''.join(choices(ascii_letters + digits, k=32)),
                    'isApiBot': False,
                    'hasLinkification': False,
                    'hasMarkdownRendering': False,
                    'hasSuggestedReplies': False,
                    'isPrivateBot': False,
                },
                'query': 'mutation CreateBotMain_poeBotCreate_Mutation(\n  $model: String!\n  $handle: String!\n  $prompt: String!\n  $isPromptPublic: Boolean!\n  $introduction: String!\n  $description: String!\n  $profilePictureUrl: String\n  $apiUrl: String\n  $apiKey: String\n  $isApiBot: Boolean\n  $hasLinkification: Boolean\n  $hasMarkdownRendering: Boolean\n  $hasSuggestedReplies: Boolean\n  $isPrivateBot: Boolean\n) {\n  poeBotCreate(model: $model, handle: $handle, promptPlaintext: $prompt, isPromptPublic: $isPromptPublic, introduction: $introduction, description: $description, profilePicture: $profilePictureUrl, apiUrl: $apiUrl, apiKey: $apiKey, isApiBot: $isApiBot, hasLinkification: $hasLinkification, hasMarkdownRendering: $hasMarkdownRendering, hasSuggestedReplies: $hasSuggestedReplies, isPrivateBot: $isPrivateBot) {\n    status\n    bot {\n      id\n      ...BotHeader_bot\n    }\n  }\n}\n\nfragment BotHeader_bot on Bot {\n  displayName\n  messageLimit {\n    dailyLimit\n  }\n  ...BotImage_bot\n  ...BotLink_bot\n  ...IdAnnotation_node\n  ...botHelpers_useViewerCanAccessPrivateBot\n  ...botHelpers_useDeletion_bot\n}\n\nfragment BotImage_bot on Bot {\n  displayName\n  ...botHelpers_useDeletion_bot\n  ...BotImage_useProfileImage_bot\n}\n\nfragment BotImage_useProfileImage_bot on Bot {\n  image {\n    __typename\n    ... on LocalBotImage {\n      localName\n    }\n    ... on UrlBotImage {\n      url\n    }\n  }\n  ...botHelpers_useDeletion_bot\n}\n\nfragment BotLink_bot on Bot {\n  displayName\n}\n\nfragment IdAnnotation_node on Node {\n  __isNode: __typename\n  id\n}\n\nfragment botHelpers_useDeletion_bot on Bot {\n  deletionState\n}\n\nfragment botHelpers_useViewerCanAccessPrivateBot on Bot {\n  isPrivateBot\n  viewerIsCreator\n}\n',
            },
        )

        # poe-tag-id is an md5 over payload + formkey + a fixed salt; the server
        # verifies it against the request body.
        base_string = payload + client.headers['poe-formkey'] + 'WpuLMiXEKKE98j56k'
        client.headers['poe-tag-id'] = md5(base_string.encode()).hexdigest()

        response = client.post('https://poe.com/api/gql_POST', data=payload)

        if 'success' not in response.text:
            raise Exception(
                '''
                Bot creation Failed
                !! Important !!
                Bot creation was not enabled on this account
                please use: quora.Account.create with enable_bot_creation set to True
                '''
            )

        return ModelResponse(response.json())
|
||||
|
||||
|
||||
class Account:
    """Registers throwaway Poe accounts via the Emailnator disposable-mail service."""

    @staticmethod
    def create(
        proxy: Optional[str] = None,
        logging: bool = False,
        enable_bot_creation: bool = False,
    ):
        """Sign up a new Poe account using a disposable e-mail address.

        :param proxy: optional ``host:port`` HTTP proxy used for all requests
        :param logging: when True, print progress (address, codes, API replies)
        :param enable_bot_creation: NOTE(review): currently unused in this body —
            confirm whether a follow-up mutation is supposed to consume it
        :return: None — NOTE(review): the README shows
            ``token = quora.Account.create(...)`` but this function never returns
            the ``p-b`` cookie; confirm against the caller's expectations.
        """
        client = TLS(client_identifier='chrome110')
        client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else {}

        mail_client = Emailnator()
        mail_address = mail_client.get_mail()

        if logging:
            print('email', mail_address)

        client.headers = {
            'authority': 'poe.com',
            'accept': '*/*',
            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'content-type': 'application/json',
            'origin': 'https://poe.com',
            'poe-tag-id': 'null',
            'referer': 'https://poe.com/login',
            'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
            'poe-formkey': extract_formkey(client.get('https://poe.com/login').text),
            'poe-tchannel': client.get('https://poe.com/api/settings').json()['tchannelData']['channel'],
        }

        # Solve the invisible enterprise reCAPTCHA guarding the login endpoint.
        token = reCaptchaV3(
            'https://www.recaptcha.net/recaptcha/enterprise/anchor?ar=1&k=6LflhEElAAAAAI_ewVwRWI9hsyV4mbZnYAslSvlG&co=aHR0cHM6Ly9wb2UuY29tOjQ0Mw..&hl=en&v=4PnKmGB9wRHh1i04o7YUICeI&size=invisible&cb=bi6ivxoskyal'
        )

        # Step 1: ask Poe to e-mail a verification code to the disposable address.
        payload = dumps(
            separators=(',', ':'),
            obj={
                'queryName': 'MainSignupLoginSection_sendVerificationCodeMutation_Mutation',
                'variables': {
                    'emailAddress': mail_address,
                    'phoneNumber': None,
                    'recaptchaToken': token,
                },
                'query': 'mutation MainSignupLoginSection_sendVerificationCodeMutation_Mutation(\n  $emailAddress: String\n  $phoneNumber: String\n  $recaptchaToken: String\n) {\n  sendVerificationCode(verificationReason: login, emailAddress: $emailAddress, phoneNumber: $phoneNumber, recaptchaToken: $recaptchaToken) {\n    status\n    errorMessage\n  }\n}\n',
            },
        )

        # poe-tag-id is an md5 over payload + formkey + a fixed salt.
        base_string = payload + client.headers['poe-formkey'] + 'WpuLMiXEKKE98j56k'
        client.headers['poe-tag-id'] = md5(base_string.encode()).hexdigest()

        # Fix: the header dump (which includes session secrets) was printed
        # unconditionally; it is now gated behind the existing logging flag.
        if logging:
            print(dumps(client.headers, indent=4))

        response = client.post('https://poe.com/api/gql_POST', data=payload)

        if 'automated_request_detected' in response.text:
            print('please try using a proxy / wait for fix')

        if 'Bad Request' in response.text:
            if logging:
                print('bad request, retrying...', response.json())
            # NOTE(review): quit() raises SystemExit and kills the host process —
            # harsh for a library, but preserved as existing behaviour.
            quit()

        if logging:
            print('send_code', response.json())

        # Step 2: pull the 6-7 digit code out of the verification e-mail.
        mail_content = mail_client.get_message()
        mail_token = findall(r';">(\d{6,7})</div>', mail_content)[0]

        if logging:
            print('code', mail_token)

        # Step 3: complete signup with the code.
        payload = dumps(
            separators=(',', ':'),
            obj={
                'queryName': 'SignupOrLoginWithCodeSection_signupWithVerificationCodeMutation_Mutation',
                'variables': {
                    'verificationCode': str(mail_token),
                    'emailAddress': mail_address,
                    'phoneNumber': None,
                },
                'query': 'mutation SignupOrLoginWithCodeSection_signupWithVerificationCodeMutation_Mutation(\n  $verificationCode: String!\n  $emailAddress: String\n  $phoneNumber: String\n) {\n  signupWithVerificationCode(verificationCode: $verificationCode, emailAddress: $emailAddress, phoneNumber: $phoneNumber) {\n    status\n    errorMessage\n  }\n}\n',
            },
        )

        base_string = payload + client.headers['poe-formkey'] + 'WpuLMiXEKKE98j56k'
        client.headers['poe-tag-id'] = md5(base_string.encode()).hexdigest()

        response = client.post('https://poe.com/api/gql_POST', data=payload)
        if logging:
            print('verify_code', response.json())

    def get(self):
        """Return a random cached ``p-b`` token from the bundled ``cookies.txt``."""
        # Fix: use a context manager so the file handle is closed promptly;
        # the previous open(...).read() chain leaked the handle.
        with open(Path(__file__).resolve().parent / 'cookies.txt', 'r') as fp:
            cookies = fp.read().splitlines()
        return choice(cookies)
|
||||
|
||||
|
||||
class StreamingCompletion:
    """Streams a Poe bot's reply as OpenAI-style completion chunks."""

    @staticmethod
    def create(
        model: str = 'gpt-4',
        custom_model: Optional[str] = None,
        prompt: str = 'hello world',
        token: str = '',
        proxy: Optional[str] = None
    ) -> Generator[PoeResponse, None, None]:
        """Yield one :class:`PoeResponse` per text delta received from Poe.

        :param model: display name key of ``MODELS`` (ignored when *custom_model* is set)
        :param custom_model: internal bot name/handle to use instead of *model*
            (annotation fixed: this is a string, not a bool, matching Completion.create)
        :param prompt: user message to send
        :param token: account ``p-b`` cookie value
        :param proxy: optional ``host:port`` HTTP proxy
        """
        _model = MODELS[model] if not custom_model else custom_model

        # NOTE(review): PoeClient.proxy is set to False (not None) when no proxy
        # is given — preserved as-is; confirm the client treats False as "no proxy".
        proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else False
        client = PoeClient(token)
        client.proxy = proxies

        for chunk in client.send_message(_model, prompt):
            # Each chunk's 'text_new' holds only the delta since the last chunk.
            yield PoeResponse(
                **{
                    'id': chunk['messageId'],
                    'object': 'text_completion',
                    'created': chunk['creationTime'],
                    'model': _model,
                    'text': chunk['text_new'],
                    'choices': [
                        {
                            'text': chunk['text_new'],
                            'index': 0,
                            'logprobs': None,
                            'finish_reason': 'stop',
                        }
                    ],
                    # Character counts, not real token counts — rough usage proxy.
                    'usage': {
                        'prompt_tokens': len(prompt),
                        'completion_tokens': len(chunk['text_new']),
                        'total_tokens': len(prompt) + len(chunk['text_new']),
                    },
                }
            )
|
||||
|
||||
|
||||
class Completion:
    """Blocking (non-streaming) completion against a Poe bot."""

    @staticmethod
    def create(
        model: str = 'gpt-4',
        custom_model: str = None,
        prompt: str = 'hello world',
        token: str = '',
        proxy: Optional[str] = None
    ) -> PoeResponse:
        """Send *prompt* and return the final, fully-assembled response.

        :param model: display name key of ``MODELS`` (ignored when *custom_model* is set)
        :param custom_model: internal bot name/handle to use instead of *model*
        :param prompt: user message to send
        :param token: account ``p-b`` cookie value
        :param proxy: optional ``host:port`` HTTP proxy
        :raises RuntimeError: if Poe yields no message chunks at all
        """
        _model = MODELS[model] if not custom_model else custom_model

        # NOTE(review): PoeClient.proxy is set to False (not None) when no proxy
        # is given — preserved as-is; confirm the client treats False as "no proxy".
        proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else False
        client = PoeClient(token)
        client.proxy = proxies

        # Drain the stream; the last chunk carries the complete text.
        chunk = None
        for response in client.send_message(_model, prompt):
            chunk = response

        # Fix: previously an empty stream left chunk=None and crashed below with
        # an opaque TypeError; fail with an explicit error instead.
        if chunk is None:
            raise RuntimeError('Poe returned no response chunks for the prompt.')

        return PoeResponse(
            **{
                'id': chunk['messageId'],
                'object': 'text_completion',
                'created': chunk['creationTime'],
                'model': _model,
                'text': chunk['text'],
                'choices': [
                    {
                        'text': chunk['text'],
                        'index': 0,
                        'logprobs': None,
                        'finish_reason': 'stop',
                    }
                ],
                # Character counts, not real token counts — rough usage proxy.
                'usage': {
                    'prompt_tokens': len(prompt),
                    'completion_tokens': len(chunk['text']),
                    'total_tokens': len(prompt) + len(chunk['text']),
                },
            }
        )
|
||||
|
||||
|
||||
class Poe:
    """High-level, Selenium-assisted Poe chat client.

    On first use it registers a throwaway account (disposable e-mail via
    Emailnator, browser automation via Selenium), caches the resulting ``p-b``
    cookie in *cookie_path*, and thereafter talks to Poe through PoeClient.
    """

    def __init__(
        self,
        model: str = 'ChatGPT',
        driver: str = 'firefox',
        download_driver: bool = False,  # NOTE(review): unused in this body — confirm intent
        driver_path: Optional[str] = None,
        cookie_path: str = './quora/cookie.json',
    ):
        # validating the model
        if model and model not in MODELS:
            raise RuntimeError('Sorry, the model you provided does not exist. Please check and try again.')
        self.model = MODELS[model]  # internal codename used for chats by default
        self.cookie_path = cookie_path
        self.cookie = self.__load_cookie(driver, driver_path=driver_path)
        self.client = PoeClient(self.cookie)

    def __load_cookie(self, driver: str, driver_path: Optional[str] = None) -> str:
        """Return the URL-decoded ``p-b`` cookie value.

        Loads the cached cookie from *cookie_path* when it exists and has not
        expired; otherwise registers a fresh account with Selenium.
        """
        if (cookie_file := Path(self.cookie_path)).exists():
            with cookie_file.open() as fp:
                cookie = json.load(fp)
                # Selenium stores 'expiry' as a unix timestamp; re-register when stale.
                if datetime.fromtimestamp(cookie['expiry']) < datetime.now():
                    cookie = self.__register_and_get_cookie(driver, driver_path=driver_path)
                else:
                    print('Loading the cookie from file')
        else:
            cookie = self.__register_and_get_cookie(driver, driver_path=driver_path)

        return unquote(cookie['value'])

    def __register_and_get_cookie(self, driver: str, driver_path: Optional[str] = None) -> dict:
        """Drive a headless browser through Poe's e-mail signup flow and return
        the Selenium cookie dict for ``p-b`` (also persisted to *cookie_path*)."""
        mail_client = Emailnator()
        mail_address = mail_client.get_mail()

        driver = self.__resolve_driver(driver, driver_path=driver_path)
        driver.get("https://www.poe.com")

        # clicking use email button
        driver.find_element(By.XPATH, '//button[contains(text(), "Use email")]').click()

        email = WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.XPATH, '//input[@type="email"]')))
        email.send_keys(mail_address)
        driver.find_element(By.XPATH, '//button[text()="Go"]').click()

        # Pull the 6-7 digit verification code out of the signup e-mail.
        code = findall(r';">(\d{6,7})</div>', mail_client.get_message())[0]
        print(code)

        verification_code = WebDriverWait(driver, 30).until(
            EC.presence_of_element_located((By.XPATH, '//input[@placeholder="Code"]'))
        )
        verification_code.send_keys(code)
        # Poe shows either a "Verify" (new account) or "Log In" (existing) button.
        verify_button = EC.presence_of_element_located((By.XPATH, '//button[text()="Verify"]'))
        login_button = EC.presence_of_element_located((By.XPATH, '//button[text()="Log In"]'))

        WebDriverWait(driver, 30).until(EC.any_of(verify_button, login_button)).click()

        cookie = driver.get_cookie('p-b')

        with open(self.cookie_path, 'w') as fw:
            json.dump(cookie, fw)

        # NOTE(review): close() only closes the current window; quit() would also
        # end the driver process — confirm before changing.
        driver.close()
        return cookie

    @staticmethod
    def __resolve_driver(driver: str, driver_path: Optional[str] = None) -> Union[Firefox, Chrome]:
        """Build a headless Firefox or Chrome driver; raise a setup-help message
        (SELENIUM_WEB_DRIVER_ERROR_MSG) if the browser cannot be started."""
        options = FirefoxOptions() if driver == 'firefox' else ChromeOptions()
        options.add_argument('-headless')

        if driver_path:
            # NOTE(review): binary_location is the *browser* binary path, not the
            # webdriver executable path — confirm callers pass the right thing.
            options.binary_location = driver_path
        try:
            return Firefox(options=options) if driver == 'firefox' else Chrome(options=options)
        except Exception:
            raise Exception(SELENIUM_WEB_DRIVER_ERROR_MSG)

    def chat(self, message: str, model: Optional[str] = None) -> str:
        """Send *message* and return the final full reply text.

        :param model: optional MODELS display name overriding the instance default
        :raises RuntimeError: if *model* is given but unknown
        """
        if model and model not in MODELS:
            raise RuntimeError('Sorry, the model you provided does not exist. Please check and try again.')
        model = MODELS[model] if model else self.model
        response = None
        # Drain the stream; the last chunk's 'text' is the complete reply.
        for chunk in self.client.send_message(model, message):
            response = chunk['text']
        return response

    def create_bot(
        self,
        name: str,
        /,
        prompt: str = '',
        base_model: str = 'ChatGPT',
        description: str = '',
    ) -> None:
        """Create a new bot named *name* on the logged-in account.

        :param name: bot handle (positional-only)
        :param prompt: system prompt for the bot
        :param base_model: MODELS display name the bot is built on
        :param description: public bot description
        :raises RuntimeError: if *base_model* is not a known model name
        """
        if base_model not in MODELS:
            raise RuntimeError('Sorry, the base_model you provided does not exist. Please check and try again.')

        response = self.client.create_bot(
            handle=name,
            prompt=prompt,
            base_model=MODELS[base_model],
            description=description,
        )
        print(f'Successfully created bot with name: {response["bot"]["displayName"]}')

    def list_bots(self) -> list:
        """Return the display names of all bots available to this account."""
        return list(self.client.bot_names.values())
|
||||
@@ -18,20 +18,21 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
import requests
|
||||
import re
|
||||
import json
|
||||
import random
|
||||
import logging
|
||||
import time
|
||||
import queue
|
||||
import threading
|
||||
import traceback
|
||||
import hashlib
|
||||
import websocket
|
||||
import json
|
||||
import logging
|
||||
import queue
|
||||
import random
|
||||
import re
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import requests
|
||||
import requests.adapters
|
||||
import websocket
|
||||
|
||||
parent_path = Path(__file__).resolve().parent
|
||||
queries_path = parent_path / "graphql"
|
||||
@@ -52,10 +53,7 @@ def load_queries():
|
||||
|
||||
|
||||
def generate_payload(query_name, variables):
|
||||
return {
|
||||
"query": queries[query_name],
|
||||
"variables": variables
|
||||
}
|
||||
return {"query": queries[query_name], "variables": variables}
|
||||
|
||||
|
||||
def request_with_retries(method, *args, **kwargs):
|
||||
@@ -66,7 +64,8 @@ def request_with_retries(method, *args, **kwargs):
|
||||
if r.status_code == 200:
|
||||
return r
|
||||
logger.warn(
|
||||
f"Server returned a status code of {r.status_code} while downloading {url}. Retrying ({i+1}/{attempts})...")
|
||||
f"Server returned a status code of {r.status_code} while downloading {url}. Retrying ({i + 1}/{attempts})..."
|
||||
)
|
||||
|
||||
raise RuntimeError(f"Failed to download {url} too many times.")
|
||||
|
||||
@@ -80,12 +79,12 @@ class Client:
|
||||
def __init__(self, token, proxy=None):
|
||||
self.proxy = proxy
|
||||
self.session = requests.Session()
|
||||
self.adapter = requests.adapters.HTTPAdapter(pool_connections=100, pool_maxsize=100)
|
||||
self.session.mount("http://", self.adapter)
|
||||
self.session.mount("https://", self.adapter)
|
||||
|
||||
if proxy:
|
||||
self.session.proxies = {
|
||||
"http": self.proxy,
|
||||
"https": self.proxy
|
||||
}
|
||||
self.session.proxies = {"http": self.proxy, "https": self.proxy}
|
||||
logger.info(f"Proxy enabled: {self.proxy}")
|
||||
|
||||
self.active_messages = {}
|
||||
@@ -117,11 +116,11 @@ class Client:
|
||||
self.subscribe()
|
||||
|
||||
def extract_formkey(self, html):
|
||||
script_regex = r'<script>if\(.+\)throw new Error;(.+)</script>'
|
||||
script_regex = r"<script>if\(.+\)throw new Error;(.+)</script>"
|
||||
script_text = re.search(script_regex, html).group(1)
|
||||
key_regex = r'var .="([0-9a-f]+)",'
|
||||
key_text = re.search(key_regex, script_text).group(1)
|
||||
cipher_regex = r'.\[(\d+)\]=.\[(\d+)\]'
|
||||
cipher_regex = r".\[(\d+)\]=.\[(\d+)\]"
|
||||
cipher_pairs = re.findall(cipher_regex, script_text)
|
||||
|
||||
formkey_list = [""] * len(cipher_pairs)
|
||||
@@ -143,12 +142,12 @@ class Client:
|
||||
if overwrite_vars:
|
||||
self.formkey = self.extract_formkey(r.text)
|
||||
self.viewer = next_data["props"]["pageProps"]["payload"]["viewer"]
|
||||
self.next_data = next_data
|
||||
|
||||
return next_data
|
||||
|
||||
def get_bot(self, display_name):
|
||||
url = f'https://poe.com/_next/data/{self.next_data["buildId"]}/{display_name}.json'
|
||||
logger.info("Downloading "+url)
|
||||
|
||||
r = request_with_retries(self.session.get, url)
|
||||
|
||||
@@ -156,8 +155,9 @@ class Client:
|
||||
return chat_data
|
||||
|
||||
def get_bots(self, download_next_data=True):
|
||||
logger.info("Downloading all bots...")
|
||||
if download_next_data:
|
||||
next_data = self.get_next_data()
|
||||
next_data = self.get_next_data(overwrite_vars=True)
|
||||
else:
|
||||
next_data = self.next_data
|
||||
|
||||
@@ -165,11 +165,22 @@ class Client:
|
||||
raise RuntimeError("Invalid token or no bots are available.")
|
||||
bot_list = self.viewer["availableBots"]
|
||||
|
||||
threads = []
|
||||
bots = {}
|
||||
for bot in bot_list:
|
||||
|
||||
def get_bot_thread(bot):
|
||||
chat_data = self.get_bot(bot["displayName"])
|
||||
bots[chat_data["defaultBotObject"]["nickname"]] = chat_data
|
||||
|
||||
for bot in bot_list:
|
||||
thread = threading.Thread(target=get_bot_thread, args=(bot,), daemon=True)
|
||||
threads.append(thread)
|
||||
|
||||
for thread in threads:
|
||||
thread.start()
|
||||
for thread in threads:
|
||||
thread.join()
|
||||
|
||||
self.bots = bots
|
||||
self.bot_names = self.get_bot_names()
|
||||
return bots
|
||||
@@ -181,6 +192,10 @@ class Client:
|
||||
bot_names[bot_nickname] = bot_obj["displayName"]
|
||||
return bot_names
|
||||
|
||||
def get_remaining_messages(self, chatbot):
|
||||
chat_data = self.get_bot(self.bot_names[chatbot])
|
||||
return chat_data["defaultBotObject"]["messageLimit"]["numMessagesRemaining"]
|
||||
|
||||
def get_channel_data(self, channel=None):
|
||||
logger.info("Downloading channel data...")
|
||||
r = request_with_retries(self.session.get, self.settings_url)
|
||||
@@ -192,50 +207,50 @@ class Client:
|
||||
if channel is None:
|
||||
channel = self.channel
|
||||
query = f'?min_seq={channel["minSeq"]}&channel={channel["channel"]}&hash={channel["channelHash"]}'
|
||||
return f'wss://{self.ws_domain}.tch.{channel["baseHost"]}/up/{channel["boxName"]}/updates'+query
|
||||
return f'wss://{self.ws_domain}.tch.{channel["baseHost"]}/up/{channel["boxName"]}/updates' + query
|
||||
|
||||
def send_query(self, query_name, variables):
|
||||
for i in range(20):
|
||||
json_data = generate_payload(query_name, variables)
|
||||
payload = json.dumps(json_data, separators=(",", ":"))
|
||||
|
||||
base_string = payload + \
|
||||
self.gql_headers["poe-formkey"] + "WpuLMiXEKKE98j56k"
|
||||
base_string = payload + self.gql_headers["poe-formkey"] + "WpuLMiXEKKE98j56k"
|
||||
|
||||
headers = {
|
||||
"content-type": "application/json",
|
||||
"poe-tag-id": hashlib.md5(base_string.encode()).hexdigest()
|
||||
"poe-tag-id": hashlib.md5(base_string.encode()).hexdigest(),
|
||||
}
|
||||
headers = {**self.gql_headers, **headers}
|
||||
|
||||
r = request_with_retries(
|
||||
self.session.post, self.gql_url, data=payload, headers=headers)
|
||||
r = request_with_retries(self.session.post, self.gql_url, data=payload, headers=headers)
|
||||
|
||||
data = r.json()
|
||||
if data["data"] == None:
|
||||
logger.warn(
|
||||
f'{query_name} returned an error: {data["errors"][0]["message"]} | Retrying ({i+1}/20)')
|
||||
if data["data"] is None:
|
||||
logger.warn(f'{query_name} returned an error: {data["errors"][0]["message"]} | Retrying ({i + 1}/20)')
|
||||
time.sleep(2)
|
||||
continue
|
||||
|
||||
return r.json()
|
||||
|
||||
raise RuntimeError(f'{query_name} failed too many times.')
|
||||
raise RuntimeError(f"{query_name} failed too many times.")
|
||||
|
||||
def subscribe(self):
|
||||
logger.info("Subscribing to mutations")
|
||||
result = self.send_query("SubscriptionsMutation", {
|
||||
"subscriptions": [
|
||||
{
|
||||
"subscriptionName": "messageAdded",
|
||||
"query": queries["MessageAddedSubscription"]
|
||||
},
|
||||
{
|
||||
"subscriptionName": "viewerStateUpdated",
|
||||
"query": queries["ViewerStateUpdatedSubscription"]
|
||||
}
|
||||
]
|
||||
})
|
||||
result = self.send_query(
|
||||
"SubscriptionsMutation",
|
||||
{
|
||||
"subscriptions": [
|
||||
{
|
||||
"subscriptionName": "messageAdded",
|
||||
"query": queries["MessageAddedSubscription"],
|
||||
},
|
||||
{
|
||||
"subscriptionName": "viewerStateUpdated",
|
||||
"query": queries["ViewerStateUpdatedSubscription"],
|
||||
},
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
def ws_run_thread(self):
|
||||
kwargs = {}
|
||||
@@ -244,7 +259,7 @@ class Client:
|
||||
kwargs = {
|
||||
"proxy_type": proxy_parsed.scheme,
|
||||
"http_proxy_host": proxy_parsed.hostname,
|
||||
"http_proxy_port": proxy_parsed.port
|
||||
"http_proxy_port": proxy_parsed.port,
|
||||
}
|
||||
|
||||
self.ws.run_forever(**kwargs)
|
||||
@@ -257,7 +272,7 @@ class Client:
|
||||
on_message=self.on_message,
|
||||
on_open=self.on_ws_connect,
|
||||
on_error=self.on_ws_error,
|
||||
on_close=self.on_ws_close
|
||||
on_close=self.on_ws_close,
|
||||
)
|
||||
t = threading.Thread(target=self.ws_run_thread, daemon=True)
|
||||
t.start()
|
||||
@@ -274,8 +289,7 @@ class Client:
|
||||
|
||||
def on_ws_close(self, ws, close_status_code, close_message):
|
||||
self.ws_connected = False
|
||||
logger.warn(
|
||||
f"Websocket closed with status {close_status_code}: {close_message}")
|
||||
logger.warn(f"Websocket closed with status {close_status_code}: {close_message}")
|
||||
|
||||
def on_ws_error(self, ws, error):
|
||||
self.disconnect_ws()
|
||||
@@ -302,7 +316,7 @@ class Client:
|
||||
return
|
||||
|
||||
# indicate that the response id is tied to the human message id
|
||||
elif key != "pending" and value == None and message["state"] != "complete":
|
||||
elif key != "pending" and value is None and message["state"] != "complete":
|
||||
self.active_messages[key] = message["messageId"]
|
||||
self.message_queues[key].put(message)
|
||||
return
|
||||
@@ -328,13 +342,16 @@ class Client:
|
||||
self.setup_connection()
|
||||
self.connect_ws()
|
||||
|
||||
message_data = self.send_query("SendMessageMutation", {
|
||||
"bot": chatbot,
|
||||
"query": message,
|
||||
"chatId": self.bots[chatbot]["chatId"],
|
||||
"source": None,
|
||||
"withChatBreak": with_chat_break
|
||||
})
|
||||
message_data = self.send_query(
|
||||
"SendMessageMutation",
|
||||
{
|
||||
"bot": chatbot,
|
||||
"query": message,
|
||||
"chatId": self.bots[chatbot]["chatId"],
|
||||
"source": None,
|
||||
"withChatBreak": with_chat_break,
|
||||
},
|
||||
)
|
||||
del self.active_messages["pending"]
|
||||
|
||||
if not message_data["data"]["messageEdgeCreate"]["message"]:
|
||||
@@ -343,8 +360,7 @@ class Client:
|
||||
human_message = message_data["data"]["messageEdgeCreate"]["message"]
|
||||
human_message_id = human_message["node"]["messageId"]
|
||||
except TypeError:
|
||||
raise RuntimeError(
|
||||
f"An unknown error occured. Raw response data: {message_data}")
|
||||
raise RuntimeError(f"An unknown error occurred. Raw response data: {message_data}")
|
||||
|
||||
# indicate that the current message is waiting for a response
|
||||
self.active_messages[human_message_id] = None
|
||||
@@ -354,8 +370,7 @@ class Client:
|
||||
message_id = None
|
||||
while True:
|
||||
try:
|
||||
message = self.message_queues[human_message_id].get(
|
||||
timeout=timeout)
|
||||
message = self.message_queues[human_message_id].get(timeout=timeout)
|
||||
except queue.Empty:
|
||||
del self.active_messages[human_message_id]
|
||||
del self.message_queues[human_message_id]
|
||||
@@ -369,7 +384,7 @@ class Client:
|
||||
continue
|
||||
|
||||
# update info about response
|
||||
message["text_new"] = message["text"][len(last_text):]
|
||||
message["text_new"] = message["text"][len(last_text) :]
|
||||
last_text = message["text"]
|
||||
message_id = message["messageId"]
|
||||
|
||||
@@ -380,16 +395,14 @@ class Client:
|
||||
|
||||
def send_chat_break(self, chatbot):
|
||||
logger.info(f"Sending chat break to {chatbot}")
|
||||
result = self.send_query("AddMessageBreakMutation", {
|
||||
"chatId": self.bots[chatbot]["chatId"]
|
||||
})
|
||||
result = self.send_query("AddMessageBreakMutation", {"chatId": self.bots[chatbot]["chatId"]})
|
||||
return result["data"]["messageBreakCreate"]["message"]
|
||||
|
||||
def get_message_history(self, chatbot, count=25, cursor=None):
|
||||
logger.info(f"Downloading {count} messages from {chatbot}")
|
||||
|
||||
messages = []
|
||||
if cursor == None:
|
||||
if cursor is None:
|
||||
chat_data = self.get_bot(self.bot_names[chatbot])
|
||||
if not chat_data["messagesConnection"]["edges"]:
|
||||
return []
|
||||
@@ -399,23 +412,20 @@ class Client:
|
||||
|
||||
cursor = str(cursor)
|
||||
if count > 50:
|
||||
messages = self.get_message_history(
|
||||
chatbot, count=50, cursor=cursor) + messages
|
||||
messages = self.get_message_history(chatbot, count=50, cursor=cursor) + messages
|
||||
while count > 0:
|
||||
count -= 50
|
||||
new_cursor = messages[0]["cursor"]
|
||||
new_messages = self.get_message_history(
|
||||
chatbot, min(50, count), cursor=new_cursor)
|
||||
new_messages = self.get_message_history(chatbot, min(50, count), cursor=new_cursor)
|
||||
messages = new_messages + messages
|
||||
return messages
|
||||
elif count <= 0:
|
||||
return messages
|
||||
|
||||
result = self.send_query("ChatListPaginationQuery", {
|
||||
"count": count,
|
||||
"cursor": cursor,
|
||||
"id": self.bots[chatbot]["id"]
|
||||
})
|
||||
result = self.send_query(
|
||||
"ChatListPaginationQuery",
|
||||
{"count": count, "cursor": cursor, "id": self.bots[chatbot]["id"]},
|
||||
)
|
||||
query_messages = result["data"]["node"]["messagesConnection"]["edges"]
|
||||
messages = query_messages + messages
|
||||
return messages
|
||||
@@ -425,9 +435,7 @@ class Client:
|
||||
if not type(message_ids) is list:
|
||||
message_ids = [int(message_ids)]
|
||||
|
||||
result = self.send_query("DeleteMessageMutation", {
|
||||
"messageIds": message_ids
|
||||
})
|
||||
result = self.send_query("DeleteMessageMutation", {"messageIds": message_ids})
|
||||
|
||||
def purge_conversation(self, chatbot, count=-1):
|
||||
logger.info(f"Purging messages from {chatbot}")
|
||||
@@ -447,5 +455,91 @@ class Client:
|
||||
last_messages = self.get_message_history(chatbot, count=50)[::-1]
|
||||
logger.info(f"No more messages left to delete.")
|
||||
|
||||
def create_bot(
|
||||
self,
|
||||
handle,
|
||||
prompt="",
|
||||
base_model="chinchilla",
|
||||
description="",
|
||||
intro_message="",
|
||||
api_key=None,
|
||||
api_bot=False,
|
||||
api_url=None,
|
||||
prompt_public=True,
|
||||
pfp_url=None,
|
||||
linkification=False,
|
||||
markdown_rendering=True,
|
||||
suggested_replies=False,
|
||||
private=False,
|
||||
):
|
||||
result = self.send_query(
|
||||
"PoeBotCreateMutation",
|
||||
{
|
||||
"model": base_model,
|
||||
"handle": handle,
|
||||
"prompt": prompt,
|
||||
"isPromptPublic": prompt_public,
|
||||
"introduction": intro_message,
|
||||
"description": description,
|
||||
"profilePictureUrl": pfp_url,
|
||||
"apiUrl": api_url,
|
||||
"apiKey": api_key,
|
||||
"isApiBot": api_bot,
|
||||
"hasLinkification": linkification,
|
||||
"hasMarkdownRendering": markdown_rendering,
|
||||
"hasSuggestedReplies": suggested_replies,
|
||||
"isPrivateBot": private,
|
||||
},
|
||||
)
|
||||
|
||||
data = result["data"]["poeBotCreate"]
|
||||
if data["status"] != "success":
|
||||
raise RuntimeError(f"Poe returned an error while trying to create a bot: {data['status']}")
|
||||
self.get_bots()
|
||||
return data
|
||||
|
||||
def edit_bot(
|
||||
self,
|
||||
bot_id,
|
||||
handle,
|
||||
prompt="",
|
||||
base_model="chinchilla",
|
||||
description="",
|
||||
intro_message="",
|
||||
api_key=None,
|
||||
api_url=None,
|
||||
private=False,
|
||||
prompt_public=True,
|
||||
pfp_url=None,
|
||||
linkification=False,
|
||||
markdown_rendering=True,
|
||||
suggested_replies=False,
|
||||
):
|
||||
result = self.send_query(
|
||||
"PoeBotEditMutation",
|
||||
{
|
||||
"baseBot": base_model,
|
||||
"botId": bot_id,
|
||||
"handle": handle,
|
||||
"prompt": prompt,
|
||||
"isPromptPublic": prompt_public,
|
||||
"introduction": intro_message,
|
||||
"description": description,
|
||||
"profilePictureUrl": pfp_url,
|
||||
"apiUrl": api_url,
|
||||
"apiKey": api_key,
|
||||
"hasLinkification": linkification,
|
||||
"hasMarkdownRendering": markdown_rendering,
|
||||
"hasSuggestedReplies": suggested_replies,
|
||||
"isPrivateBot": private,
|
||||
},
|
||||
)
|
||||
|
||||
data = result["data"]["poeBotEdit"]
|
||||
if data["status"] != "success":
|
||||
raise RuntimeError(f"Poe returned an error while trying to edit a bot: {data['status']}")
|
||||
self.get_bots()
|
||||
return data
|
||||
|
||||
|
||||
load_queries()
|
||||
37
gpt4free/quora/backup-mail.py
Normal file
37
gpt4free/quora/backup-mail.py
Normal file
@@ -0,0 +1,37 @@
|
||||
from requests import Session
|
||||
from time import sleep
|
||||
from json import loads
|
||||
from re import findall
|
||||
class Mail:
|
||||
def __init__(self) -> None:
|
||||
self.client = Session()
|
||||
self.client.post("https://etempmail.com/")
|
||||
self.cookies = {'acceptcookie': 'true'}
|
||||
self.cookies["ci_session"] = self.client.cookies.get_dict()["ci_session"]
|
||||
self.email = None
|
||||
def get_mail(self):
|
||||
respone=self.client.post("https://etempmail.com/getEmailAddress")
|
||||
#cookies
|
||||
self.cookies["lisansimo"] = eval(respone.text)["recover_key"]
|
||||
self.email = eval(respone.text)["address"]
|
||||
return self.email
|
||||
def get_message(self):
|
||||
print("Waiting for message...")
|
||||
while True:
|
||||
sleep(5)
|
||||
respone=self.client.post("https://etempmail.com/getInbox")
|
||||
mail_token=loads(respone.text)
|
||||
print(self.client.cookies.get_dict())
|
||||
if len(mail_token) == 1:
|
||||
break
|
||||
|
||||
params = {'id': '1',}
|
||||
self.mail_context = self.client.post("https://etempmail.com/getInbox",params=params)
|
||||
self.mail_context = eval(self.mail_context.text)[0]["body"]
|
||||
return self.mail_context
|
||||
#,cookies=self.cookies
|
||||
def get_verification_code(self):
|
||||
message = self.mail_context
|
||||
code = findall(r';">(\d{6,7})</div>', message)[0]
|
||||
print(f"Verification code: {code}")
|
||||
return code
|
||||
@@ -16,3 +16,15 @@ nBvuNYRLaE4xE4HuzBPiIQ==
|
||||
oyae3iClomSrk6RJywZ4iw==
|
||||
1Z27Ul8BTdNOhncT5H6wdg==
|
||||
wfUfJIlwQwUss8l-3kDt3w==
|
||||
f6Jw_Nr0PietpNCtOCXJTw==
|
||||
6Jc3yCs7XhDRNHa4ZML09g==
|
||||
3vy44sIy-ZlTMofFiFDttw==
|
||||
p9FbMGGiK1rShKgL3YWkDg==
|
||||
pw6LI5Op84lf4HOY7fn91A==
|
||||
QemKm6aothMvqcEgeKFDlQ==
|
||||
cceZzucA-CEHR0Gt6VLYLQ==
|
||||
JRRObMp2RHVn5u4730DPvQ==
|
||||
XNt0wLTjX7Z-EsRR3TJMIQ==
|
||||
csjjirAUKtT5HT1KZUq1kg==
|
||||
8qZdCatCPQZyS7jsO4hkdQ==
|
||||
esnUxcBhvH1DmCJTeld0qw==
|
||||
73
gpt4free/quora/graphql/PoeBotCreateMutation.graphql
Normal file
73
gpt4free/quora/graphql/PoeBotCreateMutation.graphql
Normal file
@@ -0,0 +1,73 @@
|
||||
mutation CreateBotMain_poeBotCreate_Mutation(
|
||||
$model: String!
|
||||
$handle: String!
|
||||
$prompt: String!
|
||||
$isPromptPublic: Boolean!
|
||||
$introduction: String!
|
||||
$description: String!
|
||||
$profilePictureUrl: String
|
||||
$apiUrl: String
|
||||
$apiKey: String
|
||||
$isApiBot: Boolean
|
||||
$hasLinkification: Boolean
|
||||
$hasMarkdownRendering: Boolean
|
||||
$hasSuggestedReplies: Boolean
|
||||
$isPrivateBot: Boolean
|
||||
) {
|
||||
poeBotCreate(model: $model, handle: $handle, promptPlaintext: $prompt, isPromptPublic: $isPromptPublic, introduction: $introduction, description: $description, profilePicture: $profilePictureUrl, apiUrl: $apiUrl, apiKey: $apiKey, isApiBot: $isApiBot, hasLinkification: $hasLinkification, hasMarkdownRendering: $hasMarkdownRendering, hasSuggestedReplies: $hasSuggestedReplies, isPrivateBot: $isPrivateBot) {
|
||||
status
|
||||
bot {
|
||||
id
|
||||
...BotHeader_bot
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fragment BotHeader_bot on Bot {
|
||||
displayName
|
||||
messageLimit {
|
||||
dailyLimit
|
||||
}
|
||||
...BotImage_bot
|
||||
...BotLink_bot
|
||||
...IdAnnotation_node
|
||||
...botHelpers_useViewerCanAccessPrivateBot
|
||||
...botHelpers_useDeletion_bot
|
||||
}
|
||||
|
||||
fragment BotImage_bot on Bot {
|
||||
displayName
|
||||
...botHelpers_useDeletion_bot
|
||||
...BotImage_useProfileImage_bot
|
||||
}
|
||||
|
||||
fragment BotImage_useProfileImage_bot on Bot {
|
||||
image {
|
||||
__typename
|
||||
... on LocalBotImage {
|
||||
localName
|
||||
}
|
||||
... on UrlBotImage {
|
||||
url
|
||||
}
|
||||
}
|
||||
...botHelpers_useDeletion_bot
|
||||
}
|
||||
|
||||
fragment BotLink_bot on Bot {
|
||||
displayName
|
||||
}
|
||||
|
||||
fragment IdAnnotation_node on Node {
|
||||
__isNode: __typename
|
||||
id
|
||||
}
|
||||
|
||||
fragment botHelpers_useDeletion_bot on Bot {
|
||||
deletionState
|
||||
}
|
||||
|
||||
fragment botHelpers_useViewerCanAccessPrivateBot on Bot {
|
||||
isPrivateBot
|
||||
viewerIsCreator
|
||||
}
|
||||
24
gpt4free/quora/graphql/PoeBotEditMutation.graphql
Normal file
24
gpt4free/quora/graphql/PoeBotEditMutation.graphql
Normal file
@@ -0,0 +1,24 @@
|
||||
mutation EditBotMain_poeBotEdit_Mutation(
|
||||
$botId: BigInt!
|
||||
$handle: String!
|
||||
$description: String!
|
||||
$introduction: String!
|
||||
$isPromptPublic: Boolean!
|
||||
$baseBot: String!
|
||||
$profilePictureUrl: String
|
||||
$prompt: String!
|
||||
$apiUrl: String
|
||||
$apiKey: String
|
||||
$hasLinkification: Boolean
|
||||
$hasMarkdownRendering: Boolean
|
||||
$hasSuggestedReplies: Boolean
|
||||
$isPrivateBot: Boolean
|
||||
) {
|
||||
poeBotEdit(botId: $botId, handle: $handle, description: $description, introduction: $introduction, isPromptPublic: $isPromptPublic, model: $baseBot, promptPlaintext: $prompt, profilePicture: $profilePictureUrl, apiUrl: $apiUrl, apiKey: $apiKey, hasLinkification: $hasLinkification, hasMarkdownRendering: $hasMarkdownRendering, hasSuggestedReplies: $hasSuggestedReplies, isPrivateBot: $isPrivateBot) {
|
||||
status
|
||||
bot {
|
||||
handle
|
||||
id
|
||||
}
|
||||
}
|
||||
}
|
||||
80
gpt4free/quora/mail.py
Normal file
80
gpt4free/quora/mail.py
Normal file
@@ -0,0 +1,80 @@
|
||||
from json import loads
|
||||
from re import findall
|
||||
from time import sleep
|
||||
|
||||
from fake_useragent import UserAgent
|
||||
from requests import Session
|
||||
|
||||
|
||||
class Emailnator:
|
||||
def __init__(self) -> None:
|
||||
self.client = Session()
|
||||
self.client.get("https://www.emailnator.com/", timeout=6)
|
||||
self.cookies = self.client.cookies.get_dict()
|
||||
|
||||
self.client.headers = {
|
||||
"authority": "www.emailnator.com",
|
||||
"origin": "https://www.emailnator.com",
|
||||
"referer": "https://www.emailnator.com/",
|
||||
"user-agent": UserAgent().random,
|
||||
"x-xsrf-token": self.client.cookies.get("XSRF-TOKEN")[:-3] + "=",
|
||||
}
|
||||
|
||||
self.email = None
|
||||
|
||||
def get_mail(self):
|
||||
response = self.client.post(
|
||||
"https://www.emailnator.com/generate-email",
|
||||
json={
|
||||
"email": [
|
||||
"domain",
|
||||
"plusGmail",
|
||||
"dotGmail",
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
self.email = loads(response.text)["email"][0]
|
||||
return self.email
|
||||
|
||||
def get_message(self):
|
||||
print("Waiting for message...")
|
||||
|
||||
while True:
|
||||
sleep(2)
|
||||
mail_token = self.client.post("https://www.emailnator.com/message-list", json={"email": self.email})
|
||||
|
||||
mail_token = loads(mail_token.text)["messageData"]
|
||||
|
||||
if len(mail_token) == 2:
|
||||
print("Message received!")
|
||||
print(mail_token[1]["messageID"])
|
||||
break
|
||||
|
||||
mail_context = self.client.post(
|
||||
"https://www.emailnator.com/message-list",
|
||||
json={
|
||||
"email": self.email,
|
||||
"messageID": mail_token[1]["messageID"],
|
||||
},
|
||||
)
|
||||
|
||||
return mail_context.text
|
||||
|
||||
def get_verification_code(self):
|
||||
message = self.get_message()
|
||||
code = findall(r';">(\d{6,7})</div>', message)[0]
|
||||
print(f"Verification code: {code}")
|
||||
return code
|
||||
|
||||
def clear_inbox(self):
|
||||
print("Clearing inbox...")
|
||||
self.client.post(
|
||||
"https://www.emailnator.com/delete-all",
|
||||
json={"email": self.email},
|
||||
)
|
||||
print("Inbox cleared!")
|
||||
|
||||
def __del__(self):
|
||||
if self.email:
|
||||
self.clear_inbox()
|
||||
11
gpt4free/theb/README.md
Normal file
11
gpt4free/theb/README.md
Normal file
@@ -0,0 +1,11 @@
|
||||
### Example: `theb` (use like openai pypi package) <a name="example-theb"></a>
|
||||
|
||||
```python
|
||||
# import library
|
||||
from gpt4free import theb
|
||||
|
||||
# simple streaming completion
|
||||
for token in theb.Completion.create('hello world'):
|
||||
print(token, end='', flush=True)
|
||||
print("")
|
||||
```
|
||||
57
gpt4free/theb/__init__.py
Normal file
57
gpt4free/theb/__init__.py
Normal file
@@ -0,0 +1,57 @@
|
||||
from json import loads
|
||||
from queue import Queue, Empty
|
||||
from re import findall
|
||||
from threading import Thread
|
||||
from typing import Generator, Optional
|
||||
|
||||
from curl_cffi import requests
|
||||
from fake_useragent import UserAgent
|
||||
|
||||
|
||||
class Completion:
|
||||
# experimental
|
||||
part1 = '{"role":"assistant","id":"chatcmpl'
|
||||
part2 = '"},"index":0,"finish_reason":null}]}}'
|
||||
regex = rf'{part1}(.*){part2}'
|
||||
|
||||
timer = None
|
||||
message_queue = Queue()
|
||||
stream_completed = False
|
||||
|
||||
@staticmethod
|
||||
def request(prompt: str, proxy: Optional[str]=None):
|
||||
headers = {
|
||||
'authority': 'chatbot.theb.ai',
|
||||
'content-type': 'application/json',
|
||||
'origin': 'https://chatbot.theb.ai',
|
||||
'user-agent': UserAgent().random,
|
||||
}
|
||||
|
||||
proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None
|
||||
|
||||
requests.post(
|
||||
'https://chatbot.theb.ai/api/chat-process',
|
||||
headers=headers,
|
||||
proxies=proxies,
|
||||
content_callback=Completion.handle_stream_response,
|
||||
json={'prompt': prompt, 'options': {}},
|
||||
)
|
||||
|
||||
Completion.stream_completed = True
|
||||
|
||||
@staticmethod
|
||||
def create(prompt: str, proxy: Optional[str]=None) -> Generator[str, None, None]:
|
||||
Thread(target=Completion.request, args=[prompt, proxy]).start()
|
||||
|
||||
while not Completion.stream_completed or not Completion.message_queue.empty():
|
||||
try:
|
||||
message = Completion.message_queue.get(timeout=0.01)
|
||||
for message in findall(Completion.regex, message):
|
||||
yield loads(Completion.part1 + message + Completion.part2)['delta']
|
||||
|
||||
except Empty:
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def handle_stream_response(response):
|
||||
Completion.message_queue.put(response.decode())
|
||||
4
gpt4free/theb/theb_test.py
Normal file
4
gpt4free/theb/theb_test.py
Normal file
@@ -0,0 +1,4 @@
|
||||
import theb
|
||||
|
||||
for token in theb.Completion.create('hello world'):
|
||||
print(token, end='', flush=True)
|
||||
23
gpt4free/usesless/README.md
Normal file
23
gpt4free/usesless/README.md
Normal file
@@ -0,0 +1,23 @@
|
||||
ai.usesless.com
|
||||
|
||||
to do:
|
||||
|
||||
- use random user agent in header
|
||||
- make the code better I guess (?)
|
||||
|
||||
### Example: `usesless` <a name="example-usesless"></a>
|
||||
|
||||
```python
|
||||
import usesless
|
||||
|
||||
message_id = ""
|
||||
while True:
|
||||
prompt = input("Question: ")
|
||||
if prompt == "!stop":
|
||||
break
|
||||
|
||||
req = usesless.Completion.create(prompt=prompt, parentMessageId=message_id)
|
||||
|
||||
print(f"Answer: {req['text']}")
|
||||
message_id = req["id"]
|
||||
```
|
||||
55
gpt4free/usesless/__init__.py
Normal file
55
gpt4free/usesless/__init__.py
Normal file
@@ -0,0 +1,55 @@
|
||||
import requests
|
||||
import json
|
||||
|
||||
|
||||
class Completion:
|
||||
headers = {
|
||||
"authority": "ai.usesless.com",
|
||||
"accept": "application/json, text/plain, */*",
|
||||
"accept-language": "en-US,en;q=0.5",
|
||||
"cache-control": "no-cache",
|
||||
"sec-fetch-dest": "empty",
|
||||
"sec-fetch-mode": "cors",
|
||||
"sec-fetch-site": "same-origin",
|
||||
"user-agent": "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/112.0",
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def create(
|
||||
systemMessage: str = "You are a helpful assistant",
|
||||
prompt: str = "",
|
||||
parentMessageId: str = "",
|
||||
presence_penalty: float = 1,
|
||||
temperature: float = 1,
|
||||
model: str = "gpt-3.5-turbo",
|
||||
):
|
||||
print(parentMessageId, prompt)
|
||||
|
||||
json_data = {
|
||||
"openaiKey": "",
|
||||
"prompt": prompt,
|
||||
"options": {
|
||||
"parentMessageId": parentMessageId,
|
||||
"systemMessage": systemMessage,
|
||||
"completionParams": {
|
||||
"presence_penalty": presence_penalty,
|
||||
"temperature": temperature,
|
||||
"model": model,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
url = "https://ai.usesless.com/api/chat-process"
|
||||
request = requests.post(url, headers=Completion.headers, json=json_data)
|
||||
content = request.content
|
||||
|
||||
response = Completion.__response_to_json(content)
|
||||
return response
|
||||
|
||||
@classmethod
|
||||
def __response_to_json(cls, text) -> dict:
|
||||
text = str(text.decode("utf-8"))
|
||||
|
||||
split_text = text.rsplit("\n", 1)[1]
|
||||
to_json = json.loads(split_text)
|
||||
return to_json
|
||||
38
gpt4free/you/README.md
Normal file
38
gpt4free/you/README.md
Normal file
@@ -0,0 +1,38 @@
|
||||
### Example: `you` (use like openai pypi package) <a name="example-you"></a>
|
||||
|
||||
```python
|
||||
|
||||
from gpt4free import you
|
||||
|
||||
# simple request with links and details
|
||||
response = you.Completion.create(
|
||||
prompt="hello world",
|
||||
detailed=True,
|
||||
include_links=True, )
|
||||
|
||||
print(response.dict())
|
||||
|
||||
# {
|
||||
# "response": "...",
|
||||
# "links": [...],
|
||||
# "extra": {...},
|
||||
# "slots": {...}
|
||||
# }
|
||||
# }
|
||||
|
||||
# chatbot
|
||||
|
||||
chat = []
|
||||
|
||||
while True:
|
||||
prompt = input("You: ")
|
||||
if prompt == 'q':
|
||||
break
|
||||
response = you.Completion.create(
|
||||
prompt=prompt,
|
||||
chat=chat)
|
||||
|
||||
print("Bot:", response.text)
|
||||
|
||||
chat.append({"question": prompt, "answer": response.text})
|
||||
```
|
||||
112
gpt4free/you/__init__.py
Normal file
112
gpt4free/you/__init__.py
Normal file
@@ -0,0 +1,112 @@
|
||||
import json
|
||||
import re
|
||||
from typing import Optional, List, Dict, Any
|
||||
from uuid import uuid4
|
||||
|
||||
from fake_useragent import UserAgent
|
||||
from pydantic import BaseModel
|
||||
from tls_client import Session
|
||||
|
||||
|
||||
class PoeResponse(BaseModel):
|
||||
text: Optional[str] = None
|
||||
links: List[str] = []
|
||||
extra: Dict[str, Any] = {}
|
||||
|
||||
|
||||
class Completion:
|
||||
@staticmethod
|
||||
def create(
|
||||
prompt: str,
|
||||
page: int = 1,
|
||||
count: int = 10,
|
||||
safe_search: str = 'Moderate',
|
||||
on_shopping_page: bool = False,
|
||||
mkt: str = '',
|
||||
response_filter: str = 'WebPages,Translations,TimeZone,Computation,RelatedSearches',
|
||||
domain: str = 'youchat',
|
||||
query_trace_id: str = None,
|
||||
chat: list = None,
|
||||
include_links: bool = False,
|
||||
detailed: bool = False,
|
||||
debug: bool = False,
|
||||
proxy: Optional[str] = None
|
||||
) -> PoeResponse:
|
||||
if chat is None:
|
||||
chat = []
|
||||
|
||||
proxies = { 'http': 'http://' + proxy, 'https': 'http://' + proxy } if proxy else {}
|
||||
|
||||
client = Session(client_identifier='chrome_108')
|
||||
client.headers = Completion.__get_headers()
|
||||
client.proxies = proxies
|
||||
|
||||
response = client.get(
|
||||
f'https://you.com/api/streamingSearch',
|
||||
params={
|
||||
'q': prompt,
|
||||
'page': page,
|
||||
'count': count,
|
||||
'safeSearch': safe_search,
|
||||
'onShoppingPage': on_shopping_page,
|
||||
'mkt': mkt,
|
||||
'responseFilter': response_filter,
|
||||
'domain': domain,
|
||||
'queryTraceId': str(uuid4()) if query_trace_id is None else query_trace_id,
|
||||
'chat': str(chat), # {'question':'','answer':' ''}
|
||||
},
|
||||
)
|
||||
|
||||
if debug:
|
||||
print('\n\n------------------\n\n')
|
||||
print(response.text)
|
||||
print('\n\n------------------\n\n')
|
||||
|
||||
if 'youChatToken' not in response.text:
|
||||
return Completion.__get_failure_response()
|
||||
|
||||
you_chat_serp_results = re.search(
|
||||
r'(?<=event: youChatSerpResults\ndata:)(.*\n)*?(?=event: )', response.text
|
||||
).group()
|
||||
third_party_search_results = re.search(
|
||||
r'(?<=event: thirdPartySearchResults\ndata:)(.*\n)*?(?=event: )', response.text
|
||||
).group()
|
||||
# slots = findall(r"slots\ndata: (.*)\n\nevent", response.text)[0]
|
||||
|
||||
text = ''.join(re.findall(r'{\"youChatToken\": \"(.*?)\"}', response.text))
|
||||
|
||||
extra = {
|
||||
'youChatSerpResults': json.loads(you_chat_serp_results),
|
||||
# 'slots' : loads(slots)
|
||||
}
|
||||
|
||||
response = PoeResponse(text=text.replace('\\n', '\n').replace('\\\\', '\\').replace('\\"', '"'))
|
||||
if include_links:
|
||||
response.links = json.loads(third_party_search_results)['search']['third_party_search_results']
|
||||
|
||||
if detailed:
|
||||
response.extra = extra
|
||||
|
||||
return response
|
||||
|
||||
@staticmethod
|
||||
def __get_headers() -> dict:
|
||||
return {
|
||||
'authority': 'you.com',
|
||||
'accept': 'text/event-stream',
|
||||
'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
|
||||
'cache-control': 'no-cache',
|
||||
'referer': 'https://you.com/search?q=who+are+you&tbm=youchat',
|
||||
'sec-ch-ua': '"Not_A Brand";v="99", "Google Chrome";v="109", "Chromium";v="109"',
|
||||
'sec-ch-ua-mobile': '?0',
|
||||
'sec-ch-ua-platform': '"Windows"',
|
||||
'sec-fetch-dest': 'empty',
|
||||
'sec-fetch-mode': 'cors',
|
||||
'sec-fetch-site': 'same-origin',
|
||||
'cookie': f'safesearch_guest=Moderate; uuid_guest={str(uuid4())}',
|
||||
'user-agent': UserAgent().random,
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def __get_failure_response() -> PoeResponse:
|
||||
return PoeResponse(text='Unable to fetch the response, Please try again.')
|
||||
72
gui/README.md
Normal file
72
gui/README.md
Normal file
@@ -0,0 +1,72 @@
|
||||
# gpt4free gui
|
||||
|
||||
This code provides a Graphical User Interface (GUI) for gpt4free. Users can ask questions and get answers from GPT-4 API's, utilizing multiple API implementations. The project contains two different Streamlit applications: `streamlit_app.py` and `streamlit_chat_app.py`.
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
1. Clone the repository.
|
||||
2. Install the required dependencies with: `pip install -r requirements.txt`.
|
||||
3. To use `streamlit_chat_app.py`, note that it depends on a pull request (PR #24) from the https://github.com/AI-Yash/st-chat/ repository, which may change in the future. The current dependency library can be found at https://github.com/AI-Yash/st-chat/archive/refs/pull/24/head.zip.
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
Choose one of the Streamlit applications to run:
|
||||
|
||||
### streamlit\_app.py
|
||||
|
||||
This application provides a simple interface for asking GPT-4 questions and receiving answers.
|
||||
|
||||
To run the application:
|
||||
|
||||
run:
|
||||
```arduino
|
||||
streamlit run gui/streamlit_app.py
|
||||
```
|
||||
<br>
|
||||
|
||||
<img width="724" alt="image" src="https://user-images.githubusercontent.com/98614666/234232449-0d5cd092-a29d-4759-8197-e00ba712cb1a.png">
|
||||
|
||||
<br>
|
||||
<br>
|
||||
|
||||
preview:
|
||||
|
||||
<img width="1125" alt="image" src="https://user-images.githubusercontent.com/98614666/234232398-09e9d3c5-08e6-4b8a-b4f2-0666e9790c7d.png">
|
||||
|
||||
|
||||
### streamlit\_chat\_app.py
|
||||
|
||||
This application provides a chat-like interface for asking GPT-4 questions and receiving answers. It supports multiple query methods, and users can select the desired API for their queries. The application also maintains a conversation history.
|
||||
|
||||
To run the application:
|
||||
|
||||
```arduino
|
||||
streamlit run streamlit_chat_app.py
|
||||
```
|
||||
|
||||
<br>
|
||||
|
||||
<img width="724" alt="image" src="image1.png">
|
||||
|
||||
<br>
|
||||
<br>
|
||||
|
||||
preview:
|
||||
|
||||
<img width="1125" alt="image" src="image2.png">
|
||||
|
||||
Contributing
|
||||
------------
|
||||
|
||||
Feel free to submit pull requests, report bugs, or request new features by opening issues on the GitHub repository.
|
||||
|
||||
Bug
|
||||
----
|
||||
There is a bug in `streamlit_chat_app.py` right now that I haven't pinpointed yet, probably is really simple but havent had the time to look for it. Whenever you open a new conversation or access an old conversation it will only start prompt-answering after the second time you input to the text input, other than that, everything else seems to work accordingly.
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
This project is licensed under the MIT License.
|
||||
0
gui/__init__.py
Normal file
0
gui/__init__.py
Normal file
BIN
gui/image1.png
Normal file
BIN
gui/image1.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 168 KiB |
BIN
gui/image2.png
Normal file
BIN
gui/image2.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 336 KiB |
100
gui/query_methods.py
Normal file
100
gui/query_methods.py
Normal file
@@ -0,0 +1,100 @@
|
||||
import os
|
||||
import sys
|
||||
from typing import Optional
|
||||
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))
|
||||
|
||||
from gpt4free import quora, forefront, theb, you
|
||||
import random
|
||||
|
||||
|
||||
def query_forefront(question: str, proxy: Optional[str] = None) -> str:
|
||||
# create an account
|
||||
token = forefront.Account.create(logging=False, proxy=proxy)
|
||||
|
||||
response = ""
|
||||
# get a response
|
||||
try:
|
||||
return forefront.Completion.create(token=token, prompt='hello world', model='gpt-4', proxy=proxy).text
|
||||
except Exception as e:
|
||||
# Return error message if an exception occurs
|
||||
return (
|
||||
f'An error occurred: {e}. Please make sure you are using a valid cloudflare clearance token and user agent.'
|
||||
)
|
||||
|
||||
|
||||
def query_quora(question: str, proxy: Optional[str] = None) -> str:
|
||||
token = quora.Account.create(logging=False, enable_bot_creation=True, proxy=proxy)
|
||||
return quora.Completion.create(model='gpt-4', prompt=question, token=token, proxy=proxy).text
|
||||
|
||||
|
||||
def query_theb(question: str, proxy: Optional[str] = None) -> str:
|
||||
# Set cloudflare clearance cookie and get answer from GPT-4 model
|
||||
response = ""
|
||||
try:
|
||||
return ''.join(theb.Completion.create(prompt=question, proxy=proxy))
|
||||
|
||||
except Exception as e:
|
||||
# Return error message if an exception occurs
|
||||
return (
|
||||
f'An error occurred: {e}. Please make sure you are using a valid cloudflare clearance token and user agent.'
|
||||
)
|
||||
|
||||
|
||||
def query_you(question: str, proxy: Optional[str] = None) -> str:
|
||||
# Set cloudflare clearance cookie and get answer from GPT-4 model
|
||||
try:
|
||||
result = you.Completion.create(prompt=question, proxy=proxy)
|
||||
return result.text
|
||||
|
||||
except Exception as e:
|
||||
# Return error message if an exception occurs
|
||||
return (
|
||||
f'An error occurred: {e}. Please make sure you are using a valid cloudflare clearance token and user agent.'
|
||||
)
|
||||
|
||||
|
||||
# Define a dictionary containing all query methods
|
||||
avail_query_methods = {
|
||||
"Forefront": query_forefront,
|
||||
"Poe": query_quora,
|
||||
"Theb": query_theb,
|
||||
"You": query_you,
|
||||
# "Writesonic": query_writesonic,
|
||||
# "T3nsor": query_t3nsor,
|
||||
# "Phind": query_phind,
|
||||
# "Ora": query_ora,
|
||||
}
|
||||
|
||||
|
||||
def query(user_input: str, selected_method: str = "Random", proxy: Optional[str] = None) -> str:
|
||||
# If a specific query method is selected (not "Random") and the method is in the dictionary, try to call it
|
||||
if selected_method != "Random" and selected_method in avail_query_methods:
|
||||
try:
|
||||
return avail_query_methods[selected_method](user_input, proxy=proxy)
|
||||
except Exception as e:
|
||||
print(f"Error with {selected_method}: {e}")
|
||||
return "😵 Sorry, some error occurred please try again."
|
||||
|
||||
# Initialize variables for determining success and storing the result
|
||||
success = False
|
||||
result = "😵 Sorry, some error occurred please try again."
|
||||
# Create a list of available query methods
|
||||
query_methods_list = list(avail_query_methods.values())
|
||||
|
||||
# Continue trying different methods until a successful result is obtained or all methods have been tried
|
||||
while not success and query_methods_list:
|
||||
# Choose a random method from the list
|
||||
chosen_query = random.choice(query_methods_list)
|
||||
# Find the name of the chosen method
|
||||
chosen_query_name = [k for k, v in avail_query_methods.items() if v == chosen_query][0]
|
||||
try:
|
||||
# Try to call the chosen method with the user input
|
||||
result = chosen_query(user_input, proxy=proxy)
|
||||
success = True
|
||||
except Exception as e:
|
||||
print(f"Error with {chosen_query_name}: {e}")
|
||||
# Remove the failed method from the list of available methods
|
||||
query_methods_list.remove(chosen_query)
|
||||
|
||||
return result
|
||||
52
gui/streamlit_app.py
Normal file
52
gui/streamlit_app.py
Normal file
@@ -0,0 +1,52 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))
|
||||
|
||||
import streamlit as st
|
||||
from gpt4free import you
|
||||
|
||||
|
||||
def get_answer(question: str) -> str:
|
||||
# Set cloudflare clearance cookie and get answer from GPT-4 model
|
||||
try:
|
||||
result = you.Completion.create(prompt=question)
|
||||
|
||||
return result.text
|
||||
|
||||
except Exception as e:
|
||||
# Return error message if an exception occurs
|
||||
return (
|
||||
f'An error occurred: {e}. Please make sure you are using a valid cloudflare clearance token and user agent.'
|
||||
)
|
||||
|
||||
|
||||
# Set page configuration and add header
|
||||
st.set_page_config(
|
||||
page_title="gpt4freeGUI",
|
||||
initial_sidebar_state="expanded",
|
||||
page_icon="🧠",
|
||||
menu_items={
|
||||
'Get Help': 'https://github.com/xtekky/gpt4free/blob/main/README.md',
|
||||
'Report a bug': "https://github.com/xtekky/gpt4free/issues",
|
||||
'About': "### gptfree GUI",
|
||||
},
|
||||
)
|
||||
st.header('GPT4free GUI')
|
||||
|
||||
# Add text area for user input and button to get answer
|
||||
question_text_area = st.text_area('🤖 Ask Any Question :', placeholder='Explain quantum computing in 50 words')
|
||||
if st.button('🧠 Think'):
|
||||
answer = get_answer(question_text_area)
|
||||
escaped = answer.encode('utf-8').decode('unicode-escape')
|
||||
# Display answer
|
||||
st.caption("Answer :")
|
||||
st.markdown(escaped)
|
||||
|
||||
# Hide Streamlit footer
|
||||
hide_streamlit_style = """
|
||||
<style>
|
||||
footer {visibility: hidden;}
|
||||
</style>
|
||||
"""
|
||||
st.markdown(hide_streamlit_style, unsafe_allow_html=True)
|
||||
118
gui/streamlit_chat_app.py
Normal file
118
gui/streamlit_chat_app.py
Normal file
@@ -0,0 +1,118 @@
|
||||
import atexit
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))
|
||||
|
||||
import streamlit as st
|
||||
from streamlit_chat import message
|
||||
from query_methods import query, avail_query_methods
|
||||
import pickle
|
||||
|
||||
conversations_file = "conversations.pkl"
|
||||
|
||||
def load_conversations():
|
||||
try:
|
||||
with open(conversations_file, "rb") as f:
|
||||
return pickle.load(f)
|
||||
except FileNotFoundError:
|
||||
return []
|
||||
except EOFError:
|
||||
return []
|
||||
|
||||
|
||||
def save_conversations(conversations, current_conversation):
|
||||
updated = False
|
||||
for idx, conversation in enumerate(conversations):
|
||||
if conversation == current_conversation:
|
||||
conversations[idx] = current_conversation
|
||||
updated = True
|
||||
break
|
||||
if not updated:
|
||||
conversations.append(current_conversation)
|
||||
|
||||
temp_conversations_file = "temp_" + conversations_file
|
||||
with open(temp_conversations_file, "wb") as f:
|
||||
pickle.dump(conversations, f)
|
||||
|
||||
os.replace(temp_conversations_file, conversations_file)
|
||||
|
||||
|
||||
def exit_handler():
    """atexit hook: flush the in-memory conversation state to disk."""
    # Perform cleanup operations here, like saving data or closing open files.
    print("Exiting, saving data...")
    save_conversations(st.session_state.conversations, st.session_state.current_conversation)
|
||||
|
||||
|
||||
# Register the exit_handler function to be called when the program is closing.
atexit.register(exit_handler)

st.header("Chat Placeholder")

# --- Session-state bootstrap ------------------------------------------------
# Streamlit re-runs this script top-to-bottom on every interaction, so all
# persistent state must live in st.session_state.
if 'conversations' not in st.session_state:
    st.session_state['conversations'] = load_conversations()

if 'input_text' not in st.session_state:
    st.session_state['input_text'] = ''

if 'selected_conversation' not in st.session_state:
    st.session_state['selected_conversation'] = None

# Bumping this key forces Streamlit to create a brand-new text_input widget,
# which is how the input box gets cleared (widgets keep their own state).
if 'input_field_key' not in st.session_state:
    st.session_state['input_field_key'] = 0

# NOTE(review): this seeds 'query_method' with the `query` *function*, while
# the selectbox below stores an entry from avail_query_methods — confirm
# query() accepts both forms.
if 'query_method' not in st.session_state:
    st.session_state['query_method'] = query

# Initialize new conversation
if 'current_conversation' not in st.session_state or st.session_state['current_conversation'] is None:
    st.session_state['current_conversation'] = {'user_inputs': [], 'generated_responses': []}

input_placeholder = st.empty()
user_input = input_placeholder.text_input(
    'You:', value=st.session_state['input_text'], key=f'input_text_{st.session_state["input_field_key"]}'
)
submit_button = st.button("Submit")

# Fire a query when the input text changed or Submit was clicked.
if (user_input and user_input != st.session_state['input_text']) or submit_button:
    output = query(user_input, st.session_state['query_method'])

    # NOTE(review): 'unicode-escape' un-escapes literal "\n" sequences but
    # mangles non-ASCII output — verify backends return escaped ASCII only.
    escaped_output = output.encode('utf-8').decode('unicode-escape')

    st.session_state.current_conversation['user_inputs'].append(user_input)
    st.session_state.current_conversation['generated_responses'].append(escaped_output)
    save_conversations(st.session_state.conversations, st.session_state.current_conversation)
    st.session_state['input_text'] = ''
    user_input = input_placeholder.text_input(
        'You:', value=st.session_state['input_text'], key=f'input_text_{st.session_state["input_field_key"]}'
    )  # Clear the input field

# Add a button to create a new conversation
if st.sidebar.button("New Conversation"):
    st.session_state['selected_conversation'] = None
    st.session_state['current_conversation'] = {'user_inputs': [], 'generated_responses': []}
    st.session_state['input_field_key'] += 1

st.session_state['query_method'] = st.sidebar.selectbox("Select API:", options=avail_query_methods, index=0)

# Proxy
st.session_state['proxy'] = st.sidebar.text_input("Proxy: ")

# Sidebar
st.sidebar.header("Conversation History")

# One sidebar button per stored conversation, labelled with its first prompt.
for idx, conversation in enumerate(st.session_state.conversations):
    if st.sidebar.button(f"Conversation {idx + 1}: {conversation['user_inputs'][0]}", key=f"sidebar_btn_{idx}"):
        st.session_state['selected_conversation'] = idx
        st.session_state['current_conversation'] = st.session_state.conversations[idx]

if st.session_state['selected_conversation'] is not None:
    conversation_to_display = st.session_state.conversations[st.session_state['selected_conversation']]
else:
    conversation_to_display = st.session_state.current_conversation

# Render the newest exchange first (response above its prompt).
if conversation_to_display['generated_responses']:
    for i in range(len(conversation_to_display['generated_responses']) - 1, -1, -1):
        message(conversation_to_display["generated_responses"][i], key=f"display_generated_{i}")
        message(conversation_to_display['user_inputs'][i], is_user=True, key=f"display_user_{i}")
|
||||
@@ -1,49 +0,0 @@
|
||||
from ora.model import CompletionModel
|
||||
from ora.typing import OraResponse
|
||||
from requests import post
|
||||
from time import time
|
||||
from random import randint
|
||||
|
||||
class Completion:
    def create(
            model : CompletionModel,
            prompt: str,
            includeHistory: bool = True,
            conversationId: "str | None" = None) -> OraResponse:
        """Send *prompt* to the ora.sh conversation API and wrap the reply.

        Note: the original annotation ``str or None`` evaluated to plain
        ``str`` at runtime (``or`` on types); replaced with a string
        annotation so the optional intent is expressed without changing
        behavior.

        model: descriptor whose id / createdBy / modelName are forwarded.
        prompt: the user input to complete.
        includeHistory: whether ora.sh should include prior turns.
        conversationId: existing conversation to continue; None starts a
            new one.

        Returns an OpenAI-style ``OraResponse``; token counts are
        character counts, not real tokens.
        """
        # Only send conversationId when continuing an existing conversation.
        extra = {
            'conversationId': conversationId} if conversationId else {}

        response = post('https://ora.sh/api/conversation',
            headers = {
                "host"          : "ora.sh",
                # Any bearer token of this shape is accepted by the endpoint.
                "authorization" : f"Bearer AY0{randint(1111, 9999)}",
                "user-agent"    : "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
                "origin"        : "https://ora.sh",
                "referer"       : "https://ora.sh/chat/",
            },
            json = extra | {
                'chatbotId'     : model.id,
                'input'         : prompt,
                'userId'        : model.createdBy,
                'model'         : model.modelName,
                'provider'      : 'OPEN_AI',
                'includeHistory': includeHistory}).json()

        return OraResponse({
            'id'     : response['conversationId'],
            'object' : 'text_completion',
            'created': int(time()),
            'model'  : model.slug,
            'choices': [{
                'text'          : response['response'],
                'index'         : 0,
                'logprobs'      : None,
                'finish_reason' : 'stop'
            }],
            'usage': {
                # Character counts stand in for real token counts.
                'prompt_tokens'     : len(prompt),
                'completion_tokens' : len(response['response']),
                'total_tokens'      : len(prompt) + len(response['response'])
            }
        })
|
||||
46
ora/model.py
46
ora/model.py
@@ -1,46 +0,0 @@
|
||||
from uuid import uuid4
|
||||
from requests import post
|
||||
|
||||
class CompletionModel:
    """Descriptor for an ora.sh chatbot.

    All state lives on the class object itself — no instances are ever
    created.  Both factory methods below mutate these class attributes
    and return the class, which callers then pass around as the "model".
    """

    system_prompt = None  # prompt the assistant was created with
    description = None
    createdBy = None  # ora.sh user id that owns the chatbot
    createdAt = None
    slug = None  # human-readable model name
    id = None  # ora.sh chatbot id
    model = 'gpt-3.5-turbo'

    def create(
            system_prompt: str = 'You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible',
            description : str = 'ChatGPT Openai Language Model',
            name : str = 'gpt-3.5'):
        """Create a new chatbot on ora.sh and bind its ids to this class.

        Called as ``CompletionModel.create(...)`` — deliberately has no
        ``self``/``cls`` parameter.
        """
        CompletionModel.system_prompt = system_prompt
        CompletionModel.description = description
        CompletionModel.slug = name

        # Anonymous ownership: a fresh random userId per created assistant.
        response = post('https://ora.sh/api/assistant', json = {
            'prompt' : system_prompt,
            'userId' : f'auto:{uuid4()}',
            'name' : name,
            'description': description})

        CompletionModel.id = response.json()['id']
        CompletionModel.createdBy = response.json()['createdBy']
        CompletionModel.createdAt = response.json()['createdAt']

        # NOTE(review): create() never sets ``modelName`` (only load() does),
        # yet Completion.create reads model.modelName — confirm models made
        # here are only used on code paths that tolerate that.
        return CompletionModel

    def load(chatbotId: str, modelName: str = 'gpt-3.5-turbo', userId: str = None):
        """Point this class at an existing chatbot without any HTTP call.

        A random userId is generated when none is supplied.
        """
        if userId is None: userId = f'{uuid4()}'

        CompletionModel.system_prompt = None
        CompletionModel.description = None
        CompletionModel.slug = None
        CompletionModel.id = chatbotId
        CompletionModel.createdBy = userId
        CompletionModel.createdAt = None
        CompletionModel.modelName = modelName

        return CompletionModel
|
||||
@@ -1,39 +0,0 @@
|
||||
class OraResponse:
    """Typed, attribute-style view over a raw ora.sh completion payload."""

    class Completion:
        """Container for the completion's choice list."""

        class Choices:
            """One generated alternative within a completion."""

            def __init__(self, choice: dict) -> None:
                self.text = choice['text']
                self.content = self.text.encode()
                self.index = choice['index']
                self.logprobs = choice['logprobs']
                self.finish_reason = choice['finish_reason']

            def __repr__(self) -> str:
                return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''

        def __init__(self, choices: dict) -> None:
            self.choices = [OraResponse.Completion.Choices(entry) for entry in choices]

    class Usage:
        """Token accounting attached to a completion."""

        def __init__(self, usage_dict: dict) -> None:
            self.prompt_tokens = usage_dict['prompt_tokens']
            self.completion_tokens = usage_dict['completion_tokens']
            self.total_tokens = usage_dict['total_tokens']

        def __repr__(self):
            return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''

    def __init__(self, response_dict: dict) -> None:
        self.response_dict = response_dict
        # Lift the scalar metadata fields straight onto the instance.
        for field in ('id', 'object', 'created', 'model'):
            setattr(self, field, response_dict[field])
        self.completion = self.Completion(response_dict['choices'])
        self.usage = self.Usage(response_dict['usage'])

    def json(self) -> dict:
        """Return the untouched payload this response was built from."""
        return self.response_dict
|
||||
@@ -1,145 +0,0 @@
|
||||
from urllib.parse import quote
|
||||
from tls_client import Session
|
||||
from time import time
|
||||
from datetime import datetime
|
||||
|
||||
# Shared HTTP session with a Chrome-110 TLS fingerprint (tls_client);
# the headers below mirror a real browser request to www.phind.com.
client = Session(client_identifier='chrome110')
client.headers = {
    'authority': 'www.phind.com',
    'accept': '*/*',
    'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
    'content-type': 'application/json',
    'origin': 'https://www.phind.com',
    'referer': 'https://www.phind.com/search',
    'sec-ch-ua': '"Chromium";v="110", "Google Chrome";v="110", "Not:A-Brand";v="99"',
    'sec-ch-ua-mobile': '?0',
    'sec-ch-ua-platform': '"macOS"',
    'sec-fetch-dest': 'empty',
    'sec-fetch-mode': 'cors',
    'sec-fetch-site': 'same-origin',
    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36',
}
|
||||
|
||||
class PhindResponse:
    """Typed, attribute-style view over a raw phind completion payload."""

    class Completion:
        """Container for the completion's choice list."""

        class Choices:
            """One generated alternative within a completion."""

            def __init__(self, choice: dict) -> None:
                self.text = choice['text']
                self.content = self.text.encode()
                self.index = choice['index']
                self.logprobs = choice['logprobs']
                self.finish_reason = choice['finish_reason']

            def __repr__(self) -> str:
                return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''

        def __init__(self, choices: dict) -> None:
            self.choices = [PhindResponse.Completion.Choices(entry) for entry in choices]

    class Usage:
        """Token accounting attached to a completion."""

        def __init__(self, usage_dict: dict) -> None:
            self.prompt_tokens = usage_dict['prompt_tokens']
            self.completion_tokens = usage_dict['completion_tokens']
            self.total_tokens = usage_dict['total_tokens']

        def __repr__(self):
            return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''

    def __init__(self, response_dict: dict) -> None:
        self.response_dict = response_dict
        # Lift the scalar metadata fields straight onto the instance.
        for field in ('id', 'object', 'created', 'model'):
            setattr(self, field, response_dict[field])
        self.completion = self.Completion(response_dict['choices'])
        self.usage = self.Usage(response_dict['usage'])

    def json(self) -> dict:
        """Return the untouched payload this response was built from."""
        return self.response_dict
|
||||
|
||||
|
||||
class Search:
    """Bing search helper used to ground phind completions."""

    def create(prompt: str, actualSearch: bool = True, language: str = 'en') -> dict: # None = no search
        """Return raw Bing results for *prompt*.

        When actualSearch is False no network request is made and an
        empty, well-formed SearchResponse stub is returned instead.
        """
        if actualSearch:
            payload = {
                'q': prompt,
                'userRankList': {},
                'browserLanguage': language}
            return client.post('https://www.phind.com/api/bing/search', json = payload).json()['rawBingResults']

        # Offline stub mimicking Bing's SearchResponse shape with zero hits.
        return {
            '_type': 'SearchResponse',
            'queryContext': {
                'originalQuery': prompt
            },
            'webPages': {
                'webSearchUrl': f'https://www.bing.com/search?q={quote(prompt)}',
                'totalEstimatedMatches': 0,
                'value': []
            },
            'rankingResponse': {
                'mainline': {
                    'items': []
                }
            }
        }
|
||||
|
||||
class Completion:
    def create(
            model = 'gpt-4',
            prompt: str = '',
            results: dict = None,
            creative: bool = False,
            detailed: bool = False,
            codeContext: str = '',
            language: str = 'en') -> PhindResponse:
        """Ask phind.com to answer *prompt*, grounded in Bing results.

        model: 'gpt-4' maps to phind's 'expert' skill; the gpt-3.5
            variants map to 'intermediate'.
        results: pre-fetched Bing results; fetched automatically when None.
        codeContext: optional code snippet (< 3000 chars) to ground the
            answer in.

        Returns a PhindResponse whose token usage is character-count based.
        """
        if results is None:
            results = Search.create(prompt, actualSearch = True)

        if len(codeContext) > 2999:
            raise ValueError('codeContext must be less than 3000 characters')

        # phind skill levels keyed by OpenAI-style model aliases.
        models = {
            'gpt-4' : 'expert',
            'gpt-3.5-turbo' : 'intermediate',
            'gpt-3.5': 'intermediate',
        }

        json_data = {
            'question' : prompt,
            'bingResults' : results, #response.json()['rawBingResults'],
            'codeContext' : codeContext,
            'options': {
                'skill' : models[model],
                'date' : datetime.now().strftime("%d/%m/%Y"),
                'language': language,
                'detailed': detailed,
                'creative': creative
            }
        }

        completion = ''
        response = client.post('https://www.phind.com/api/infer/answer', json=json_data, timeout_seconds=200)
        # The endpoint streams server-sent events; stitch the chunks back
        # together by stripping each 'data: ' prefix.
        for line in response.text.split('\r\n\r\n'):
            completion += (line.replace('data: ', ''))

        return PhindResponse({
            'id' : f'cmpl-1337-{int(time())}',
            'object' : 'text_completion',
            'created': int(time()),
            'model' : models[model],
            'choices': [{
                'text' : completion,
                'index' : 0,
                'logprobs' : None,
                'finish_reason' : 'stop'
            }],
            'usage': {
                # Character counts stand in for real token counts.
                'prompt_tokens' : len(prompt),
                'completion_tokens' : len(completion),
                'total_tokens' : len(prompt) + len(completion)
            }
        })
|
||||
702
poetry.lock
generated
Normal file
702
poetry.lock
generated
Normal file
@@ -0,0 +1,702 @@
|
||||
# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "async-generator"
|
||||
version = "1.10"
|
||||
description = "Async generators and context managers for Python 3.5+"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
files = [
|
||||
{file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"},
|
||||
{file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "attrs"
|
||||
version = "23.1.0"
|
||||
description = "Classes Without Boilerplate"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
|
||||
{file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
|
||||
|
||||
[package.extras]
|
||||
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
|
||||
dev = ["attrs[docs,tests]", "pre-commit"]
|
||||
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
|
||||
tests = ["attrs[tests-no-zope]", "zope-interface"]
|
||||
tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2022.12.7"
|
||||
description = "Python package for providing Mozilla's CA Bundle."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
|
||||
{file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cffi"
|
||||
version = "1.15.1"
|
||||
description = "Foreign Function Interface for Python calling C code."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
|
||||
{file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
|
||||
{file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
|
||||
{file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
|
||||
{file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
|
||||
{file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
|
||||
{file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
|
||||
{file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pycparser = "*"
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "3.1.0"
|
||||
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7.0"
|
||||
files = [
|
||||
{file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"},
|
||||
{file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "colorama"
|
||||
version = "0.4.6"
|
||||
description = "Cross-platform colored terminal text."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
|
||||
files = [
|
||||
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
|
||||
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "curl-cffi"
|
||||
version = "0.5.5"
|
||||
description = "libcurl ffi bindings for Python, with impersonation support"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "curl_cffi-0.5.5-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:4322f330167a5c87f6913d32b73eb7da9fe3e3dd86b28f137469f432b346d9bb"},
|
||||
{file = "curl_cffi-0.5.5-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:48b0dcc6e91d68694e6472fa47b7f3457d8bd24e42c91e15d6e2b650f0d9d206"},
|
||||
{file = "curl_cffi-0.5.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fda4c35f03ae593b7667d1a09bcd718d1399a5596b936cacb65dcd4bd705e95f"},
|
||||
{file = "curl_cffi-0.5.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660b2174b71d86bd7b136a6b91434a4c3edbcb1de718f3d337b688955872fbcc"},
|
||||
{file = "curl_cffi-0.5.5-cp37-abi3-win_amd64.whl", hash = "sha256:7adb44515cb165ac661a8e5453c41d75bc284f494921051b64f2889c2c518544"},
|
||||
{file = "curl_cffi-0.5.5.tar.gz", hash = "sha256:db94b8d0ad52f3b5c55d32225c29a8219a19592d882075965a78aa9e1a0dead1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cffi = ">=1.12.0"
|
||||
|
||||
[package.extras]
|
||||
build = ["cibuildwheel", "wheel"]
|
||||
dev = ["autoflake (==1.4)", "black (==22.8.0)", "coverage (==6.4.1)", "cryptography (==38.0.3)", "flake8 (==6.0.0)", "flake8-bugbear (==22.7.1)", "flake8-pie (==0.15.0)", "httpx (==0.23.1)", "isort (==5.10.1)", "mypy (==0.971)", "pytest (==7.1.2)", "pytest-asyncio (==0.19.0)", "pytest-trio (==0.7.0)", "trio (==0.21.0)", "trio-typing (==0.7.0)", "trustme (==0.9.0)", "types-certifi (==2021.10.8.2)", "uvicorn (==0.18.3)"]
|
||||
test = ["cryptography (==38.0.3)", "httpx (==0.23.1)", "pytest (==7.1.2)", "pytest-asyncio (==0.19.0)", "pytest-trio (==0.7.0)", "trio (==0.21.0)", "trio-typing (==0.7.0)", "trustme (==0.9.0)", "types-certifi (==2021.10.8.2)", "uvicorn (==0.18.3)"]
|
||||
|
||||
[[package]]
|
||||
name = "exceptiongroup"
|
||||
version = "1.1.1"
|
||||
description = "Backport of PEP 654 (exception groups)"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"},
|
||||
{file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
test = ["pytest (>=6)"]
|
||||
|
||||
[[package]]
|
||||
name = "fake-useragent"
|
||||
version = "1.1.3"
|
||||
description = "Up-to-date simple useragent faker with real world database"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "fake-useragent-1.1.3.tar.gz", hash = "sha256:1c06f0aa7d6e4894b919b30b9c7ebd72ff497325191057fbb5df3d5db06b93fc"},
|
||||
{file = "fake_useragent-1.1.3-py3-none-any.whl", hash = "sha256:695d3b1bf7d11d04ab0f971fb73b0ca8de98b78bbadfbc8bacbc9a48423f7531"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
importlib-metadata = {version = ">=4.0,<5.0", markers = "python_version < \"3.8\""}
|
||||
importlib-resources = {version = ">=5.0", markers = "python_version < \"3.10\""}
|
||||
|
||||
[[package]]
|
||||
name = "h11"
|
||||
version = "0.14.0"
|
||||
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
|
||||
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.4"
|
||||
description = "Internationalized Domain Names in Applications (IDNA)"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
files = [
|
||||
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
|
||||
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "importlib-metadata"
|
||||
version = "4.13.0"
|
||||
description = "Read metadata from Python packages"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"},
|
||||
{file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
|
||||
zipp = ">=0.5"
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
|
||||
perf = ["ipython"]
|
||||
testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "importlib-resources"
|
||||
version = "5.12.0"
|
||||
description = "Read resources from Python packages"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"},
|
||||
{file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
|
||||
testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "names"
|
||||
version = "0.3.0"
|
||||
description = "Generate random names"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "names-0.3.0.tar.gz", hash = "sha256:726e46254f2ed03f1ffb5d941dae3bc67c35123941c29becd02d48d0caa2a671"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "outcome"
|
||||
version = "1.2.0"
|
||||
description = "Capture the outcome of Python function calls."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "outcome-1.2.0-py2.py3-none-any.whl", hash = "sha256:c4ab89a56575d6d38a05aa16daeaa333109c1f96167aba8901ab18b6b5e0f7f5"},
|
||||
{file = "outcome-1.2.0.tar.gz", hash = "sha256:6f82bd3de45da303cf1f771ecafa1633750a358436a8bb60e06a1ceb745d2672"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
attrs = ">=19.2.0"
|
||||
|
||||
[[package]]
|
||||
name = "pycparser"
|
||||
version = "2.21"
|
||||
description = "C parser in Python"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
files = [
|
||||
{file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
|
||||
{file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic"
|
||||
version = "1.10.7"
|
||||
description = "Data validation and settings management using python type hints"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"},
|
||||
{file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"},
|
||||
{file = "pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"},
|
||||
{file = "pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"},
|
||||
{file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"},
|
||||
{file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"},
|
||||
{file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"},
|
||||
{file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"},
|
||||
{file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"},
|
||||
{file = "pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"},
|
||||
{file = "pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"},
|
||||
{file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"},
|
||||
{file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"},
|
||||
{file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"},
|
||||
{file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"},
|
||||
{file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"},
|
||||
{file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"},
|
||||
{file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"},
|
||||
{file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"},
|
||||
{file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"},
|
||||
{file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"},
|
||||
{file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"},
|
||||
{file = "pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"},
|
||||
{file = "pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"},
|
||||
{file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"},
|
||||
{file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"},
|
||||
{file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"},
|
||||
{file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"},
|
||||
{file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"},
|
||||
{file = "pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"},
|
||||
{file = "pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"},
|
||||
{file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"},
|
||||
{file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"},
|
||||
{file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"},
|
||||
{file = "pydantic-1.10.7-py3-none-any.whl", hash = "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"},
|
||||
{file = "pydantic-1.10.7.tar.gz", hash = "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing-extensions = ">=4.2.0"
|
||||
|
||||
[package.extras]
|
||||
dotenv = ["python-dotenv (>=0.10.4)"]
|
||||
email = ["email-validator (>=1.0.3)"]
|
||||
|
||||
[[package]]
|
||||
name = "pydub"
|
||||
version = "0.25.1"
|
||||
description = "Manipulate audio with an simple and easy high level interface"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6"},
|
||||
{file = "pydub-0.25.1.tar.gz", hash = "sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pypasser"
|
||||
version = "0.0.5"
|
||||
description = "Bypassing reCaptcha V3 by sending HTTP requests & solving reCaptcha V2 using speech to text."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "PyPasser-0.0.5.tar.gz", hash = "sha256:72b0ded34edcfa885a13ecc825c5a058503b68521ab87294205d7ff5cd569515"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pydub = "0.25.1"
|
||||
PySocks = "1.7.1"
|
||||
requests = ">=2.25.1,<3.0"
|
||||
selenium = "*"
|
||||
SpeechRecognition = "3.8.1"
|
||||
|
||||
[[package]]
|
||||
name = "pysocks"
|
||||
version = "1.7.1"
|
||||
description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
files = [
|
||||
{file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"},
|
||||
{file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"},
|
||||
{file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.29.0"
|
||||
description = "Python HTTP for Humans."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "requests-2.29.0-py3-none-any.whl", hash = "sha256:e8f3c9be120d3333921d213eef078af392fba3933ab7ed2d1cba3b56f2568c3b"},
|
||||
{file = "requests-2.29.0.tar.gz", hash = "sha256:f2e34a75f4749019bb0e3effb66683630e4ffeaf75819fb51bebef1bf5aef059"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2017.4.17"
|
||||
charset-normalizer = ">=2,<4"
|
||||
idna = ">=2.5,<4"
|
||||
urllib3 = ">=1.21.1,<1.27"
|
||||
|
||||
[package.extras]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
||||
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
||||
|
||||
[[package]]
|
||||
name = "selenium"
|
||||
version = "4.9.0"
|
||||
description = ""
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "selenium-4.9.0-py3-none-any.whl", hash = "sha256:4c19e6aac202719373108d53a5a8e9336ba8d2b25822ca32ae6ff37acbabbdbe"},
|
||||
{file = "selenium-4.9.0.tar.gz", hash = "sha256:478fae77cdfaec32adb1e68d59632c8c191f920535282abcaa2d1a3d98655624"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2021.10.8"
|
||||
trio = ">=0.17,<1.0"
|
||||
trio-websocket = ">=0.9,<1.0"
|
||||
urllib3 = {version = ">=1.26,<2.0", extras = ["socks"]}
|
||||
|
||||
[[package]]
|
||||
name = "sniffio"
|
||||
version = "1.3.0"
|
||||
description = "Sniff out which async library your code is running under"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
|
||||
{file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sortedcontainers"
|
||||
version = "2.4.0"
|
||||
description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
|
||||
{file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "speechrecognition"
|
||||
version = "3.8.1"
|
||||
description = "Library for performing speech recognition, with support for several engines and APIs, online and offline."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "SpeechRecognition-3.8.1-py2.py3-none-any.whl", hash = "sha256:4d8f73a0c05ec70331c3bacaa89ecc06dfa8d9aba0899276664cda06ab597e8e"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tls-client"
|
||||
version = "0.2"
|
||||
description = "Advanced Python HTTP Client."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "tls_client-0.2-py3-none-any.whl", hash = "sha256:26012084e753d7531d32960ec706f81188bb7d825586675fa300b2b44f791412"},
|
||||
{file = "tls_client-0.2.tar.gz", hash = "sha256:eef3860c6f186fa866dc782f1b9e43ae837e40e831f50831c3515cee7c84fd0f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "trio"
|
||||
version = "0.22.0"
|
||||
description = "A friendly Python library for async concurrency and I/O"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "trio-0.22.0-py3-none-any.whl", hash = "sha256:f1dd0780a89bfc880c7c7994519cb53f62aacb2c25ff487001c0052bd721cdf0"},
|
||||
{file = "trio-0.22.0.tar.gz", hash = "sha256:ce68f1c5400a47b137c5a4de72c7c901bd4e7a24fbdebfe9b41de8c6c04eaacf"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
async-generator = ">=1.9"
|
||||
attrs = ">=19.2.0"
|
||||
cffi = {version = ">=1.14", markers = "os_name == \"nt\" and implementation_name != \"pypy\""}
|
||||
exceptiongroup = {version = ">=1.0.0rc9", markers = "python_version < \"3.11\""}
|
||||
idna = "*"
|
||||
outcome = "*"
|
||||
sniffio = "*"
|
||||
sortedcontainers = "*"
|
||||
|
||||
[[package]]
|
||||
name = "trio-websocket"
|
||||
version = "0.10.2"
|
||||
description = "WebSocket library for Trio"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "trio-websocket-0.10.2.tar.gz", hash = "sha256:af13e9393f9051111300287947ec595d601758ce3d165328e7d36325135a8d62"},
|
||||
{file = "trio_websocket-0.10.2-py3-none-any.whl", hash = "sha256:0908435e4eecc49d830ae1c4d6c47b978a75f00594a2be2104d58b61a04cdb53"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
exceptiongroup = "*"
|
||||
trio = ">=0.11"
|
||||
wsproto = ">=0.14"
|
||||
|
||||
[[package]]
|
||||
name = "twocaptcha"
|
||||
version = "0.0.1"
|
||||
description = "2Captcha Python3 API Wrapper"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "TwoCaptcha-0.0.1.tar.gz", hash = "sha256:fd04127de71ca4bd31c22add84a5bcb7c683cf9ee5bf503ca14a8f372ac76a0e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
requests = "*"
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.5.0"
|
||||
description = "Backported and Experimental Type Hints for Python 3.7+"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
|
||||
{file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "1.26.15"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
|
||||
files = [
|
||||
{file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"},
|
||||
{file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7,<2.0", optional = true, markers = "extra == \"socks\""}
|
||||
|
||||
[package.extras]
|
||||
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
|
||||
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "websocket-client"
|
||||
version = "1.5.1"
|
||||
description = "WebSocket client for Python with low level API options"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "websocket-client-1.5.1.tar.gz", hash = "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40"},
|
||||
{file = "websocket_client-1.5.1-py3-none-any.whl", hash = "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"]
|
||||
optional = ["python-socks", "wsaccel"]
|
||||
test = ["websockets"]
|
||||
|
||||
[[package]]
|
||||
name = "wsproto"
|
||||
version = "1.2.0"
|
||||
description = "WebSockets state-machine based protocol implementation"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7.0"
|
||||
files = [
|
||||
{file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"},
|
||||
{file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
h11 = ">=0.9.0,<1"
|
||||
|
||||
[[package]]
|
||||
name = "zipp"
|
||||
version = "3.15.0"
|
||||
description = "Backport of pathlib-compatible object wrapper for zip files"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"},
|
||||
{file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
|
||||
testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
|
||||
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.7"
|
||||
content-hash = "d22aa72ee6bd554c0676a3b6b723090d156a720d03d3b05422a01aa9bf22dda2"
|
||||
28
pyproject.toml
Normal file
28
pyproject.toml
Normal file
@@ -0,0 +1,28 @@
|
||||
[tool.poetry]
|
||||
name = "openai-rev"
|
||||
version = "0.1.0"
|
||||
description = ""
|
||||
authors = []
|
||||
license = "GPL-3.0"
|
||||
readme = "README.md"
|
||||
packages = [{ include = "gpt4free" }]
|
||||
exclude = ["**/*.txt"]
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.7"
|
||||
websocket-client = "^1.5.1"
|
||||
requests = "2.29.0"
|
||||
tls-client = "^0.2"
|
||||
pypasser = "^0.0.5"
|
||||
names = "^0.3.0"
|
||||
colorama = "^0.4.6"
|
||||
curl-cffi = "^0.5.5"
|
||||
selenium = "^4.9.0"
|
||||
fake-useragent = "^1.1.3"
|
||||
twocaptcha = "^0.0.1"
|
||||
pydantic = "^1.10.7"
|
||||
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
@@ -1,345 +0,0 @@
|
||||
from quora.api import Client as PoeClient
|
||||
from quora.mail import Mail
|
||||
from requests import Session
|
||||
from re import search, findall
|
||||
from json import loads
|
||||
from time import sleep
|
||||
from pathlib import Path
|
||||
from random import choice, choices, randint
|
||||
from string import ascii_letters, digits
|
||||
from urllib import parse
|
||||
from os import urandom
|
||||
from hashlib import md5
|
||||
from json import dumps
|
||||
|
||||
def extract_formkey(html):
|
||||
script_regex = r'<script>if\(.+\)throw new Error;(.+)</script>'
|
||||
script_text = search(script_regex, html).group(1)
|
||||
key_regex = r'var .="([0-9a-f]+)",'
|
||||
key_text = search(key_regex, script_text).group(1)
|
||||
cipher_regex = r'.\[(\d+)\]=.\[(\d+)\]'
|
||||
cipher_pairs = findall(cipher_regex, script_text)
|
||||
|
||||
formkey_list = [""] * len(cipher_pairs)
|
||||
for pair in cipher_pairs:
|
||||
formkey_index, key_index = map(int, pair)
|
||||
formkey_list[formkey_index] = key_text[key_index]
|
||||
formkey = "".join(formkey_list)
|
||||
|
||||
return formkey
|
||||
|
||||
class PoeResponse:
|
||||
|
||||
class Completion:
|
||||
|
||||
class Choices:
|
||||
def __init__(self, choice: dict) -> None:
|
||||
self.text = choice['text']
|
||||
self.content = self.text.encode()
|
||||
self.index = choice['index']
|
||||
self.logprobs = choice['logprobs']
|
||||
self.finish_reason = choice['finish_reason']
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''
|
||||
|
||||
def __init__(self, choices: dict) -> None:
|
||||
self.choices = [self.Choices(choice) for choice in choices]
|
||||
|
||||
class Usage:
|
||||
def __init__(self, usage_dict: dict) -> None:
|
||||
self.prompt_tokens = usage_dict['prompt_tokens']
|
||||
self.completion_tokens = usage_dict['completion_tokens']
|
||||
self.total_tokens = usage_dict['total_tokens']
|
||||
|
||||
def __repr__(self):
|
||||
return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''
|
||||
|
||||
def __init__(self, response_dict: dict) -> None:
|
||||
|
||||
self.response_dict = response_dict
|
||||
self.id = response_dict['id']
|
||||
self.object = response_dict['object']
|
||||
self.created = response_dict['created']
|
||||
self.model = response_dict['model']
|
||||
self.completion = self.Completion(response_dict['choices'])
|
||||
self.usage = self.Usage(response_dict['usage'])
|
||||
|
||||
def json(self) -> dict:
|
||||
return self.response_dict
|
||||
|
||||
|
||||
class ModelResponse:
|
||||
def __init__(self, json_response: dict) -> None:
|
||||
self.id = json_response['data']['poeBotCreate']['bot']['id']
|
||||
self.name = json_response['data']['poeBotCreate']['bot']['displayName']
|
||||
self.limit = json_response['data']['poeBotCreate']['bot']['messageLimit']['dailyLimit']
|
||||
self.deleted = json_response['data']['poeBotCreate']['bot']['deletionState']
|
||||
|
||||
class Model:
|
||||
def create(
|
||||
token: str,
|
||||
model: str = 'gpt-3.5-turbo', # claude-instant
|
||||
system_prompt: str = 'You are ChatGPT a large language model developed by Openai. Answer as consisely as possible',
|
||||
description: str = 'gpt-3.5 language model from openai, skidded by poe.com',
|
||||
handle: str = None) -> ModelResponse:
|
||||
|
||||
models = {
|
||||
'gpt-3.5-turbo' : 'chinchilla',
|
||||
'claude-instant-v1.0': 'a2',
|
||||
'gpt-4': 'beaver'
|
||||
}
|
||||
|
||||
if not handle:
|
||||
handle = f'gptx{randint(1111111, 9999999)}'
|
||||
|
||||
client = Session()
|
||||
client.cookies['p-b'] = token
|
||||
|
||||
formkey = extract_formkey(client.get('https://poe.com').text)
|
||||
settings = client.get('https://poe.com/api/settings').json()
|
||||
|
||||
client.headers = {
|
||||
"host" : "poe.com",
|
||||
"origin" : "https://poe.com",
|
||||
"referer" : "https://poe.com/",
|
||||
"content-type" : "application/json",
|
||||
"poe-formkey" : formkey,
|
||||
"poe-tchannel" : settings['tchannelData']['channel'],
|
||||
"user-agent" : "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
|
||||
"connection" : "keep-alive",
|
||||
"sec-ch-ua" : "\"Chromium\";v=\"112\", \"Google Chrome\";v=\"112\", \"Not:A-Brand\";v=\"99\"",
|
||||
"sec-ch-ua-mobile" : "?0",
|
||||
"sec-ch-ua-platform": "\"macOS\"",
|
||||
"content-type" : "application/json",
|
||||
"sec-fetch-site" : "same-origin",
|
||||
"sec-fetch-mode" : "cors",
|
||||
"sec-fetch-dest" : "empty",
|
||||
"accept" : "*/*",
|
||||
"accept-encoding" : "gzip, deflate, br",
|
||||
"accept-language" : "en-GB,en-US;q=0.9,en;q=0.8",
|
||||
}
|
||||
|
||||
payload = dumps(separators=(',', ':'), obj = {
|
||||
'queryName': 'CreateBotMain_poeBotCreate_Mutation',
|
||||
'variables': {
|
||||
'model' : models[model],
|
||||
'handle' : handle,
|
||||
'prompt' : system_prompt,
|
||||
'isPromptPublic' : True,
|
||||
'introduction' : '',
|
||||
'description' : description,
|
||||
'profilePictureUrl' : 'https://qph.fs.quoracdn.net/main-qimg-24e0b480dcd946e1cc6728802c5128b6',
|
||||
'apiUrl' : None,
|
||||
'apiKey' : ''.join(choices(ascii_letters + digits, k = 32)),
|
||||
'isApiBot' : False,
|
||||
'hasLinkification' : False,
|
||||
'hasMarkdownRendering' : False,
|
||||
'hasSuggestedReplies' : False,
|
||||
'isPrivateBot' : False
|
||||
},
|
||||
'query': 'mutation CreateBotMain_poeBotCreate_Mutation(\n $model: String!\n $handle: String!\n $prompt: String!\n $isPromptPublic: Boolean!\n $introduction: String!\n $description: String!\n $profilePictureUrl: String\n $apiUrl: String\n $apiKey: String\n $isApiBot: Boolean\n $hasLinkification: Boolean\n $hasMarkdownRendering: Boolean\n $hasSuggestedReplies: Boolean\n $isPrivateBot: Boolean\n) {\n poeBotCreate(model: $model, handle: $handle, promptPlaintext: $prompt, isPromptPublic: $isPromptPublic, introduction: $introduction, description: $description, profilePicture: $profilePictureUrl, apiUrl: $apiUrl, apiKey: $apiKey, isApiBot: $isApiBot, hasLinkification: $hasLinkification, hasMarkdownRendering: $hasMarkdownRendering, hasSuggestedReplies: $hasSuggestedReplies, isPrivateBot: $isPrivateBot) {\n status\n bot {\n id\n ...BotHeader_bot\n }\n }\n}\n\nfragment BotHeader_bot on Bot {\n displayName\n messageLimit {\n dailyLimit\n }\n ...BotImage_bot\n ...BotLink_bot\n ...IdAnnotation_node\n ...botHelpers_useViewerCanAccessPrivateBot\n ...botHelpers_useDeletion_bot\n}\n\nfragment BotImage_bot on Bot {\n displayName\n ...botHelpers_useDeletion_bot\n ...BotImage_useProfileImage_bot\n}\n\nfragment BotImage_useProfileImage_bot on Bot {\n image {\n __typename\n ... on LocalBotImage {\n localName\n }\n ... on UrlBotImage {\n url\n }\n }\n ...botHelpers_useDeletion_bot\n}\n\nfragment BotLink_bot on Bot {\n displayName\n}\n\nfragment IdAnnotation_node on Node {\n __isNode: __typename\n id\n}\n\nfragment botHelpers_useDeletion_bot on Bot {\n deletionState\n}\n\nfragment botHelpers_useViewerCanAccessPrivateBot on Bot {\n isPrivateBot\n viewerIsCreator\n}\n',
|
||||
})
|
||||
|
||||
base_string = payload + client.headers["poe-formkey"] + 'WpuLMiXEKKE98j56k'
|
||||
client.headers["poe-tag-id"] = md5(base_string.encode()).hexdigest()
|
||||
|
||||
response = client.post("https://poe.com/api/gql_POST", data = payload)
|
||||
|
||||
if not 'success' in response.text:
|
||||
raise Exception('''
|
||||
Bot creation Failed
|
||||
!! Important !!
|
||||
Bot creation was not enabled on this account
|
||||
please use: quora.Account.create with enable_bot_creation set to True
|
||||
''')
|
||||
|
||||
return ModelResponse(response.json())
|
||||
|
||||
class Account:
|
||||
def create(proxy: None or str = None, logging: bool = False, enable_bot_creation: bool = False):
|
||||
client = Session()
|
||||
client.proxies = {
|
||||
'http': f'http://{proxy}',
|
||||
'https': f'http://{proxy}'} if proxy else None
|
||||
|
||||
mail = Mail(client.proxies)
|
||||
mail_token = None
|
||||
_, mail_address = mail.get_mail()
|
||||
|
||||
if logging: print('email', mail_address)
|
||||
|
||||
client.headers = {
|
||||
"host" : "poe.com",
|
||||
"connection" : "keep-alive",
|
||||
"cache-control" : "max-age=0",
|
||||
"sec-ch-ua" : "\"Microsoft Edge\";v=\"111\", \"Not(A:Brand\";v=\"8\", \"Chromium\";v=\"111\"",
|
||||
"sec-ch-ua-mobile" : "?0",
|
||||
"sec-ch-ua-platform": "\"macOS\"",
|
||||
"user-agent" : "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36 Edg/111.0.1661.54",
|
||||
"accept" : "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
|
||||
"sec-fetch-site" : "same-origin",
|
||||
"sec-fetch-mode" : "navigate",
|
||||
"content-type" : "application/json",
|
||||
"sec-fetch-user" : "?1",
|
||||
"sec-fetch-dest" : "document",
|
||||
"accept-encoding" : "gzip, deflate, br",
|
||||
"accept-language" : "en-GB,en;q=0.9,en-US;q=0.8",
|
||||
"upgrade-insecure-requests": "1",
|
||||
}
|
||||
|
||||
client.headers["poe-formkey"] = extract_formkey(client.get('https://poe.com/login').text)
|
||||
client.headers["poe-tchannel"] = client.get('https://poe.com/api/settings').json()['tchannelData']['channel']
|
||||
|
||||
payload = dumps(separators = (',', ':'), obj = {
|
||||
'queryName': 'MainSignupLoginSection_sendVerificationCodeMutation_Mutation',
|
||||
'variables': {
|
||||
'emailAddress': mail_address,
|
||||
'phoneNumber': None,
|
||||
'recaptchaToken': None,
|
||||
},
|
||||
'query': 'mutation MainSignupLoginSection_sendVerificationCodeMutation_Mutation(\n $emailAddress: String\n $phoneNumber: String\n $recaptchaToken: String\n) {\n sendVerificationCode(verificationReason: login, emailAddress: $emailAddress, phoneNumber: $phoneNumber, recaptchaToken: $recaptchaToken) {\n status\n errorMessage\n }\n}\n',
|
||||
})
|
||||
|
||||
base_string = payload + client.headers["poe-formkey"] + 'WpuLMiXEKKE98j56k'
|
||||
client.headers["poe-tag-id"] = md5(base_string.encode()).hexdigest()
|
||||
|
||||
response = client.post('https://poe.com/api/gql_POST', data=payload)
|
||||
if 'Bad Request' in response.text:
|
||||
if logging: print('bad request, retrying...' , response.json())
|
||||
quit()
|
||||
|
||||
if logging: print('send_code' ,response.json())
|
||||
|
||||
while True:
|
||||
sleep(1)
|
||||
messages = mail.fetch_inbox()
|
||||
|
||||
if len(messages["messages"]) > 0:
|
||||
email_content = mail.get_message_content(messages["messages"][0]["_id"])
|
||||
mail_token = findall(r';">(\d{6,7})</div>', email_content)[0]
|
||||
|
||||
if mail_token:
|
||||
break
|
||||
|
||||
if logging: print('code', mail_token)
|
||||
|
||||
payload = dumps(separators = (',', ':'), obj={
|
||||
"queryName": "SignupOrLoginWithCodeSection_signupWithVerificationCodeMutation_Mutation",
|
||||
"variables": {
|
||||
"verificationCode" : mail_token,
|
||||
"emailAddress" : mail_address,
|
||||
"phoneNumber" : None
|
||||
},
|
||||
"query": "mutation SignupOrLoginWithCodeSection_signupWithVerificationCodeMutation_Mutation(\n $verificationCode: String!\n $emailAddress: String\n $phoneNumber: String\n) {\n signupWithVerificationCode(verificationCode: $verificationCode, emailAddress: $emailAddress, phoneNumber: $phoneNumber) {\n status\n errorMessage\n }\n}\n"
|
||||
})
|
||||
|
||||
base_string = payload + client.headers["poe-formkey"] + 'WpuLMiXEKKE98j56k'
|
||||
client.headers["poe-tag-id"] = md5(base_string.encode()).hexdigest()
|
||||
|
||||
response = client.post('https://poe.com/api/gql_POST', data = payload)
|
||||
if logging: print('verify_code', response.json())
|
||||
|
||||
token = parse.unquote(client.cookies.get_dict()['p-b'])
|
||||
|
||||
with open(Path(__file__).resolve().parent / 'cookies.txt', 'a') as f:
|
||||
f.write(f'{token}\n')
|
||||
|
||||
if enable_bot_creation:
|
||||
|
||||
payload = dumps(separators = (',', ':'), obj={
|
||||
"queryName": "UserProfileConfigurePreviewModal_markMultiplayerNuxCompleted_Mutation",
|
||||
"variables": {},
|
||||
"query": "mutation UserProfileConfigurePreviewModal_markMultiplayerNuxCompleted_Mutation {\n markMultiplayerNuxCompleted {\n viewer {\n hasCompletedMultiplayerNux\n id\n }\n }\n}\n"
|
||||
})
|
||||
|
||||
base_string = payload + client.headers["poe-formkey"] + 'WpuLMiXEKKE98j56k'
|
||||
client.headers["poe-tag-id"] = md5(base_string.encode()).hexdigest()
|
||||
|
||||
resp = client.post("https://poe.com/api/gql_POST", data = payload)
|
||||
if logging: print(resp.json())
|
||||
|
||||
return token
|
||||
|
||||
def get():
|
||||
cookies = open(Path(__file__).resolve().parent / 'cookies.txt', 'r').read().splitlines()
|
||||
return choice(cookies)
|
||||
|
||||
class StreamingCompletion:
|
||||
def create(
|
||||
model : str = 'gpt-4',
|
||||
custom_model : bool = None,
|
||||
prompt: str = 'hello world',
|
||||
token : str = ''):
|
||||
|
||||
models = {
|
||||
'sage' : 'capybara',
|
||||
'gpt-4' : 'beaver',
|
||||
'claude-v1.2' : 'a2_2',
|
||||
'claude-instant-v1.0' : 'a2',
|
||||
'gpt-3.5-turbo' : 'chinchilla'
|
||||
}
|
||||
|
||||
_model = models[model] if not custom_model else custom_model
|
||||
|
||||
client = PoeClient(token)
|
||||
|
||||
for chunk in client.send_message(_model, prompt):
|
||||
|
||||
yield PoeResponse({
|
||||
'id' : chunk["messageId"],
|
||||
'object' : 'text_completion',
|
||||
'created': chunk['creationTime'],
|
||||
'model' : _model,
|
||||
'choices': [{
|
||||
'text' : chunk["text_new"],
|
||||
'index' : 0,
|
||||
'logprobs' : None,
|
||||
'finish_reason' : 'stop'
|
||||
}],
|
||||
'usage': {
|
||||
'prompt_tokens' : len(prompt),
|
||||
'completion_tokens' : len(chunk["text_new"]),
|
||||
'total_tokens' : len(prompt) + len(chunk["text_new"])
|
||||
}
|
||||
})
|
||||
|
||||
class Completion:
|
||||
def create(
|
||||
model : str = 'gpt-4',
|
||||
custom_model : str = None,
|
||||
prompt: str = 'hello world',
|
||||
token : str = ''):
|
||||
|
||||
models = {
|
||||
'sage' : 'capybara',
|
||||
'gpt-4' : 'beaver',
|
||||
'claude-v1.2' : 'a2_2',
|
||||
'claude-instant-v1.0' : 'a2',
|
||||
'gpt-3.5-turbo' : 'chinchilla'
|
||||
}
|
||||
|
||||
_model = models[model] if not custom_model else custom_model
|
||||
|
||||
client = PoeClient(token)
|
||||
|
||||
for chunk in client.send_message(_model, prompt):
|
||||
pass
|
||||
|
||||
return PoeResponse({
|
||||
'id' : chunk["messageId"],
|
||||
'object' : 'text_completion',
|
||||
'created': chunk['creationTime'],
|
||||
'model' : _model,
|
||||
'choices': [{
|
||||
'text' : chunk["text"],
|
||||
'index' : 0,
|
||||
'logprobs' : None,
|
||||
'finish_reason' : 'stop'
|
||||
}],
|
||||
'usage': {
|
||||
'prompt_tokens' : len(prompt),
|
||||
'completion_tokens' : len(chunk["text"]),
|
||||
'total_tokens' : len(prompt) + len(chunk["text"])
|
||||
}
|
||||
})
|
||||
@@ -1,39 +0,0 @@
|
||||
import html
|
||||
import json
|
||||
from tls_client import Session
|
||||
|
||||
class Mail:
|
||||
def __init__(self, proxies: str = None, timeout: int = 15, bearer_token: str or None = None) -> None:
|
||||
self.session = Session(client_identifier='chrome110')
|
||||
self.base_url = 'https://web2.temp-mail.org'
|
||||
self.proxies = proxies
|
||||
self.timeout = timeout
|
||||
|
||||
self.session.headers['authorization'] = f'Bearer {bearer_token}' if bearer_token else None
|
||||
|
||||
def get_mail(self) -> str:
|
||||
status: html = self.session.get(self.base_url).status_code
|
||||
|
||||
try:
|
||||
if status == 200:
|
||||
data = self.session.post(f'{self.base_url}/mailbox').json()
|
||||
|
||||
self.session.headers['authorization'] = f'Bearer {data["token"]}'
|
||||
return data["token"], data["mailbox"]
|
||||
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return f'Email creation error. {e} | use proxies', False
|
||||
|
||||
def fetch_inbox(self) -> json:
|
||||
return self.session.get(f'{self.base_url}/messages').json()
|
||||
|
||||
def get_message_content(self, message_id: str):
|
||||
return self.session.get(f'{self.base_url}/messages/{message_id}').json()["bodyHtml"]
|
||||
|
||||
# if __name__ == '__main__':
|
||||
|
||||
# email_client = TempMail()
|
||||
# token, email = email_client.get_mail()
|
||||
# print(email)
|
||||
# print(token)
|
||||
@@ -1,3 +1,14 @@
|
||||
websocket-client
|
||||
requests
|
||||
tls-client
|
||||
pypasser
|
||||
names
|
||||
colorama
|
||||
curl_cffi
|
||||
streamlit==1.21.0
|
||||
selenium
|
||||
fake-useragent
|
||||
twocaptcha
|
||||
https://github.com/AI-Yash/st-chat/archive/refs/pull/24/head.zip
|
||||
pydantic
|
||||
pymailtm
|
||||
|
||||
@@ -1,135 +0,0 @@
|
||||
from requests import post
|
||||
from time import time
|
||||
|
||||
headers = {
|
||||
'authority': 'www.t3nsor.tech',
|
||||
'accept': '*/*',
|
||||
'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
|
||||
'cache-control': 'no-cache',
|
||||
'content-type': 'application/json',
|
||||
'origin': 'https://www.t3nsor.tech',
|
||||
'pragma': 'no-cache',
|
||||
'referer': 'https://www.t3nsor.tech/',
|
||||
'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
|
||||
'sec-ch-ua-mobile': '?0',
|
||||
'sec-ch-ua-platform': '"macOS"',
|
||||
'sec-fetch-dest': 'empty',
|
||||
'sec-fetch-mode': 'cors',
|
||||
'sec-fetch-site': 'same-origin',
|
||||
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
|
||||
}
|
||||
|
||||
class T3nsorResponse:
|
||||
|
||||
class Completion:
|
||||
|
||||
class Choices:
|
||||
def __init__(self, choice: dict) -> None:
|
||||
self.text = choice['text']
|
||||
self.content = self.text.encode()
|
||||
self.index = choice['index']
|
||||
self.logprobs = choice['logprobs']
|
||||
self.finish_reason = choice['finish_reason']
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''
|
||||
|
||||
def __init__(self, choices: dict) -> None:
|
||||
self.choices = [self.Choices(choice) for choice in choices]
|
||||
|
||||
class Usage:
|
||||
def __init__(self, usage_dict: dict) -> None:
|
||||
self.prompt_tokens = usage_dict['prompt_chars']
|
||||
self.completion_tokens = usage_dict['completion_chars']
|
||||
self.total_tokens = usage_dict['total_chars']
|
||||
|
||||
def __repr__(self):
|
||||
return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''
|
||||
|
||||
def __init__(self, response_dict: dict) -> None:
|
||||
|
||||
self.response_dict = response_dict
|
||||
self.id = response_dict['id']
|
||||
self.object = response_dict['object']
|
||||
self.created = response_dict['created']
|
||||
self.model = response_dict['model']
|
||||
self.completion = self.Completion(response_dict['choices'])
|
||||
self.usage = self.Usage(response_dict['usage'])
|
||||
|
||||
def json(self) -> dict:
|
||||
return self.response_dict
|
||||
|
||||
class Completion:
|
||||
model = {
|
||||
'model': {
|
||||
'id' : 'gpt-3.5-turbo',
|
||||
'name' : 'Default (GPT-3.5)'
|
||||
}
|
||||
}
|
||||
|
||||
def create(
|
||||
prompt: str = 'hello world',
|
||||
messages: list = []) -> T3nsorResponse:
|
||||
|
||||
response = post('https://www.t3nsor.tech/api/chat', headers = headers, json = Completion.model | {
|
||||
'messages' : messages,
|
||||
'key' : '',
|
||||
'prompt' : prompt
|
||||
})
|
||||
|
||||
return T3nsorResponse({
|
||||
'id' : f'cmpl-1337-{int(time())}',
|
||||
'object' : 'text_completion',
|
||||
'created': int(time()),
|
||||
'model' : Completion.model,
|
||||
'choices': [{
|
||||
'text' : response.text,
|
||||
'index' : 0,
|
||||
'logprobs' : None,
|
||||
'finish_reason' : 'stop'
|
||||
}],
|
||||
'usage': {
|
||||
'prompt_chars' : len(prompt),
|
||||
'completion_chars' : len(response.text),
|
||||
'total_chars' : len(prompt) + len(response.text)
|
||||
}
|
||||
})
|
||||
|
||||
class StreamCompletion:
|
||||
model = {
|
||||
'model': {
|
||||
'id' : 'gpt-3.5-turbo',
|
||||
'name' : 'Default (GPT-3.5)'
|
||||
}
|
||||
}
|
||||
|
||||
def create(
|
||||
prompt: str = 'hello world',
|
||||
messages: list = []) -> T3nsorResponse:
|
||||
|
||||
response = post('https://www.t3nsor.tech/api/chat', headers = headers, stream = True, json = Completion.model | {
|
||||
'messages' : messages,
|
||||
'key' : '',
|
||||
'prompt' : prompt
|
||||
})
|
||||
|
||||
for chunk in response.iter_content(chunk_size = 2046):
|
||||
yield T3nsorResponse({
|
||||
'id' : f'cmpl-1337-{int(time())}',
|
||||
'object' : 'text_completion',
|
||||
'created': int(time()),
|
||||
'model' : Completion.model,
|
||||
|
||||
'choices': [{
|
||||
'text' : chunk.decode(),
|
||||
'index' : 0,
|
||||
'logprobs' : None,
|
||||
'finish_reason' : 'stop'
|
||||
}],
|
||||
|
||||
'usage': {
|
||||
'prompt_chars' : len(prompt),
|
||||
'completion_chars' : len(chunk.decode()),
|
||||
'total_chars' : len(prompt) + len(chunk.decode())
|
||||
}
|
||||
})
|
||||
9
testing/forefront_test.py
Normal file
9
testing/forefront_test.py
Normal file
@@ -0,0 +1,9 @@
|
||||
from gpt4free import forefront
|
||||
|
||||
# create an account
|
||||
token = forefront.Account.create(logging=True)
|
||||
print(token)
|
||||
|
||||
# get a response
|
||||
for response in forefront.StreamingCompletion.create(token=token, prompt='hello world', model='gpt-4'):
|
||||
print(response.text, end='')
|
||||
14
testing/openaihosted_test.py
Normal file
14
testing/openaihosted_test.py
Normal file
@@ -0,0 +1,14 @@
|
||||
import openaihosted
|
||||
|
||||
messages = [{"role": "system", "content": "You are a helpful assistant."}]
|
||||
while True:
|
||||
question = input("Question: ")
|
||||
if question == "!stop":
|
||||
break
|
||||
|
||||
messages.append({"role": "user", "content": question})
|
||||
request = openaihosted.Completion.create(messages=messages)
|
||||
|
||||
response = request["responses"]
|
||||
messages.append({"role": "assistant", "content": response})
|
||||
print(f"Answer: {response}")
|
||||
@@ -1,27 +0,0 @@
|
||||
import ora
|
||||
|
||||
# 1 normal
|
||||
# 2 solidity contract helper
|
||||
# 3 swift project helper
|
||||
# 4 developer gpt
|
||||
# 5 lawsuit bot for spam call
|
||||
# 6 p5.js code help bot
|
||||
# 8 AI professor, for controversial topics
|
||||
# 9 HustleGPT, your entrepreneurial AI
|
||||
# 10 midjourney prompts bot
|
||||
# 11 AI philosophy professor
|
||||
# 12 TypeScript and JavaScript code review bot
|
||||
# 13 credit card transaction details to merchant and location bot
|
||||
# 15 Chemical Compound Similarity and Purchase Tool bot
|
||||
# 16 expert full-stack developer AI
|
||||
# 17 Solana development bot
|
||||
# 18 price guessing game bot
|
||||
# 19 AI Ethicist and Philosopher
|
||||
|
||||
gpt4_chatbot_ids = ['b8b12eaa-5d47-44d3-92a6-4d706f2bcacf', 'fbe53266-673c-4b70-9d2d-d247785ccd91', 'bd5781cf-727a-45e9-80fd-a3cfce1350c6', '993a0102-d397-47f6-98c3-2587f2c9ec3a', 'ae5c524e-d025-478b-ad46-8843a5745261', 'cc510743-e4ab-485e-9191-76960ecb6040', 'a5cd2481-8e24-4938-aa25-8e26d6233390', '6bca5930-2aa1-4bf4-96a7-bea4d32dcdac', '884a5f2b-47a2-47a5-9e0f-851bbe76b57c', 'd5f3c491-0e74-4ef7-bdca-b7d27c59e6b3', 'd72e83f6-ef4e-4702-844f-cf4bd432eef7', '6e80b170-11ed-4f1a-b992-fd04d7a9e78c', '8ef52d68-1b01-466f-bfbf-f25c13ff4a72', 'd0674e11-f22e-406b-98bc-c1ba8564f749', 'a051381d-6530-463f-be68-020afddf6a8f', '99c0afa1-9e32-4566-8909-f4ef9ac06226', '1be65282-9c59-4a96-99f8-d225059d9001', 'dba16bd8-5785-4248-a8e9-b5d1ecbfdd60', '1731450d-3226-42d0-b41c-4129fe009524', '8e74635d-000e-4819-ab2c-4e986b7a0f48', 'afe7ed01-c1ac-4129-9c71-2ca7f3800b30', 'e374c37a-8c44-4f0e-9e9f-1ad4609f24f5']
|
||||
chatbot_id = gpt4_chatbot_ids[0]
|
||||
|
||||
model = ora.CompletionModel.load(chatbot_id, 'gpt-4')
|
||||
response = ora.Completion.create(model, 'hello')
|
||||
|
||||
print(response)
|
||||
@@ -1,29 +0,0 @@
|
||||
# inport ora
|
||||
import ora
|
||||
|
||||
# create model
|
||||
model = ora.CompletionModel.create(
|
||||
system_prompt = 'You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible',
|
||||
description = 'ChatGPT Openai Language Model',
|
||||
name = 'gpt-3.5')
|
||||
|
||||
print(model.id)
|
||||
|
||||
# init conversation (will give you a conversationId)
|
||||
init = ora.Completion.create(
|
||||
model = model,
|
||||
prompt = 'hello world')
|
||||
|
||||
print(init.completion.choices[0].text)
|
||||
|
||||
while True:
|
||||
# pass in conversationId to continue conversation
|
||||
|
||||
prompt = input('>>> ')
|
||||
response = ora.Completion.create(
|
||||
model = model,
|
||||
prompt = prompt,
|
||||
includeHistory = True,
|
||||
conversationId = init.id)
|
||||
|
||||
print(response.completion.choices[0].text)
|
||||
@@ -1,13 +0,0 @@
|
||||
import phind
|
||||
|
||||
prompt = 'hello world'
|
||||
|
||||
result = phind.Completion.create(
|
||||
model = 'gpt-4',
|
||||
prompt = prompt,
|
||||
results = phind.Search.create(prompt, actualSearch = False), # create search (set actualSearch to False to disable internet)
|
||||
creative = False,
|
||||
detailed = False,
|
||||
codeContext = '') # up to 3000 chars of code
|
||||
|
||||
print(result.completion.choices[0].text)
|
||||
109
testing/poe_account_create_test.py
Normal file
109
testing/poe_account_create_test.py
Normal file
@@ -0,0 +1,109 @@
|
||||
from hashlib import md5
|
||||
from json import dumps
|
||||
from re import findall
|
||||
from typing import Optional
|
||||
|
||||
from tls_client import Session as TLS
|
||||
from twocaptcha import TwoCaptcha
|
||||
|
||||
from gpt4free.quora import extract_formkey
|
||||
from gpt4free.quora.mail import Emailnator
|
||||
|
||||
solver = TwoCaptcha('72747bf24a9d89b4dcc1b24875efd358')
|
||||
|
||||
|
||||
class Account:
|
||||
@staticmethod
|
||||
def create(proxy: Optional[str] = None, logging: bool = False, enable_bot_creation: bool = False):
|
||||
client = TLS(client_identifier='chrome110')
|
||||
client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else None
|
||||
|
||||
mail_client = Emailnator()
|
||||
mail_address = mail_client.get_mail()
|
||||
|
||||
if logging:
|
||||
print('email', mail_address)
|
||||
|
||||
client.headers = {
|
||||
'authority': 'poe.com',
|
||||
'accept': '*/*',
|
||||
'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
|
||||
'content-type': 'application/json',
|
||||
'origin': 'https://poe.com',
|
||||
'poe-formkey': 'null',
|
||||
'poe-tag-id': 'null',
|
||||
'poe-tchannel': 'null',
|
||||
'referer': 'https://poe.com/login',
|
||||
'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
|
||||
'sec-ch-ua-mobile': '?0',
|
||||
'sec-ch-ua-platform': '"macOS"',
|
||||
'sec-fetch-dest': 'empty',
|
||||
'sec-fetch-mode': 'cors',
|
||||
'sec-fetch-site': 'same-origin',
|
||||
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
|
||||
}
|
||||
|
||||
client.headers["poe-formkey"] = extract_formkey(client.get('https://poe.com/login').text)
|
||||
client.headers["poe-tchannel"] = client.get('https://poe.com/api/settings').json()['tchannelData']['channel']
|
||||
|
||||
# token = reCaptchaV3('https://www.recaptcha.net/recaptcha/enterprise/anchor?ar=1&k=6LflhEElAAAAAI_ewVwRWI9hsyV4mbZnYAslSvlG&co=aHR0cHM6Ly9wb2UuY29tOjQ0Mw..&hl=en&v=4PnKmGB9wRHh1i04o7YUICeI&size=invisible&cb=bi6ivxoskyal')
|
||||
token = solver.recaptcha(
|
||||
sitekey='6LflhEElAAAAAI_ewVwRWI9hsyV4mbZnYAslSvlG',
|
||||
url='https://poe.com/login?redirect_url=%2F',
|
||||
version='v3',
|
||||
enterprise=1,
|
||||
invisible=1,
|
||||
action='login',
|
||||
)['code']
|
||||
|
||||
payload = dumps(
|
||||
separators=(',', ':'),
|
||||
obj={
|
||||
'queryName': 'MainSignupLoginSection_sendVerificationCodeMutation_Mutation',
|
||||
'variables': {'emailAddress': mail_address, 'phoneNumber': None, 'recaptchaToken': token},
|
||||
'query': 'mutation MainSignupLoginSection_sendVerificationCodeMutation_Mutation(\n $emailAddress: String\n $phoneNumber: String\n $recaptchaToken: String\n) {\n sendVerificationCode(verificationReason: login, emailAddress: $emailAddress, phoneNumber: $phoneNumber, recaptchaToken: $recaptchaToken) {\n status\n errorMessage\n }\n}\n',
|
||||
},
|
||||
)
|
||||
|
||||
base_string = payload + client.headers["poe-formkey"] + 'WpuLMiXEKKE98j56k'
|
||||
client.headers["poe-tag-id"] = md5(base_string.encode()).hexdigest()
|
||||
|
||||
print(dumps(client.headers, indent=4))
|
||||
|
||||
response = client.post('https://poe.com/api/gql_POST', data=payload)
|
||||
|
||||
if 'automated_request_detected' in response.text:
|
||||
print('please try using a proxy / wait for fix')
|
||||
|
||||
if 'Bad Request' in response.text:
|
||||
if logging:
|
||||
print('bad request, retrying...', response.json())
|
||||
quit()
|
||||
|
||||
if logging:
|
||||
print('send_code', response.json())
|
||||
|
||||
mail_content = mail_client.get_message()
|
||||
mail_token = findall(r';">(\d{6,7})</div>', mail_content)[0]
|
||||
|
||||
if logging:
|
||||
print('code', mail_token)
|
||||
|
||||
payload = dumps(
|
||||
separators=(',', ':'),
|
||||
obj={
|
||||
"queryName": "SignupOrLoginWithCodeSection_signupWithVerificationCodeMutation_Mutation",
|
||||
"variables": {"verificationCode": str(mail_token), "emailAddress": mail_address, "phoneNumber": None},
|
||||
"query": "mutation SignupOrLoginWithCodeSection_signupWithVerificationCodeMutation_Mutation(\n $verificationCode: String!\n $emailAddress: String\n $phoneNumber: String\n) {\n signupWithVerificationCode(verificationCode: $verificationCode, emailAddress: $emailAddress, phoneNumber: $phoneNumber) {\n status\n errorMessage\n }\n}\n",
|
||||
},
|
||||
)
|
||||
|
||||
base_string = payload + client.headers["poe-formkey"] + 'WpuLMiXEKKE98j56k'
|
||||
client.headers["poe-tag-id"] = md5(base_string.encode()).hexdigest()
|
||||
|
||||
response = client.post('https://poe.com/api/gql_POST', data=payload)
|
||||
if logging:
|
||||
print('verify_code', response.json())
|
||||
|
||||
|
||||
Account.create(proxy='xtekky:wegwgwegwed_streaming-1@geo.iproyal.com:12321', logging=True)
|
||||
@@ -1,13 +1,11 @@
|
||||
import quora
|
||||
from time import sleep
|
||||
|
||||
token = quora.Account.create(proxy = None,logging = True)
|
||||
from gpt4free import quora
|
||||
|
||||
token = quora.Account.create(proxy=None, logging=True)
|
||||
print('token', token)
|
||||
|
||||
sleep(2)
|
||||
|
||||
for response in quora.StreamingCompletion.create(model = 'gpt-3.5-turbo',
|
||||
prompt = 'hello world',
|
||||
token = token):
|
||||
|
||||
print(response.completion.choices[0].text, end="", flush=True)
|
||||
for response in quora.StreamingCompletion.create(model='ChatGPT', prompt='hello world', token=token):
|
||||
print(response.text, flush=True)
|
||||
|
||||
@@ -1,18 +1,12 @@
|
||||
import quora
|
||||
from gpt4free import quora
|
||||
|
||||
token = quora.Account.create(logging = True, enable_bot_creation=True)
|
||||
token = quora.Account.create(logging=True, enable_bot_creation=True)
|
||||
|
||||
model = quora.Model.create(
|
||||
token = token,
|
||||
model = 'gpt-3.5-turbo', # or claude-instant-v1.0
|
||||
system_prompt = 'you are ChatGPT a large language model ...'
|
||||
token=token, model='ChatGPT', system_prompt='you are ChatGPT a large language model ...' # or claude-instant-v1.0
|
||||
)
|
||||
|
||||
print(model.name)
|
||||
|
||||
for response in quora.StreamingCompletion.create(
|
||||
custom_model = model.name,
|
||||
prompt ='hello world',
|
||||
token = token):
|
||||
|
||||
print(response.completion.choices[0].text)
|
||||
for response in quora.StreamingCompletion.create(custom_model=model.name, prompt='hello world', token=token):
|
||||
print(response.text)
|
||||
|
||||
4
testing/sqlchat_test.py
Normal file
4
testing/sqlchat_test.py
Normal file
@@ -0,0 +1,4 @@
|
||||
import sqlchat
|
||||
|
||||
for response in sqlchat.StreamCompletion.create(prompt='write python code to reverse a string', messages=[]):
|
||||
print(response.completion.choices[0].text, end='')
|
||||
@@ -1,7 +1,4 @@
|
||||
import t3nsor
|
||||
|
||||
for response in t3nsor.StreamCompletion.create(
|
||||
prompt = 'write python code to reverse a string',
|
||||
messages = []):
|
||||
|
||||
for response in t3nsor.StreamCompletion.create(prompt='write python code to reverse a string', messages=[]):
|
||||
print(response.completion.choices[0].text)
|
||||
|
||||
27
testing/test_main.py
Normal file
27
testing/test_main.py
Normal file
@@ -0,0 +1,27 @@
|
||||
import gpt4free
|
||||
from gpt4free import Provider, quora, forefront
|
||||
|
||||
# usage You
|
||||
response = gpt4free.Completion.create(Provider.You, prompt='Write a poem on Lionel Messi')
|
||||
print(response)
|
||||
|
||||
# usage Poe
|
||||
token = quora.Account.create(logging=False)
|
||||
response = gpt4free.Completion.create(Provider.Poe, prompt='Write a poem on Lionel Messi', token=token, model='ChatGPT')
|
||||
print(response)
|
||||
|
||||
# usage forefront
|
||||
token = forefront.Account.create(logging=False)
|
||||
response = gpt4free.Completion.create(
|
||||
Provider.ForeFront, prompt='Write a poem on Lionel Messi', model='gpt-4', token=token
|
||||
)
|
||||
print(response)
|
||||
print(f'END')
|
||||
|
||||
# usage theb
|
||||
response = gpt4free.Completion.create(Provider.Theb, prompt='Write a poem on Lionel Messi')
|
||||
print(response)
|
||||
|
||||
# usage cocalc
|
||||
response = gpt4free.Completion.create(Provider.CoCalc, prompt='Write a poem on Lionel Messi', cookie_input='')
|
||||
print(response)
|
||||
5
testing/theb_test.py
Normal file
5
testing/theb_test.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from gpt4free import theb
|
||||
|
||||
for token in theb.Completion.create('hello world'):
|
||||
print(token, end='', flush=True)
|
||||
print('asdsos')
|
||||
27
testing/useless_test.py
Normal file
27
testing/useless_test.py
Normal file
@@ -0,0 +1,27 @@
|
||||
from gpt4free import usesless
|
||||
|
||||
message_id = ""
|
||||
while True:
|
||||
prompt = input("Question: ")
|
||||
if prompt == "!stop":
|
||||
break
|
||||
|
||||
req = usesless.Completion.create(prompt=prompt, parentMessageId=message_id)
|
||||
|
||||
print(f"Answer: {req['text']}")
|
||||
message_id = req["id"]
|
||||
|
||||
|
||||
import gpt4free
|
||||
|
||||
message_id = ""
|
||||
while True:
|
||||
prompt = input("Question: ")
|
||||
if prompt == "!stop":
|
||||
break
|
||||
|
||||
req = gpt4free.Completion.create(provider = gpt4free.Provider.UseLess,
|
||||
prompt=prompt, parentMessageId=message_id)
|
||||
|
||||
print(f"Answer: {req['text']}")
|
||||
message_id = req["id"]
|
||||
13
testing/usesless_test.py
Normal file
13
testing/usesless_test.py
Normal file
@@ -0,0 +1,13 @@
|
||||
import usesless
|
||||
|
||||
question1 = "Who won the world series in 2020?"
|
||||
req = usesless.Completion.create(prompt=question1)
|
||||
answer = req["text"]
|
||||
message_id = req["parentMessageId"]
|
||||
|
||||
question2 = "Where was it played?"
|
||||
req2 = usesless.Completion.create(prompt=question2, parentMessageId=message_id)
|
||||
answer2 = req2["text"]
|
||||
|
||||
print(answer)
|
||||
print(answer2)
|
||||
@@ -2,48 +2,34 @@
|
||||
import writesonic
|
||||
|
||||
# create account (3-4s)
|
||||
account = writesonic.Account.create(logging = True)
|
||||
account = writesonic.Account.create(logging=True)
|
||||
|
||||
# with loging:
|
||||
# 2023-04-06 21:50:25 INFO __main__ -> register success : '{"id":"51aa0809-3053-44f7-922a...' (2s)
|
||||
# 2023-04-06 21:50:25 INFO __main__ -> id : '51aa0809-3053-44f7-922a-2b85d8d07edf'
|
||||
# 2023-04-06 21:50:25 INFO __main__ -> token : 'eyJhbGciOiJIUzI1NiIsInR5cCI6Ik...'
|
||||
# 2023-04-06 21:50:28 INFO __main__ -> got key : '194158c4-d249-4be0-82c6-5049e869533c' (2s)
|
||||
# 2023-04-06 21:50:25 INFO __main__ -> register success : '{"id":"51aa0809-3053-44f7-922a...' (2s)
|
||||
# 2023-04-06 21:50:25 INFO __main__ -> id : '51aa0809-3053-44f7-922a-2b85d8d07edf'
|
||||
# 2023-04-06 21:50:25 INFO __main__ -> token : 'eyJhbGciOiJIUzI1NiIsInR5cCI6Ik...'
|
||||
# 2023-04-06 21:50:28 INFO __main__ -> got key : '194158c4-d249-4be0-82c6-5049e869533c' (2s)
|
||||
|
||||
# simple completion
|
||||
response = writesonic.Completion.create(
|
||||
api_key = account.key,
|
||||
prompt = 'hello world'
|
||||
)
|
||||
response = writesonic.Completion.create(api_key=account.key, prompt='hello world')
|
||||
|
||||
print(response.completion.choices[0].text) # Hello! How may I assist you today?
|
||||
print(response.completion.choices[0].text) # Hello! How may I assist you today?
|
||||
|
||||
# conversation
|
||||
|
||||
response = writesonic.Completion.create(
|
||||
api_key = account.key,
|
||||
prompt = 'what is my name ?',
|
||||
enable_memory = True,
|
||||
history_data = [
|
||||
{
|
||||
'is_sent': True,
|
||||
'message': 'my name is Tekky'
|
||||
},
|
||||
{
|
||||
'is_sent': False,
|
||||
'message': 'hello Tekky'
|
||||
}
|
||||
]
|
||||
api_key=account.key,
|
||||
prompt='what is my name ?',
|
||||
enable_memory=True,
|
||||
history_data=[{'is_sent': True, 'message': 'my name is Tekky'}, {'is_sent': False, 'message': 'hello Tekky'}],
|
||||
)
|
||||
|
||||
print(response.completion.choices[0].text) # Your name is Tekky.
|
||||
print(response.completion.choices[0].text) # Your name is Tekky.
|
||||
|
||||
# enable internet
|
||||
|
||||
response = writesonic.Completion.create(
|
||||
api_key = account.key,
|
||||
prompt = 'who won the quatar world cup ?',
|
||||
enable_google_results = True
|
||||
api_key=account.key, prompt='who won the quatar world cup ?', enable_google_results=True
|
||||
)
|
||||
|
||||
print(response.completion.choices[0].text) # Argentina won the 2022 FIFA World Cup tournament held in Qatar ...
|
||||
print(response.completion.choices[0].text) # Argentina won the 2022 FIFA World Cup tournament held in Qatar ...
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
import you
|
||||
from gpt4free import you
|
||||
|
||||
# simple request with links and details
|
||||
response = you.Completion.create(
|
||||
prompt = "hello world",
|
||||
detailed = True,
|
||||
includelinks = True,)
|
||||
response = you.Completion.create(prompt="hello world", detailed=True, include_links=True)
|
||||
|
||||
print(response)
|
||||
|
||||
@@ -16,17 +13,15 @@ print(response)
|
||||
# }
|
||||
# }
|
||||
|
||||
#chatbot
|
||||
# chatbot
|
||||
|
||||
chat = []
|
||||
|
||||
while True:
|
||||
prompt = input("You: ")
|
||||
|
||||
response = you.Completion.create(
|
||||
prompt = prompt,
|
||||
chat = chat)
|
||||
response = you.Completion.create(prompt=prompt, chat=chat)
|
||||
|
||||
print("Bot:", response["response"])
|
||||
print("Bot:", response.text)
|
||||
|
||||
chat.append({"question": prompt, "answer": response["response"]})
|
||||
chat.append({"question": prompt, "answer": response.text})
|
||||
|
||||
2
unfinished/bard/README.md
Normal file
2
unfinished/bard/README.md
Normal file
@@ -0,0 +1,2 @@
|
||||
to do:
|
||||
- code refractoring
|
||||
93
unfinished/bard/__init__.py
Normal file
93
unfinished/bard/__init__.py
Normal file
@@ -0,0 +1,93 @@
|
||||
from json import dumps, loads
|
||||
from os import getenv
|
||||
from random import randint
|
||||
from re import search
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from bard.typings import BardResponse
|
||||
from dotenv import load_dotenv
|
||||
from requests import Session
|
||||
|
||||
load_dotenv()
|
||||
token = getenv('1psid')
|
||||
proxy = getenv('proxy')
|
||||
|
||||
temperatures = {
|
||||
0: "Generate text strictly following known patterns, with no creativity.",
|
||||
0.1: "Produce text adhering closely to established patterns, allowing minimal creativity.",
|
||||
0.2: "Create text with modest deviations from familiar patterns, injecting a slight creative touch.",
|
||||
0.3: "Craft text with a mild level of creativity, deviating somewhat from common patterns.",
|
||||
0.4: "Formulate text balancing creativity and recognizable patterns for coherent results.",
|
||||
0.5: "Generate text with a moderate level of creativity, allowing for a mix of familiarity and novelty.",
|
||||
0.6: "Compose text with an increased emphasis on creativity, while partially maintaining familiar patterns.",
|
||||
0.7: "Produce text favoring creativity over typical patterns for more original results.",
|
||||
0.8: "Create text heavily focused on creativity, with limited concern for familiar patterns.",
|
||||
0.9: "Craft text with a strong emphasis on unique and inventive ideas, largely ignoring established patterns.",
|
||||
1: "Generate text with maximum creativity, disregarding any constraints of known patterns or structures."
|
||||
}
|
||||
|
||||
|
||||
class Completion:
|
||||
def create(
|
||||
prompt: str = 'hello world',
|
||||
temperature: int = None,
|
||||
conversation_id: str = '',
|
||||
response_id: str = '',
|
||||
choice_id: str = '') -> BardResponse:
|
||||
|
||||
if temperature:
|
||||
prompt = f'''settings: follow these settings for your response: [temperature: {temperature} - {temperatures[temperature]}] | prompt : {prompt}'''
|
||||
|
||||
client = Session()
|
||||
client.proxies = {
|
||||
'http': f'http://{proxy}',
|
||||
'https': f'http://{proxy}'} if proxy else None
|
||||
|
||||
client.headers = {
|
||||
'authority': 'bard.google.com',
|
||||
'content-type': 'application/x-www-form-urlencoded;charset=UTF-8',
|
||||
'origin': 'https://bard.google.com',
|
||||
'referer': 'https://bard.google.com/',
|
||||
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
|
||||
'x-same-domain': '1',
|
||||
'cookie': f'__Secure-1PSID={token}'
|
||||
}
|
||||
|
||||
snlm0e = search(r'SNlM0e\":\"(.*?)\"',
|
||||
client.get('https://bard.google.com/').text).group(1)
|
||||
|
||||
params = urlencode({
|
||||
'bl': 'boq_assistant-bard-web-server_20230326.21_p0',
|
||||
'_reqid': randint(1111, 9999),
|
||||
'rt': 'c',
|
||||
})
|
||||
|
||||
response = client.post(
|
||||
f'https://bard.google.com/_/BardChatUi/data/assistant.lamda.BardFrontendService/StreamGenerate?{params}',
|
||||
data={
|
||||
'at': snlm0e,
|
||||
'f.req': dumps([None, dumps([
|
||||
[prompt],
|
||||
None,
|
||||
[conversation_id, response_id, choice_id],
|
||||
])])
|
||||
}
|
||||
)
|
||||
|
||||
chat_data = loads(response.content.splitlines()[3])[0][2]
|
||||
if not chat_data:
|
||||
print('error, retrying')
|
||||
Completion.create(prompt, temperature,
|
||||
conversation_id, response_id, choice_id)
|
||||
|
||||
json_chat_data = loads(chat_data)
|
||||
results = {
|
||||
'content': json_chat_data[0][0],
|
||||
'conversation_id': json_chat_data[1][0],
|
||||
'response_id': json_chat_data[1][1],
|
||||
'factualityQueries': json_chat_data[3],
|
||||
'textQuery': json_chat_data[2][0] if json_chat_data[2] is not None else '',
|
||||
'choices': [{'id': i[0], 'content': i[1]} for i in json_chat_data[4]],
|
||||
}
|
||||
|
||||
return BardResponse(results)
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user