Compare commits

160 commits: `upate-read` ... `main`
Commits in this comparison, by SHA1:

```
8b0cb0cbc1 f8f9882fe0 e47236b137 501e7bcb42 d5f5a09ec9 408a169c41 6e3cfe41c7 b4aadbbac5
f710441a6d 08abfe0ebb 5867f14aa4 21c82d873a e02094de5b 70395755f8 ed196d8aba 22b9b1fc69
dfc9e01889 c6619ffc3e d1ac26833a 6575871c32 b8802544b9 e3958951e5 566ae09bb7 ef9127d876
f7dab64069 ea9ff431d2 2af20fd6bf 3b13c3df92 8a76728343 76e15efe75 5bbd0f5a6b 0953981b53
70acea30a5 dab9b75ef3 a86ddffb9f dece50601f 55990be732 9489dda310 d6819fd631 27add5f4b2
656f2ff262 e34f4bab61 60d4ca8e24 de2bf42f92 ea88a3a650 b82e787d4f afae71255f 8847f0a4fd
3af895a0b6 604c40928a 2e6b523a70 786dc89ee8 e4aac28186 d555d0c247 1981d379d5 c7f9f44f99
afcaf7b70c 54b4c789a7 94b30306f0 55217bb628 c99031acbe 7bfb7fcb3c 22bd61e56c 698b8fddde
49e8f297dd 224a9a5871 c65875f3b0 b2d5309ce6 5c2896ed5e dc912e0fc9 d304de513d 7542aac4c1
eb434543e9 9fe270819a aaad13f6d5 c63d291f22 06d20cbb42 b3d964b59e 99c34046a3 9b67c45d15
281fbdd845 c00b21e0a3 19a09d76a2 88b4ebba3d fdb6fbdcc4 1941bffc1b 3630149afa 144143c84e
3c5baa488a d8961b97d2 5b0fa35185 396d7e11b2 b136f556e7 7ef85f4671 61d6d03453 be04fcd7f3
5b38bcf8e6 f95ace3870 8fe493a38a 99550a44f2 3307675844 62014fa85f d717511236 19769b8982
c5cdbaf6e1 5e8870ec1b ec33603bb7 6719bee133 51697d3216 210f8958ce 81480d0c7e efcab48689
2c5b29260b 15d0150cee 104e58a342 6f6a739872 f34f561617 355dee533b 9fc341a6c8 78e338b5de
194acff111 952f7dbee9 7c5398e6e1 ac92b8a2ad 789d60ef1c e2de6a01fe bca970ebc7 479b8d6d10
dc1d9834f9 b206a1eb63 2b4079f8a9 920fe19608 25428d58d5 c9d0e6dda7 c9c2c630de c855c6f06e
ac96278d74 011d0babc2 10104774c1 1704cbebc8 bbb4d69a93 f1594cfdde 86e0fbe556 814c960998
f1a2060bfb 99ccfc24e3 a5b4d8b10c d8ec09cebb d2d81b7648 b2037302e9 e4aefb08db 28a820a8bc
4260c23a23 1829e536d5 de2a29bceb d9e6cbc1df 9f1a159c57 3e48284e40 d72c33b680 a6e2d65a15
```
**`.gitignore`** (vendored, 18 changed lines)

```
@@ -7,10 +7,26 @@
/dataSources/
/dataSources.local.xml

# Ignore local python virtual environment
venv/

# Ignore streamlit_chat_app.py conversations pickle
conversations.pkl
*.pkl

# Ignore accounts created by api's
accounts.txt

.idea/

*/__pycache__/
**/__pycache__/

__pycache__/

*.log

cookie.json

*.pyc

dist/
```
**Deleted Dockerfile** (`@@ -1,12 +0,0 @@`; the original path is not captured here):

```dockerfile
FROM python:3.10-slim

RUN apt-get update && apt-get install -y git

RUN git clone https://github.com/xtekky/gpt4free.git
WORKDIR /gpt4free
RUN pip install --no-cache-dir -r requirements.txt
RUN cp gui/streamlit_app.py .

EXPOSE 8501

CMD ["streamlit", "run", "streamlit_app.py"]
```
**`Dockerfile`** (new file, 18 lines, `@@ -0,0 +1,18 @@`)

```dockerfile
FROM python:3.10

RUN apt-get update && apt-get install -y git

RUN mkdir -p /usr/src/gpt4free
WORKDIR /usr/src/gpt4free

# RUN pip config set global.index-url https://mirrors.aliyun.com/pypi/simple/
# RUN pip config set global.trusted-host mirrors.aliyun.com

COPY requirements.txt /usr/src/gpt4free/
RUN pip install --no-cache-dir -r requirements.txt
COPY . /usr/src/gpt4free
RUN cp gui/streamlit_app.py .

EXPOSE 8501

CMD ["streamlit", "run", "streamlit_app.py"]
```
**`README.md`** (174 changed lines; where a line was edited, the old and new versions appear in sequence)

````
@@ -1,27 +1,45 @@
# GPT4free - use ChatGPT, for free!!
<img alt="gpt4free logo" src="https://user-images.githubusercontent.com/98614666/233799515-1a7cb6a3-b17f-42c4-956d-8d2a0664466f.png">
<img src="https://media.giphy.com/media/LnQjpWaON8nhr21vNW/giphy.gif" width="100" align="left">
Just API's from some language model sites.
<p>Join our <a href="https://discord.com/invite/gpt4free">discord.gg/gpt4free<a> Discord community! <a href="https://discord.gg/gpt4free"><img align="center" alt="gpt4free Discord" width="22px" src="https://raw.githubusercontent.com/peterthehan/peterthehan/master/assets/discord.svg" /></a></p>

##### You may join our discord server for updates and support ; )
- [Discord Link](https://discord.gg/gpt4free)

<img width="1383" alt="image" src="https://user-images.githubusercontent.com/98614666/233799515-1a7cb6a3-b17f-42c4-956d-8d2a0664466f.png">
# Related gpt4free projects

Have you ever come across some amazing projects that you couldn't use **just because you didn't have an OpenAI API key?**

**We've got you covered!** This repository offers **reverse-engineered** third-party APIs for `GPT-4/3.5`, sourced from various websites. You can simply **download** this repository, and use the available modules, which are designed to be used **just like OpenAI's official package**. **Unleash ChatGPT's potential for your projects, now!** You are welcome ; ).

By the way, thank you so much for `11k` stars and all the support!!

## Legal Notice <a name="legal-notice"></a>

This repository uses third-party APIs and AI models and is *not* associated with or endorsed by the API providers or the original developers of the models. This project is intended **for educational purposes only**.

Please note the following:

1. **Disclaimer**: The APIs, services, and trademarks mentioned in this repository belong to their respective owners. This project is *not* claiming any right over them.

2. **Responsibility**: The author of this repository is *not* responsible for any consequences arising from the use or misuse of this repository or the content provided by the third-party APIs and any damage or losses caused by users' actions.

3. **Educational Purposes Only**: This repository and its content are provided strictly for educational purposes. By using the information and code provided, users acknowledge that they are using the APIs and models at their own risk and agree to comply with any applicable laws and regulations.
<table>
<thead align="center">
<tr border: none;>
<td><b>🎁 Projects</b></td>
<td><b>⭐ Stars</b></td>
<td><b>📚 Forks</b></td>
<td><b>🛎 Issues</b></td>
<td><b>📬 Pull requests</b></td>
</tr>
</thead>
<tbody>
<tr>
<td><a href="https://github.com/xtekky/gpt4free"><b>gpt4free</b></a></td>
<td><a href="https://github.com/xtekky/gpt4free/stargazers"><img alt="Stars" src="https://img.shields.io/github/stars/xtekky/gpt4free?style=flat-square&labelColor=343b41"/></a></td>
<td><a href="https://github.com/xtekky/gpt4free/network/members"><img alt="Forks" src="https://img.shields.io/github/forks/xtekky/gpt4free?style=flat-square&labelColor=343b41"/></a></td>
<td><a href="https://github.com/xtekky/gpt4free/issues"><img alt="Issues" src="https://img.shields.io/github/issues/xtekky/gpt4free?style=flat-square&labelColor=343b41"/></a></td>
<td><a href="https://github.com/xtekky/gpt4free/pulls"><img alt="Pull Requests" src="https://img.shields.io/github/issues-pr/xtekky/gpt4free?style=flat-square&labelColor=343b41"/></a></td>
</tr>
<tr>
<td><a href="https://github.com/xtekky/chatgpt-clone"><b>ChatGPT-Clone</b></a></td>
<td><a href="https://github.com/xtekky/chatgpt-clone/stargazers"><img alt="Stars" src="https://img.shields.io/github/stars/xtekky/chatgpt-clone?style=flat-square&labelColor=343b41"/></a></td>
<td><a href="https://github.com/xtekky/chatgpt-clone/network/members"><img alt="Forks" src="https://img.shields.io/github/forks/xtekky/chatgpt-clone?style=flat-square&labelColor=343b41"/></a></td>
<td><a href="https://github.com/xtekky/chatgpt-clone/issues"><img alt="Issues" src="https://img.shields.io/github/issues/xtekky/chatgpt-clone?style=flat-square&labelColor=343b41"/></a></td>
<td><a href="https://github.com/xtekky/chatgpt-clone/pulls"><img alt="Pull Requests" src="https://img.shields.io/github/issues-pr/xtekky/chatgpt-clone?style=flat-square&labelColor=343b41"/></a></td>
</tr>
<tr>
<td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free"><b>ChatGpt Discord Bot</b></a></td>
<td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free/stargazers"><img alt="Stars" src="https://img.shields.io/github/stars/mishalhossin/Coding-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
<td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free/network/members"><img alt="Forks" src="https://img.shields.io/github/forks/mishalhossin/Coding-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
<td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free/issues"><img alt="Issues" src="https://img.shields.io/github/issues/mishalhossin/Coding-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
<td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free/pulls"><img alt="Pull Requests" src="https://img.shields.io/github/issues-pr/mishalhossin/Coding-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
</tr>
</tbody>
</table>


## Table of Contents
@@ -34,22 +52,23 @@ Please note the following:
| **Docker** | Instructions on how to run gpt4free in a Docker container | [](#docker-instructions) | - |
| **ChatGPT clone** | A ChatGPT clone with new features and scalability | [](https://chat.chatbot.sex/chat) | - |
| **How to install** | Instructions on how to install gpt4free | [](#install) | - |
| **Legal Notice** | Legal notice or disclaimer | [](#legal-notice) | - |
| **Copyright** | Copyright information | [](#copyright) | - |
| **Usage Examples** | | | |
| `forefront` | Example usage for forefront (gpt-4) | [](./forefront/README.md) |  | | |
| `quora (poe)` | Example usage for quora | [](./quora/README.md) |  | |
| `phind` | Example usage for phind | [](./phind/README.md) |  |
| `you` | Example usage for you | [](./you/README.md) |  |
| `theb` | Example usage for theb (gpt-3.5) | [](gpt4free/theb/README.md) |  |
| `forefront` | Example usage for forefront (gpt-4) | [](gpt4free/forefront/README.md) |  | ||
| `quora (poe)` | Example usage for quora | [](gpt4free/quora/README.md) |  |
| `you` | Example usage for you | [](gpt4free/you/README.md) |  |
| **Try it Out** | | | |
| Google Colab Jupyter Notebook | Example usage for gpt4free | [](https://colab.research.google.com/github/DanielShemesh/gpt4free-colab/blob/main/gpt4free.ipynb) | - |
| replit Example (feel free to fork this repl) | Example usage for gpt4free | [](https://replit.com/@gpt4free/gpt4free-webui) | - |
| **Legal Notice** | Legal notice or disclaimer | [](#legal-notice) | - |
| **Copyright** | Copyright information | [](#copyright) | - |
| **Star History** | Star History | [](#star-history) | - |


## Todo <a name="todo"></a>
## To do list <a name="todo"></a>

- [ ] Add a GUI for the repo
- [ ] Make a general package named `openai_rev`, instead of different folders
- [x] Add a GUI for the repo
- [ ] Make a general package named `gpt4free`, instead of different folders
- [ ] Live api status to know which are down and which can be used
- [ ] Integrate more API's in `./unfinished` as well as other ones in the lists
- [ ] Make an API to use as proxy for other projects
@@ -57,67 +76,99 @@ Please note the following:

## Current Sites <a name="current-sites"></a>

| Website s | Model(s) |
| ---------------------------------------------------- | ------------------------------- |
| [forefront.ai](https://chat.forefront.ai) | GPT-4/3.5 |
| [poe.com](https://poe.com) | GPT-4/3.5 |
| [writesonic.com](https://writesonic.com) | GPT-3.5 / Internet |
| [t3nsor.com](https://t3nsor.com) | GPT-3.5 |
| [you.com](https://you.com) | GPT-3.5 / Internet / good search|
| [phind.com](https://phind.com) | GPT-4 / Internet / good search |
| [sqlchat.ai](https://sqlchat.ai) | GPT-3.5 |
| [chat.openai.com/chat](https://chat.openai.com/chat) | GPT-3.5 |
| [bard.google.com](https://bard.google.com) | custom / search |
| [bing.com/chat](https://bing.com/chat) | GPT-4/3.5 |
| [chat.forefront.ai/](https://chat.forefront.ai/) | GPT-4/3.5 |
| Website s | Model(s) |
| ------------------------------------------------ | -------------------------------- |
| [forefront.ai](https://chat.forefront.ai) | GPT-4/3.5 |
| [poe.com](https://poe.com) | GPT-4/3.5 |
| [writesonic.com](https://writesonic.com) | GPT-3.5 / Internet |
| [t3nsor.com](https://t3nsor.com) | GPT-3.5 |
| [you.com](https://you.com) | GPT-3.5 / Internet / good search |
| [sqlchat.ai](https://sqlchat.ai) | GPT-3.5 |
| [bard.google.com](https://bard.google.com) | custom / search |
| [bing.com/chat](https://bing.com/chat) | GPT-4/3.5 |
| [chat.forefront.ai/](https://chat.forefront.ai/) | GPT-4/3.5 |

## Best sites <a name="best-sites"></a>
## Best sites <a name="best-sites"></a>

#### gpt-4
- [`/phind`](./phind/README.md)
  - pro: only stable gpt-4 with streaming ( no limit )
  - contra: weird backend prompting
  - why not `ora` anymore ? gpt-4 requires login + limited

- [`/forefront`](gpt4free/forefront/README.md)

#### gpt-3.5
- looking for a stable api at the moment

## Install <a name="install"></a>
download or clone this GitHub repo
- [`/you`](gpt4free/you/README.md)

## Install <a name="install"></a>

Download or clone this GitHub repo
install requirements with:

```sh
pip3 install -r requirements.txt
```


## To start gpt4free GUI <a name="streamlit-gpt4free-gui"></a>
move `streamlit_app.py` from `./gui` to the base folder
then run:

Move `streamlit_app.py` from `./gui` to the base folder
then run:
`streamlit run streamlit_app.py` or `python3 -m streamlit run streamlit_app.py`
````
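The GUI launch described above boils down to two commands; a minimal sketch, assuming a Unix-like shell from the repo root (it mirrors the `cp gui/streamlit_app.py .` step the Dockerfile uses instead of a move):

```sh
# copy the Streamlit entry point to the repo root, then launch it on port 8501
cp gui/streamlit_app.py .
streamlit run streamlit_app.py   # or: python3 -m streamlit run streamlit_app.py
```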
**`README.md`** (continued)

````
## Docker <a name="docker-instructions"></a>

Build

```
docker build -t gpt4free:latest -f Docker/Dockerfile .
```

Run

```
docker run -p 8501:8501 gpt4free:latest
```
Another way - docker-compose (no docker build/run needed)
```
docker-compose up -d
```

## Deploy using docker-compose

Run the following:

```
docker-compose up -d
```

## ChatGPT clone
> currently implementing new features and trying to scale it, please be patient it may be unstable

> currently implementing new features and trying to scale it, please be patient it may be unstable
> https://chat.chatbot.sex/chat
> This site was developed by me and includes **gpt-4/3.5**, **internet access** and **gpt-jailbreak's** like DAN
> This site was developed by me and includes **gpt-4/3.5**, **internet access** and **gpt-jailbreak's** like DAN
> run locally here: https://github.com/xtekky/chatgpt-clone

## Copyright:
This program is licensed under the [GNU GPL v3](https://www.gnu.org/licenses/gpl-3.0.txt)
## Legal Notice <a name="legal-notice"></a>

This repository uses third-party APIs and is _not_ associated with or endorsed by the API providers. This project is intended **for educational purposes only**. This is just a little personal project. Sites may contact me to improve their security.

Please note the following:

1. **Disclaimer**: The APIs, services, and trademarks mentioned in this repository belong to their respective owners. This project is _not_ claiming any right over them.

2. **Responsibility**: The author of this repository is _not_ responsible for any consequences arising from the use or misuse of this repository or the content provided by the third-party APIs and any damage or losses caused by users' actions.

3. **Educational Purposes Only**: This repository and its content are provided strictly for educational purposes. By using the information and code provided, users acknowledge that they are using the APIs and models at their own risk and agree to comply with any applicable laws and regulations.

## Copyright:

This program is licensed under the [GNU GPL v3](https://www.gnu.org/licenses/gpl-3.0.txt)

Most code, with the exception of `quora/api.py` (by [ading2210](https://github.com/ading2210)), has been written by me, [xtekky](https://github.com/xtekky).

### Copyright Notice: <a name="copyright"></a>

```
xtekky/openai-gpt4: multiple reverse engineered language-model api's to decentralise the ai industry.
xtekky/gpt4free: multiple reverse engineered language-model api's to decentralise the ai industry.
Copyright (C) 2023 xtekky

This program is free software: you can redistribute it and/or modify
@@ -133,3 +184,10 @@ GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
```


## Star History <a name="star-history"></a>

<a href="https://github.com/xtekky/gpt4free/stargazers">
<img width="500" alt="Star History Chart" src="https://api.star-history.com/svg?repos=xtekky/gpt4free&type=Date">
</a>
````
**`Singularity/gpt4free.sif`** (new file, 15 lines, `@@ -0,0 +1,15 @@`) — a Singularity definition stored under a `.sif` name:

```
Bootstrap: docker
From: python:3.10-slim

%post
apt-get update && apt-get install -y git
git clone https://github.com/xtekky/gpt4free.git
cd gpt4free
pip install --no-cache-dir -r requirements.txt
cp gui/streamlit_app.py .

%expose
8501

%startscript
exec streamlit run streamlit_app.py
```
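The diff gives no instructions for using this file; a minimal sketch of how such a definition is typically built and started, assuming Singularity/Apptainer is installed (the output image name `gpt4free-image.sif` and instance name are illustrative, not from the repo):

```sh
# build an image from the definition file (which this repo stores under a .sif name);
# building from a definition usually needs root or --fakeroot
sudo singularity build gpt4free-image.sif Singularity/gpt4free.sif

# starting an instance runs %startscript, which launches Streamlit on port 8501
singularity instance start gpt4free-image.sif gpt4free
```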
**`docker-compose.yaml`** (new file, 9 lines, `@@ -0,0 +1,9 @@`)

```yaml
version: "3.9"

services:
  gpt4free:
    build:
      context: .
      dockerfile: Dockerfile
    ports:
      - "8501:8501"
```
**`docker-compose.yml`** (new file, 12 lines, `@@ -0,0 +1,12 @@`)

```yaml
version: '3.8'

services:
  gpt4:
    build:
      context: .
      dockerfile: Dockerfile
    image: gpt4free:latest
    container_name: gpt4
    ports:
      - 8501:8501
    restart: unless-stopped
```
**Deleted file** (`@@ -1,15 +0,0 @@`) — the old top-level forefront example README:

````md
### Example: `forefront` (use like openai pypi package) <a name="example-forefront"></a>

```python
import forefront

# create an account
token = forefront.Account.create(logging=True)
print(token)

# get a response
for response in forefront.StreamingCompletion.create(token = token,
    prompt = 'hello world', model='gpt-4'):

    print(response.completion.choices[0].text, end = '')
```
````
**Deleted file** (`@@ -1,145 +0,0 @@`) — the old `forefront` module (its imports reference `forefront.mail` and `forefront.typing`):

```python
from tls_client import Session
from forefront.mail import Mail
from time import time, sleep
from re import match
from forefront.typing import ForeFrontResponse
from uuid import uuid4
from requests import post
from json import loads


class Account:
    def create(proxy = None, logging = False):

        proxies = {
            'http': 'http://' + proxy,
            'https': 'http://' + proxy } if proxy else False

        start = time()

        mail = Mail(proxies)
        mail_token = None
        mail_adress = mail.get_mail()

        #print(mail_adress)

        client = Session(client_identifier='chrome110')
        client.proxies = proxies
        client.headers = {
            "origin": "https://accounts.forefront.ai",
            "user-agent" : "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36",
        }

        response = client.post('https://clerk.forefront.ai/v1/client/sign_ups?_clerk_js_version=4.32.6',
            data = {
                "email_address": mail_adress
            }
        )

        trace_token = response.json()['response']['id']
        if logging: print(trace_token)

        response = client.post(f"https://clerk.forefront.ai/v1/client/sign_ups/{trace_token}/prepare_verification?_clerk_js_version=4.32.6",
            data = {
                "strategy" : "email_code",
            }
        )

        if logging: print(response.text)

        if not 'sign_up_attempt' in response.text:
            return 'Failed to create account!'

        while True:
            sleep(1)
            for _ in mail.fetch_inbox():
                print(mail.get_message_content(_["id"]))
                mail_token = match(r"(\d){5,6}", mail.get_message_content(_["id"])).group(0)

            if mail_token:
                break

        if logging: print(mail_token)

        response = client.post(f'https://clerk.forefront.ai/v1/client/sign_ups/{trace_token}/attempt_verification?_clerk_js_version=4.38.4', data = {
            'code': mail_token,
            'strategy': 'email_code'
        })

        if logging: print(response.json())

        token = response.json()['client']['sessions'][0]['last_active_token']['jwt']

        with open('accounts.txt', 'a') as f:
            f.write(f'{mail_adress}:{token}\n')

        if logging: print(time() - start)

        return token


class StreamingCompletion:
    def create(
        token = None,
        chatId = None,
        prompt = '',
        actionType = 'new',
        defaultPersona = '607e41fe-95be-497e-8e97-010a59b2e2c0', # default
        model = 'gpt-4') -> ForeFrontResponse:

        if not token: raise Exception('Token is required!')
        if not chatId: chatId = str(uuid4())

        headers = {
            'authority' : 'chat-server.tenant-forefront-default.knative.chi.coreweave.com',
            'accept' : '*/*',
            'accept-language' : 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'authorization' : 'Bearer ' + token,
            'cache-control' : 'no-cache',
            'content-type' : 'application/json',
            'origin' : 'https://chat.forefront.ai',
            'pragma' : 'no-cache',
            'referer' : 'https://chat.forefront.ai/',
            'sec-ch-ua' : '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
            'sec-ch-ua-mobile' : '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest' : 'empty',
            'sec-fetch-mode' : 'cors',
            'sec-fetch-site' : 'cross-site',
            'user-agent' : 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
        }

        json_data = {
            'text' : prompt,
            'action' : actionType,
            'parentId' : chatId,
            'workspaceId' : chatId,
            'messagePersona' : defaultPersona,
            'model' : model
        }

        for chunk in post('https://chat-server.tenant-forefront-default.knative.chi.coreweave.com/chat',
                headers=headers, json=json_data, stream=True).iter_lines():

            if b'finish_reason":null' in chunk:
                data = loads(chunk.decode('utf-8').split('data: ')[1])
                token = data['choices'][0]['delta'].get('content')

                if token != None:
                    yield ForeFrontResponse({
                        'id' : chatId,
                        'object' : 'text_completion',
                        'created': int(time()),
                        'model' : model,
                        'choices': [{
                            'text' : token,
                            'index' : 0,
                            'logprobs' : None,
                            'finish_reason' : 'stop'
                        }],
                        'usage': {
                            'prompt_tokens' : len(prompt),
                            'completion_tokens' : len(token),
                            'total_tokens' : len(prompt) + len(token)
                        }
                    })
```
**Deleted file** (`@@ -1,55 +0,0 @@`) — the old `forefront` mail helper (a mail.tm client):

```python
from requests import Session
from string import ascii_letters
from random import choices

class Mail:
    def __init__(self, proxies: dict = None) -> None:
        self.client = Session()
        self.client.proxies = proxies
        self.client.headers = {
            "host": "api.mail.tm",
            "connection": "keep-alive",
            "sec-ch-ua": "\"Google Chrome\";v=\"111\", \"Not(A:Brand\";v=\"8\", \"Chromium\";v=\"111\"",
            "accept": "application/json, text/plain, */*",
            "content-type": "application/json",
            "sec-ch-ua-mobile": "?0",
            "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36",
            "sec-ch-ua-platform": "\"macOS\"",
            "origin": "https://mail.tm",
            "sec-fetch-site": "same-site",
            "sec-fetch-mode": "cors",
            "sec-fetch-dest": "empty",
            "referer": "https://mail.tm/",
            "accept-encoding": "gzip, deflate, br",
            "accept-language": "en-GB,en-US;q=0.9,en;q=0.8"
        }

    def get_mail(self) -> str:
        token = ''.join(choices(ascii_letters, k=14)).lower()
        init = self.client.post("https://api.mail.tm/accounts", json={
            "address" : f"{token}@bugfoo.com",
            "password": token
        })

        if init.status_code == 201:
            resp = self.client.post("https://api.mail.tm/token", json = {
                **init.json(),
                "password": token
            })

            self.client.headers['authorization'] = 'Bearer ' + resp.json()['token']

            return f"{token}@bugfoo.com"

        else:
            raise Exception("Failed to create email")

    def fetch_inbox(self):
        return self.client.get(f"https://api.mail.tm/messages").json()["hydra:member"]

    def get_message(self, message_id: str):
        return self.client.get(f"https://api.mail.tm/messages/{message_id}").json()

    def get_message_content(self, message_id: str):
        return self.get_message(message_id)["text"]
```
**Deleted file** (`@@ -1,37 +0,0 @@`) — the old `forefront.typing` response classes:

```python
class ForeFrontResponse:
    class Completion:
        class Choices:
            def __init__(self, choice: dict) -> None:
                self.text = choice['text']
                self.content = self.text.encode()
                self.index = choice['index']
                self.logprobs = choice['logprobs']
                self.finish_reason = choice['finish_reason']

            def __repr__(self) -> str:
                return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''

        def __init__(self, choices: dict) -> None:
            self.choices = [self.Choices(choice) for choice in choices]

    class Usage:
        def __init__(self, usage_dict: dict) -> None:
            self.prompt_tokens = usage_dict['prompt_tokens']
            self.completion_tokens = usage_dict['completion_tokens']
            self.total_tokens = usage_dict['total_tokens']

        def __repr__(self):
            return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''

    def __init__(self, response_dict: dict) -> None:

        self.response_dict = response_dict
        self.id = response_dict['id']
        self.object = response_dict['object']
        self.created = response_dict['created']
        self.model = response_dict['model']
        self.completion = self.Completion(response_dict['choices'])
        self.usage = self.Usage(response_dict['usage'])

    def json(self) -> dict:
        return self.response_dict
```
**`gpt4free/README.md`** (new file, 115 lines, `@@ -0,0 +1,115 @@`)

````md
# gpt4free package

### What is it?

gpt4free is a python package that provides some language model api's

### Main Features

- It's free to use
- Easy access

### Installation:

```bash
pip install gpt4free
```

#### Usage:

```python
import gpt4free
from gpt4free import Provider, quora, forefront

# usage You
response = gpt4free.Completion.create(Provider.You, prompt='Write a poem on Lionel Messi')
print(response)

# usage Poe
token = quora.Account.create(logging=False)
response = gpt4free.Completion.create(Provider.Poe, prompt='Write a poem on Lionel Messi', token=token, model='ChatGPT')
print(response)

# usage forefront
token = forefront.Account.create(logging=False)
response = gpt4free.Completion.create(
    Provider.ForeFront, prompt='Write a poem on Lionel Messi', model='gpt-4', token=token
)
print(response)
print(f'END')

# usage theb
response = gpt4free.Completion.create(Provider.Theb, prompt='Write a poem on Lionel Messi')
print(response)

# usage cocalc
response = gpt4free.Completion.create(Provider.CoCalc, prompt='Write a poem on Lionel Messi', cookie_input='')
print(response)

```

### Invocation Arguments

`gpt4free.Completion.create()` method has two required arguments

1. Provider: This is an enum representing different provider
2. prompt: This is the user input

#### Keyword Arguments

Some of the keyword arguments are optional, while others are required.

- You:
  - `safe_search`: boolean - default value is `False`
  - `include_links`: boolean - default value is `False`
  - `detailed`: boolean - default value is `False`
- Quora:
  - `token`: str - this needs to be provided by the user
  - `model`: str - default value is `gpt-4`.

    (Available models: `['Sage', 'GPT-4', 'Claude+', 'Claude-instant', 'ChatGPT', 'Dragonfly', 'NeevaAI']`)
- ForeFront:
  - `token`: str - this need to be provided by the user

- Theb:
  (no keyword arguments required)
- CoCalc:
  - `cookie_input`: str - this needs to be provided by user
````
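Putting the keyword arguments listed above together, a minimal sketch (provider-specific kwargs are forwarded unchanged to the underlying module, per the dispatch in `gpt4free/__init__.py` shown later in this diff):

```python
import gpt4free
from gpt4free import Provider

# the You provider accepts the three optional booleans documented above
response = gpt4free.Completion.create(
    Provider.You,
    prompt='Write a poem on Lionel Messi',
    safe_search=True,
    include_links=True,
    detailed=False,
)
print(response)
```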
**`gpt4free/README.md`** (continued)

````md
#### Token generation of quora
```python
from gpt4free import quora

token = quora.Account.create(logging=False)
```

### Token generation of ForeFront
```python
from gpt4free import forefront

token = forefront.Account.create(logging=False)
```

## Copyright:

This program is licensed under the [GNU GPL v3](https://www.gnu.org/licenses/gpl-3.0.txt)

### Copyright Notice: <a name="copyright"></a>

```
xtekky/gpt4free: multiple reverse engineered language-model api's to decentralise the ai industry.
Copyright (C) 2023 xtekky

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
```
````
**`gpt4free/__init__.py`** (new file, 73 lines, `@@ -0,0 +1,73 @@`)

```python
from enum import Enum

from gpt4free import cocalc
from gpt4free import forefront
from gpt4free import quora
from gpt4free import theb
from gpt4free import you
from gpt4free import usesless


class Provider(Enum):
    """An enum representing different providers."""

    You = 'you'
    Poe = 'poe'
    ForeFront = 'fore_front'
    Theb = 'theb'
    CoCalc = 'cocalc'
    UseLess = 'useless'


class Completion:
    """This class will be used for invoking the given provider"""

    @staticmethod
    def create(provider: Provider, prompt: str, **kwargs) -> str:

        """
        Invokes the given provider with given prompt and addition arguments and returns the string response

        :param provider: an enum representing the provider to use while invoking
        :param prompt: input provided by the user
        :param kwargs: Additional keyword arguments to pass to the provider while invoking
        :return: A string representing the response from the provider
        """
        if provider == Provider.Poe:
            return Completion.__poe_service(prompt, **kwargs)
        elif provider == Provider.You:
            return Completion.__you_service(prompt, **kwargs)
        elif provider == Provider.ForeFront:
            return Completion.__fore_front_service(prompt, **kwargs)
        elif provider == Provider.Theb:
            return Completion.__theb_service(prompt, **kwargs)
        elif provider == Provider.CoCalc:
            return Completion.__cocalc_service(prompt, **kwargs)
        elif provider == Provider.UseLess:
            return Completion.__useless_service(prompt, **kwargs)
        else:
            raise Exception('Provider not exist, Please try again')

    @staticmethod
    def __useless_service(prompt: str, **kwargs) -> str:
        return usesless.Completion.create(prompt = prompt, **kwargs)

    @staticmethod
    def __you_service(prompt: str, **kwargs) -> str:
        return you.Completion.create(prompt, **kwargs).text

    @staticmethod
    def __poe_service(prompt: str, **kwargs) -> str:
        return quora.Completion.create(prompt=prompt, **kwargs).text

    @staticmethod
    def __fore_front_service(prompt: str, **kwargs) -> str:
        return forefront.Completion.create(prompt=prompt, **kwargs).text

    @staticmethod
    def __theb_service(prompt: str, **kwargs):
        return ''.join(theb.Completion.create(prompt=prompt))

    @staticmethod
    def __cocalc_service(prompt: str, **kwargs):
        return cocalc.Completion.create(prompt, cookie_input=kwargs.get('cookie_input', '')).text
```
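One provider the package README earlier in this diff does not yet document is `UseLess`, which the dispatch above routes to `usesless.Completion.create(prompt=prompt, **kwargs)`; a minimal sketch of invoking it through the same entry point:

```python
import gpt4free
from gpt4free import Provider

# routed to usesless.Completion.create() by the dispatch shown above
response = gpt4free.Completion.create(Provider.UseLess, prompt='Write a poem on Lionel Messi')
print(response)
```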
**`gpt4free/cocalc/__init__.py`** (new file, 67 lines, `@@ -0,0 +1,67 @@`)

```python
import requests
from fake_useragent import UserAgent
from pydantic import BaseModel


class CoCalcResponse(BaseModel):
    text: str
    status: bool


class Completion:
    """A class for generating text completions using CoCalc's GPT-based chatbot."""

    API_ENDPOINT = "https://cocalc.com/api/v2/openai/chatgpt"
    DEFAULT_SYSTEM_PROMPT = "ASSUME I HAVE FULL ACCESS TO COCALC. "

    @staticmethod
    def create(prompt: str, cookie_input: str) -> CoCalcResponse:
        """
        Generate a text completion for the given prompt using CoCalc's GPT-based chatbot.

        Args:
            prompt: The text prompt to complete.
            cookie_input: The cookie required to authenticate the chatbot API request.

        Returns:
            A CoCalcResponse object containing the text completion and a boolean indicating
            whether the request was successful.
        """

        # Initialize a session with custom headers
        session = Completion._initialize_session(cookie_input)

        # Set the data that will be submitted
        payload = Completion._create_payload(prompt, Completion.DEFAULT_SYSTEM_PROMPT)

        try:
            # Submit the request and return the results
            response = session.post(Completion.API_ENDPOINT, json=payload).json()
            return CoCalcResponse(text=response['output'], status=response['success'])
        except requests.exceptions.RequestException as e:
            # Handle exceptions that may occur during the request
            print(f"Error: {e}")
            return CoCalcResponse(text="", status=False)

    @classmethod
    def _initialize_session(cls, conversation_cookie: str) -> requests.Session:
        """Initialize a session with custom headers for the request."""

        session = requests.Session()
        headers = {
            "Accept": "*/*",
            "Accept-Language": "en-US,en;q=0.5",
            "Origin": "https://cocalc.com",
            "Referer": "https://cocalc.com/api/v2/openai/chatgpt",
            "Cookie": conversation_cookie,
            "User-Agent": UserAgent().random,
        }
        session.headers.update(headers)

        return session

    @staticmethod
    def _create_payload(prompt: str, system_prompt: str) -> dict:
        """Create the payload for the API request."""

        return {"input": prompt, "system": system_prompt, "tag": "next:index"}
```
**`gpt4free/cocalc/readme.md`** (new file, 19 lines, `@@ -0,0 +1,19 @@`)

````md
### Example: `cocalc` <a name="example-cocalc"></a>

```python
# import library
from gpt4free import cocalc

cocalc.Completion.create(prompt="How are you!", cookie_input="cookieinput") ## Tutorial
```

### How to grab cookie input
```js
// input this into ur developer tools console and the exact response u get from this u put into ur cookieInput!
var cookies = document.cookie.split("; ");
var cookieString = "";
for (var i = 0; i < cookies.length; i++) {
    cookieString += cookies[i] + "; ";
}
console.log(cookieString);
```
````
**`gpt4free/forefront/README.md`** (new file, 13 lines, `@@ -0,0 +1,13 @@`)

````md
### Example: `forefront` (use like openai pypi package) <a name="example-forefront"></a>

```python
from gpt4free import forefront
# create an account
token = forefront.Account.create(logging=False)
print(token)
# get a response
for response in forefront.StreamingCompletion.create(token=token,
                                                     prompt='hello world', model='gpt-4'):
    print(response.completion.choices[0].text, end='')
print("")
```
````
**`gpt4free/forefront/__init__.py`** (new file, 194 lines, `@@ -0,0 +1,194 @@`)

```python
from json import loads
from re import findall
from time import time, sleep
from typing import Generator, Optional
from uuid import uuid4

from fake_useragent import UserAgent
from requests import post
from pymailtm import MailTm, Message
from tls_client import Session

from .typing import ForeFrontResponse


class Account:
    @staticmethod
    def create(proxy: Optional[str] = None, logging: bool = False):
        proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else False

        start = time()

        mail_client = MailTm().get_account()
        mail_address = mail_client.address

        client = Session(client_identifier='chrome110')
        client.proxies = proxies
        client.headers = {
            'origin': 'https://accounts.forefront.ai',
            'user-agent': UserAgent().random,
        }

        response = client.post(
            'https://clerk.forefront.ai/v1/client/sign_ups?_clerk_js_version=4.38.4',
            data={'email_address': mail_address},
        )

        try:
            trace_token = response.json()['response']['id']
            if logging:
                print(trace_token)
        except KeyError:
            return 'Failed to create account!'

        response = client.post(
            f'https://clerk.forefront.ai/v1/client/sign_ups/{trace_token}/prepare_verification?_clerk_js_version=4.38.4',
            data={
                'strategy': 'email_link',
                'redirect_url': 'https://accounts.forefront.ai/sign-up/verify'
            },
        )

        if logging:
            print(response.text)

        if 'sign_up_attempt' not in response.text:
            return 'Failed to create account!'

        while True:
            sleep(1)
            new_message: Message = mail_client.wait_for_message()
            if logging:
                print(new_message.data['id'])

            verification_url = findall(r'https:\/\/clerk\.forefront\.ai\/v1\/verify\?token=\w.+', new_message.text)[0]

            if verification_url:
                break

        if logging:
            print(verification_url)

        response = client.get(verification_url)

        response = client.get('https://clerk.forefront.ai/v1/client?_clerk_js_version=4.38.4')

        token = response.json()['response']['sessions'][0]['last_active_token']['jwt']

        with open('accounts.txt', 'a') as f:
            f.write(f'{mail_address}:{token}\n')

        if logging:
            print(time() - start)

        return token


class StreamingCompletion:
    @staticmethod
    def create(
        token=None,
        chat_id=None,
        prompt='',
        action_type='new',
        default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0',  # default
        model='gpt-4',
        proxy=None
    ) -> Generator[ForeFrontResponse, None, None]:
        if not token:
            raise Exception('Token is required!')
        if not chat_id:
            chat_id = str(uuid4())

        proxies = { 'http': 'http://' + proxy, 'https': 'http://' + proxy } if proxy else None

        headers = {
            'authority': 'chat-server.tenant-forefront-default.knative.chi.coreweave.com',
            'accept': '*/*',
            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'authorization': 'Bearer ' + token,
            'cache-control': 'no-cache',
            'content-type': 'application/json',
            'origin': 'https://chat.forefront.ai',
            'pragma': 'no-cache',
            'referer': 'https://chat.forefront.ai/',
            'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'cross-site',
            'user-agent': UserAgent().random,
        }

        json_data = {
            'text': prompt,
            'action': action_type,
            'parentId': chat_id,
            'workspaceId': chat_id,
            'messagePersona': default_persona,
            'model': model,
        }

        for chunk in post(
            'https://chat-server.tenant-forefront-default.knative.chi.coreweave.com/chat',
            headers=headers,
            proxies=proxies,
            json=json_data,
            stream=True,
        ).iter_lines():
            if b'finish_reason":null' in chunk:
                data = loads(chunk.decode('utf-8').split('data: ')[1])
                token = data['choices'][0]['delta'].get('content')

                if token is not None:
                    yield ForeFrontResponse(
                        **{
                            'id': chat_id,
                            'object': 'text_completion',
                            'created': int(time()),
                            'text': token,
                            'model': model,
                            'choices': [{'text': token, 'index': 0, 'logprobs': None, 'finish_reason': 'stop'}],
                            'usage': {
                                'prompt_tokens': len(prompt),
                                'completion_tokens': len(token),
                                'total_tokens': len(prompt) + len(token),
                            },
                        }
                    )


class Completion:
    @staticmethod
    def create(
        token=None,
        chat_id=None,
        prompt='',
        action_type='new',
        default_persona='607e41fe-95be-497e-8e97-010a59b2e2c0',  # default
        model='gpt-4',
        proxy=None
    ) -> ForeFrontResponse:
        text = ''
        final_response = None
        for response in StreamingCompletion.create(
            token=token,
            chat_id=chat_id,
            prompt=prompt,
            action_type=action_type,
            default_persona=default_persona,
            model=model,
            proxy=proxy
        ):
            if response:
                final_response = response
                text += response.text

        if final_response:
            final_response.text = text
        else:
            raise Exception('Unable to get the response, Please try again')

        return final_response
```
**`gpt4free/forefront/typing.py`** (new file, 25 lines, `@@ -0,0 +1,25 @@`)

```python
from typing import Any, List
from pydantic import BaseModel


class Choice(BaseModel):
    text: str
    index: int
    logprobs: Any
    finish_reason: str


class Usage(BaseModel):
    prompt_tokens: int
    completion_tokens: int
    total_tokens: int


class ForeFrontResponse(BaseModel):
    id: str
    object: str
    created: int
    model: str
    choices: List[Choice]
    usage: Usage
    text: str
```
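With the new pydantic model above, the aggregated output of a non-streaming call lands on the `text` field (the `Completion.create` wrapper in the previous file concatenates the streamed chunks into it), so a minimal consumption sketch looks like this:

```python
from gpt4free import forefront

token = forefront.Account.create(logging=False)
response = forefront.Completion.create(token=token, prompt='hello world', model='gpt-4')
# the non-streaming wrapper stores the concatenated chunks on .text
print(response.text)
```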
**`quora` example README** (the file header for this diff was not captured)

````
@@ -1,4 +1,5 @@
#### warning !!!

> ⚠ Warning !!!
poe.com added security and can detect if you are making automated requests. You may get your account banned if you are using this api.
The normal non-driver api is also currently not very stable

@@ -16,34 +17,33 @@ models = {
}
```

#### !! new: bot creation
### New: bot creation

```python
# import quora (poe) package
import quora
from gpt4free import quora

# create account
# make sure to set enable_bot_creation to True
token = quora.Account.create(logging = True, enable_bot_creation=True)
token = quora.Account.create(logging=True, enable_bot_creation=True)

model = quora.Model.create(
    token = token,
    model = 'gpt-3.5-turbo', # or claude-instant-v1.0
    system_prompt = 'you are ChatGPT a large language model ...'
    token=token,
    model='gpt-3.5-turbo', # or claude-instant-v1.0
    system_prompt='you are ChatGPT a large language model ...'
)

print(model.name) # gptx....
print(model.name)  # gptx....

# streaming response
for response in quora.StreamingCompletion.create(
    custom_model = model.name,
    prompt ='hello world',
    token = token):

    custom_model=model.name,
    prompt='hello world',
    token=token):
    print(response.completion.choices[0].text)
```

#### Normal Response:
### Normal Response:
```python

response = quora.Completion.create(model = 'gpt-4',
@@ -53,9 +53,9 @@ response = quora.Completion.create(model = 'gpt-4',
print(response.completion.choices[0].text)
```

#### Update Use This For Poe
### Update Use This For Poe
```python
from quora import Poe
from gpt4free.quora import Poe

# available models: ['Sage', 'GPT-4', 'Claude+', 'Claude-instant', 'ChatGPT', 'Dragonfly', 'NeevaAI']

````
**`quora` module** (file header not captured; judging by its relative-import changes this is the quora package `__init__` — the hunks rework its imports, response classes, and the `Completion`/`Poe` classes)

```
@@ -6,11 +6,12 @@ from pathlib import Path
from random import choice, choices, randint
from re import search, findall
from string import ascii_letters, digits
from typing import Optional, Union
from typing import Optional, Union, List, Any, Generator
from urllib.parse import unquote

import selenium.webdriver.support.expected_conditions as EC
from fake_useragent import UserAgent
from pydantic import BaseModel
from pypasser import reCaptchaV3
from requests import Session
from selenium.webdriver import Firefox, Chrome, FirefoxOptions, ChromeOptions
@@ -18,8 +19,8 @@ from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from tls_client import Session as TLS

from quora.api import Client as PoeClient
from quora.mail import Emailnator
from .api import Client as PoeClient
from .mail import Emailnator

SELENIUM_WEB_DRIVER_ERROR_MSG = b'''The error message you are receiving is due to the `geckodriver` executable not
being found in your system\'s PATH. To resolve this issue, you need to download the geckodriver and add its location
@@ -67,42 +68,27 @@ def extract_formkey(html):
    return formkey


class PoeResponse:
    class Completion:
        class Choices:
            def __init__(self, choice: dict) -> None:
                self.text = choice['text']
                self.content = self.text.encode()
                self.index = choice['index']
                self.logprobs = choice['logprobs']
                self.finish_reason = choice['finish_reason']
class Choice(BaseModel):
    text: str
    index: int
    logprobs: Any
    finish_reason: str

            def __repr__(self) -> str:
                return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''

        def __init__(self, choices: dict) -> None:
            self.choices = [self.Choices(choice) for choice in choices]
class Usage(BaseModel):
    prompt_tokens: int
    completion_tokens: int
    total_tokens: int

    class Usage:
        def __init__(self, usage_dict: dict) -> None:
            self.prompt_tokens = usage_dict['prompt_tokens']
            self.completion_tokens = usage_dict['completion_tokens']
            self.total_tokens = usage_dict['total_tokens']

        def __repr__(self):
            return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''

    def __init__(self, response_dict: dict) -> None:
        self.response_dict = response_dict
        self.id = response_dict['id']
        self.object = response_dict['object']
        self.created = response_dict['created']
        self.model = response_dict['model']
        self.completion = self.Completion(response_dict['choices'])
        self.usage = self.Usage(response_dict['usage'])

    def json(self) -> dict:
        return self.response_dict
class PoeResponse(BaseModel):
    id: int
    object: str
    created: int
    model: str
    choices: List[Choice]
    usage: Usage
    text: str


class ModelResponse:
@@ -122,12 +108,6 @@ class Model:
        description: str = 'gpt-3.5 language model from openai, skidded by poe.com',
        handle: str = None,
    ) -> ModelResponse:
        models = {
            'gpt-3.5-turbo': 'chinchilla',
            'claude-instant-v1.0': 'a2',
            'gpt-4': 'beaver',
        }

        if not handle:
            handle = f'gptx{randint(1111111, 9999999)}'

@@ -162,7 +142,7 @@ class Model:
            obj={
                'queryName': 'CreateBotMain_poeBotCreate_Mutation',
                'variables': {
                    'model': models[model],
                    'model': MODELS[model],
                    'handle': handle,
                    'prompt': system_prompt,
                    'isPromptPublic': True,
@@ -207,7 +187,7 @@ class Account:
        enable_bot_creation: bool = False,
    ):
        client = TLS(client_identifier='chrome110')
        client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else None
        client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else {}

        mail_client = Emailnator()
        mail_address = mail_client.get_mail()
@@ -313,18 +293,22 @@ class StreamingCompletion:
        custom_model: bool = None,
        prompt: str = 'hello world',
        token: str = '',
    ):
        proxy: Optional[str] = None
    ) -> Generator[PoeResponse, None, None]:
        _model = MODELS[model] if not custom_model else custom_model

        proxies = { 'http': 'http://' + proxy, 'https': 'http://' + proxy } if proxy else False
        client = PoeClient(token)
        client.proxy = proxies

        for chunk in client.send_message(_model, prompt):
            yield PoeResponse(
                {
                **{
                    'id': chunk['messageId'],
                    'object': 'text_completion',
                    'created': chunk['creationTime'],
                    'model': _model,
                    'text': chunk['text_new'],
                    'choices': [
                        {
                            'text': chunk['text_new'],
@@ -343,33 +327,31 @@ class StreamingCompletion:


class Completion:
    @staticmethod
    def create(
        model: str = 'gpt-4',
        custom_model: str = None,
        prompt: str = 'hello world',
        token: str = '',
    ):
        models = {
            'sage': 'capybara',
            'gpt-4': 'beaver',
            'claude-v1.2': 'a2_2',
            'claude-instant-v1.0': 'a2',
            'gpt-3.5-turbo': 'chinchilla',
        }

        _model = models[model] if not custom_model else custom_model
        proxy: Optional[str] = None
    ) -> PoeResponse:
        _model = MODELS[model] if not custom_model else custom_model

        proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else False
        client = PoeClient(token)
        client.proxy = proxies

        for chunk in client.send_message(_model, prompt):
            pass
        chunk = None
        for response in client.send_message(_model, prompt):
            chunk = response

        return PoeResponse(
            {
            **{
                'id': chunk['messageId'],
                'object': 'text_completion',
                'created': chunk['creationTime'],
                'model': _model,
                'text': chunk['text'],
                'choices': [
                    {
                        'text': chunk['text'],
@@ -401,10 +383,10 @@ class Poe:
            raise RuntimeError('Sorry, the model you provided does not exist. Please check and try again.')
        self.model = MODELS[model]
        self.cookie_path = cookie_path
        self.cookie = self.__load_cookie(driver, download_driver, driver_path=driver_path)
        self.cookie = self.__load_cookie(driver, driver_path=driver_path)
        self.client = PoeClient(self.cookie)

    def __load_cookie(self, driver: str, download_driver: bool, driver_path: Optional[str] = None) -> str:
    def __load_cookie(self, driver: str, driver_path: Optional[str] = None) -> str:
        if (cookie_file := Path(self.cookie_path)).exists():
            with cookie_file.open() as fp:
                cookie = json.load(fp)
@@ -451,8 +433,8 @@ class Poe:
        driver.close()
        return cookie

    @classmethod
    def __resolve_driver(cls, driver: str, driver_path: Optional[str] = None) -> Union[Firefox, Chrome]:
    @staticmethod
    def __resolve_driver(driver: str, driver_path: Optional[str] = None) -> Union[Firefox, Chrome]:
        options = FirefoxOptions() if driver == 'firefox' else ChromeOptions()
        options.add_argument('-headless')
```
@@ -225,7 +225,7 @@ class Client:
|
||||
r = request_with_retries(self.session.post, self.gql_url, data=payload, headers=headers)
|
||||
|
||||
data = r.json()
|
||||
if data["data"] == None:
|
||||
if data["data"] is None:
|
||||
logger.warn(f'{query_name} returned an error: {data["errors"][0]["message"]} | Retrying ({i + 1}/20)')
|
||||
time.sleep(2)
|
||||
continue
|
||||
@@ -316,7 +316,7 @@ class Client:
|
||||
return
|
||||
|
||||
# indicate that the response id is tied to the human message id
|
||||
elif key != "pending" and value == None and message["state"] != "complete":
|
||||
elif key != "pending" and value is None and message["state"] != "complete":
|
||||
self.active_messages[key] = message["messageId"]
|
||||
self.message_queues[key].put(message)
|
||||
return
|
||||
@@ -402,7 +402,7 @@ class Client:
|
||||
logger.info(f"Downloading {count} messages from {chatbot}")
|
||||
|
||||
messages = []
|
||||
if cursor == None:
|
||||
if cursor is None:
|
||||
chat_data = self.get_bot(self.bot_names[chatbot])
|
||||
if not chat_data["messagesConnection"]["edges"]:
|
||||
return []
|
||||
37  gpt4free/quora/backup-mail.py  Normal file
@@ -0,0 +1,37 @@
from requests import Session
from time import sleep
from json import loads
from re import findall


class Mail:
    def __init__(self) -> None:
        self.client = Session()
        self.client.post("https://etempmail.com/")
        self.cookies = {'acceptcookie': 'true'}
        self.cookies["ci_session"] = self.client.cookies.get_dict()["ci_session"]
        self.email = None

    def get_mail(self):
        response = self.client.post("https://etempmail.com/getEmailAddress")
        # store the recovery-key cookie and the disposable address
        self.cookies["lisansimo"] = loads(response.text)["recover_key"]
        self.email = loads(response.text)["address"]
        return self.email

    def get_message(self):
        print("Waiting for message...")
        while True:
            sleep(5)
            response = self.client.post("https://etempmail.com/getInbox")
            mail_token = loads(response.text)
            if len(mail_token) == 1:
                break

        params = {'id': '1'}
        self.mail_context = self.client.post("https://etempmail.com/getInbox", params=params)
        self.mail_context = loads(self.mail_context.text)[0]["body"]
        return self.mail_context

    def get_verification_code(self):
        message = self.mail_context
        code = findall(r';">(\d{6,7})</div>', message)[0]
        print(f"Verification code: {code}")
        return code
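For orientation, here is a minimal usage sketch for the `Mail` helper above (hypothetical, not part of the commit; note that `backup-mail.py` contains a hyphen, so the import name below assumes the file has been renamed):

```python
# Hypothetical driver for the Mail helper above (assumes the file was renamed
# to backup_mail.py so it is importable; backup-mail.py itself is not).
from backup_mail import Mail

mail = Mail()
address = mail.get_mail()            # disposable address from etempmail.com
print("Register with:", address)

mail.get_message()                   # polls every 5 s until a message arrives
code = mail.get_verification_code()  # pulls the 6-7 digit code from the HTML body
```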
@@ -38,17 +38,16 @@ class Emailnator:
        return self.email

    def get_message(self):
        print("waiting for code...")
        print("Waiting for message...")

        while True:
            sleep(2)
            mail_token = self.client.post(
                "https://www.emailnator.com/message-list", json={"email": self.email}
            )
            mail_token = self.client.post("https://www.emailnator.com/message-list", json={"email": self.email})

            mail_token = loads(mail_token.text)["messageData"]

            if len(mail_token) == 2:
                print("Message received!")
                print(mail_token[1]["messageID"])
                break

@@ -63,4 +62,19 @@ class Emailnator:
        return mail_context.text

    def get_verification_code(self):
        return findall(r';">(\d{6,7})</div>', self.get_message())[0]
        message = self.get_message()
        code = findall(r';">(\d{6,7})</div>', message)[0]
        print(f"Verification code: {code}")
        return code

    def clear_inbox(self):
        print("Clearing inbox...")
        self.client.post(
            "https://www.emailnator.com/delete-all",
            json={"email": self.email},
        )
        print("Inbox cleared!")

    def __del__(self):
        if self.email:
            self.clear_inbox()
11  gpt4free/theb/README.md  Normal file
@@ -0,0 +1,11 @@
### Example: `theb` (use like openai pypi package) <a name="example-theb"></a>

```python
# import library
from gpt4free import theb

# simple streaming completion
for token in theb.Completion.create('hello world'):
    print(token, end='', flush=True)
print("")
```
57  gpt4free/theb/__init__.py  Normal file
@@ -0,0 +1,57 @@
from json import loads
from queue import Queue, Empty
from re import findall
from threading import Thread
from typing import Generator, Optional

from curl_cffi import requests
from fake_useragent import UserAgent


class Completion:
    # experimental
    part1 = '{"role":"assistant","id":"chatcmpl'
    part2 = '"},"index":0,"finish_reason":null}]}}'
    regex = rf'{part1}(.*){part2}'

    timer = None
    message_queue = Queue()
    stream_completed = False

    @staticmethod
    def request(prompt: str, proxy: Optional[str] = None):
        headers = {
            'authority': 'chatbot.theb.ai',
            'content-type': 'application/json',
            'origin': 'https://chatbot.theb.ai',
            'user-agent': UserAgent().random,
        }

        proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None

        requests.post(
            'https://chatbot.theb.ai/api/chat-process',
            headers=headers,
            proxies=proxies,
            content_callback=Completion.handle_stream_response,
            json={'prompt': prompt, 'options': {}},
        )

        Completion.stream_completed = True

    @staticmethod
    def create(prompt: str, proxy: Optional[str] = None) -> Generator[str, None, None]:
        Thread(target=Completion.request, args=[prompt, proxy]).start()

        while not Completion.stream_completed or not Completion.message_queue.empty():
            try:
                message = Completion.message_queue.get(timeout=0.01)
                for message in findall(Completion.regex, message):
                    yield loads(Completion.part1 + message + Completion.part2)['delta']

            except Empty:
                pass

    @staticmethod
    def handle_stream_response(response):
        Completion.message_queue.put(response.decode())
4  gpt4free/theb/theb_test.py  Normal file
@@ -0,0 +1,4 @@
import theb

for token in theb.Completion.create('hello world'):
    print(token, end='', flush=True)
23  gpt4free/usesless/README.md  Normal file
@@ -0,0 +1,23 @@
ai.usesless.com

to do:

- use a random user agent in the header (see the sketch below)
- make the code better, I guess (?)
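A minimal sketch of the first to-do item, reusing the fake_useragent package that the theb and you providers in this repository already depend on (the header set is abridged from `usesless/__init__.py`):

```python
# Sketch: pick a fresh random user agent instead of the hard-coded Firefox string.
from fake_useragent import UserAgent

headers = {
    "authority": "ai.usesless.com",
    "accept": "application/json, text/plain, */*",
    "user-agent": UserAgent().random,  # randomized on each access
}
```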
### Example: `usesless` <a name="example-usesless"></a>

```python
import usesless

message_id = ""
while True:
    prompt = input("Question: ")
    if prompt == "!stop":
        break

    req = usesless.Completion.create(prompt=prompt, parentMessageId=message_id)

    print(f"Answer: {req['text']}")
    message_id = req["id"]
```
55  gpt4free/usesless/__init__.py  Normal file
@@ -0,0 +1,55 @@
import requests
import json


class Completion:
    headers = {
        "authority": "ai.usesless.com",
        "accept": "application/json, text/plain, */*",
        "accept-language": "en-US,en;q=0.5",
        "cache-control": "no-cache",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "user-agent": "Mozilla/5.0 (X11; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/112.0",
    }

    @staticmethod
    def create(
        systemMessage: str = "You are a helpful assistant",
        prompt: str = "",
        parentMessageId: str = "",
        presence_penalty: float = 1,
        temperature: float = 1,
        model: str = "gpt-3.5-turbo",
    ):
        print(parentMessageId, prompt)

        json_data = {
            "openaiKey": "",
            "prompt": prompt,
            "options": {
                "parentMessageId": parentMessageId,
                "systemMessage": systemMessage,
                "completionParams": {
                    "presence_penalty": presence_penalty,
                    "temperature": temperature,
                    "model": model,
                },
            },
        }

        url = "https://ai.usesless.com/api/chat-process"
        request = requests.post(url, headers=Completion.headers, json=json_data)
        content = request.content

        response = Completion.__response_to_json(content)
        return response

    @classmethod
    def __response_to_json(cls, text) -> dict:
        text = str(text.decode("utf-8"))

        split_text = text.rsplit("\n", 1)[1]
        to_json = json.loads(split_text)
        return to_json
@@ -1,7 +1,8 @@
### Example: `you` (use like openai pypi package) <a name="example-you"></a>

```python
import you
from gpt4free import you

# simple request with links and details
response = you.Completion.create(
@@ -9,7 +10,7 @@ response = you.Completion.create(
    detailed=True,
    include_links=True, )

print(response)
print(response.dict())

# {
#     "response": "...",
@@ -25,12 +26,13 @@ chat = []

while True:
    prompt = input("You: ")

    if prompt == 'q':
        break
    response = you.Completion.create(
        prompt=prompt,
        chat=chat)

    print("Bot:", response["response"])
    print("Bot:", response.text)

    chat.append({"question": prompt, "answer": response["response"]})
```
    chat.append({"question": prompt, "answer": response.text})
```
@@ -1,11 +1,19 @@
import json
import re
from json import loads
from typing import Optional, List, Dict, Any
from uuid import uuid4

from fake_useragent import UserAgent
from pydantic import BaseModel
from tls_client import Session


class PoeResponse(BaseModel):
    text: Optional[str] = None
    links: List[str] = []
    extra: Dict[str, Any] = {}


class Completion:
    @staticmethod
    def create(
@@ -22,12 +30,16 @@ class Completion:
        include_links: bool = False,
        detailed: bool = False,
        debug: bool = False,
    ) -> dict:
        proxy: Optional[str] = None
    ) -> PoeResponse:
        if chat is None:
            chat = []

        proxies = { 'http': 'http://' + proxy, 'https': 'http://' + proxy } if proxy else {}

        client = Session(client_identifier='chrome_108')
        client.headers = Completion.__get_headers()
        client.proxies = proxies

        response = client.get(
            f'https://you.com/api/streamingSearch',
@@ -64,20 +76,21 @@ class Completion:
        text = ''.join(re.findall(r'{\"youChatToken\": \"(.*?)\"}', response.text))

        extra = {
            'youChatSerpResults': loads(you_chat_serp_results),
            'youChatSerpResults': json.loads(you_chat_serp_results),
            # 'slots' : loads(slots)
        }

        return {
            'response': text.replace('\\n', '\n').replace('\\\\', '\\'),
            'links': loads(third_party_search_results)['search']['third_party_search_results']
            if include_links
            else None,
            'extra': extra if detailed else None,
        }
        response = PoeResponse(text=text.replace('\\n', '\n').replace('\\\\', '\\').replace('\\"', '"'))
        if include_links:
            response.links = json.loads(third_party_search_results)['search']['third_party_search_results']

    @classmethod
    def __get_headers(cls) -> dict:
        if detailed:
            response.extra = extra

        return response

    @staticmethod
    def __get_headers() -> dict:
        return {
            'authority': 'you.com',
            'accept': 'text/event-stream',
@@ -94,6 +107,6 @@ class Completion:
            'user-agent': UserAgent().random,
        }

    @classmethod
    def __get_failure_response(cls) -> dict:
        return dict(response='Unable to fetch the response, Please try again.', links=[], extra={})
    @staticmethod
    def __get_failure_response() -> PoeResponse:
        return PoeResponse(text='Unable to fetch the response, Please try again.')
@@ -1,9 +1,72 @@
# gpt4free gui

This code provides a Graphical User Interface (GUI) for gpt4free. Users can ask questions and get answers from the GPT-4 APIs, using multiple API implementations. The project contains two different Streamlit applications: `streamlit_app.py` and `streamlit_chat_app.py`.

Installation
------------

1. Clone the repository.
2. Install the required dependencies with: `pip install -r requirements.txt`.
3. To use `streamlit_chat_app.py`, note that it depends on a pull request (PR #24) from the https://github.com/AI-Yash/st-chat/ repository, which may change in the future. The current dependency library can be found at https://github.com/AI-Yash/st-chat/archive/refs/pull/24/head.zip (see the install command below).
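Since pip can install directly from an archive URL, step 3 reduces to a single command (shown as a sketch, using the URL above):

```bash
# Install the st-chat build from PR #24 straight from its archive URL.
pip install https://github.com/AI-Yash/st-chat/archive/refs/pull/24/head.zip
```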
Usage
-----

Choose one of the Streamlit applications to run:

### streamlit\_app.py

This application provides a simple interface for asking GPT-4 questions and receiving answers.

To run the application:

```bash
streamlit run gui/streamlit_app.py
```

<br>

<img width="724" alt="image" src="https://user-images.githubusercontent.com/98614666/234232449-0d5cd092-a29d-4759-8197-e00ba712cb1a.png">

<br>
<br>

preview:

<img width="1125" alt="image" src="https://user-images.githubusercontent.com/98614666/234232398-09e9d3c5-08e6-4b8a-b4f2-0666e9790c7d.png">

### streamlit\_chat\_app.py

This application provides a chat-like interface for asking GPT-4 questions and receiving answers. It supports multiple query methods, and users can select the desired API for their queries. The application also maintains a conversation history.

To run the application:

```bash
streamlit run gui/streamlit_chat_app.py
```

<br>

<img width="724" alt="image" src="image1.png">

<br>
<br>

preview:

<img width="1125" alt="image" src="image2.png">

Contributing
------------

Feel free to submit pull requests, report bugs, or request new features by opening issues on the GitHub repository.

Bug
----
There is a bug in `streamlit_chat_app.py` right now that I haven't pinpointed yet; it is probably something simple, but I haven't had the time to look for it. Whenever you open a new conversation or access an old one, prompt-answering only starts after the second time you submit text to the input field. Other than that, everything else seems to work as intended.

License
-------

This project is licensed under the MIT License.
0  gui/__init__.py  Normal file
BIN  gui/image1.png  Normal file  (binary file not shown; after: 168 KiB)
BIN  gui/image2.png  Normal file  (binary file not shown; after: 336 KiB)
100  gui/query_methods.py  Normal file
@@ -0,0 +1,100 @@
import os
import sys
from typing import Optional

sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))

from gpt4free import quora, forefront, theb, you
import random


def query_forefront(question: str, proxy: Optional[str] = None) -> str:
    # create an account
    token = forefront.Account.create(logging=False, proxy=proxy)

    # get a response
    try:
        return forefront.Completion.create(token=token, prompt=question, model='gpt-4', proxy=proxy).text
    except Exception as e:
        # Return error message if an exception occurs
        return (
            f'An error occurred: {e}. Please make sure you are using a valid cloudflare clearance token and user agent.'
        )


def query_quora(question: str, proxy: Optional[str] = None) -> str:
    token = quora.Account.create(logging=False, enable_bot_creation=True, proxy=proxy)
    return quora.Completion.create(model='gpt-4', prompt=question, token=token, proxy=proxy).text


def query_theb(question: str, proxy: Optional[str] = None) -> str:
    # Set cloudflare clearance cookie and get answer from GPT-4 model
    try:
        return ''.join(theb.Completion.create(prompt=question, proxy=proxy))

    except Exception as e:
        # Return error message if an exception occurs
        return (
            f'An error occurred: {e}. Please make sure you are using a valid cloudflare clearance token and user agent.'
        )


def query_you(question: str, proxy: Optional[str] = None) -> str:
    # Set cloudflare clearance cookie and get answer from GPT-4 model
    try:
        result = you.Completion.create(prompt=question, proxy=proxy)
        return result.text

    except Exception as e:
        # Return error message if an exception occurs
        return (
            f'An error occurred: {e}. Please make sure you are using a valid cloudflare clearance token and user agent.'
        )


# Define a dictionary containing all query methods
avail_query_methods = {
    "Forefront": query_forefront,
    "Poe": query_quora,
    "Theb": query_theb,
    "You": query_you,
    # "Writesonic": query_writesonic,
    # "T3nsor": query_t3nsor,
    # "Phind": query_phind,
    # "Ora": query_ora,
}


def query(user_input: str, selected_method: str = "Random", proxy: Optional[str] = None) -> str:
    # If a specific query method is selected (not "Random") and the method is in the dictionary, try to call it
    if selected_method != "Random" and selected_method in avail_query_methods:
        try:
            return avail_query_methods[selected_method](user_input, proxy=proxy)
        except Exception as e:
            print(f"Error with {selected_method}: {e}")
            return "😵 Sorry, some error occurred, please try again."

    # Initialize variables for determining success and storing the result
    success = False
    result = "😵 Sorry, some error occurred, please try again."
    # Create a list of available query methods
    query_methods_list = list(avail_query_methods.values())

    # Continue trying different methods until a successful result is obtained or all methods have been tried
    while not success and query_methods_list:
        # Choose a random method from the list
        chosen_query = random.choice(query_methods_list)
        # Find the name of the chosen method
        chosen_query_name = [k for k, v in avail_query_methods.items() if v == chosen_query][0]
        try:
            # Try to call the chosen method with the user input
            result = chosen_query(user_input, proxy=proxy)
            success = True
        except Exception as e:
            print(f"Error with {chosen_query_name}: {e}")
            # Remove the failed method from the list of available methods
            query_methods_list.remove(chosen_query)

    return result
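A short, hypothetical caller for the `query` helper above (not part of the commit): with a named provider it is a single dispatch; with `"Random"` it keeps drawing providers until one succeeds, so the caller sees a plain string either way.

```python
# Hypothetical usage of gui/query_methods.py.
from query_methods import query

# Pin a specific provider...
print(query("Explain quantum computing in 50 words", selected_method="You"))

# ...or let query() retry random providers until one answers.
print(query("Explain quantum computing in 50 words", selected_method="Random"))
```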
@@ -1,25 +1,27 @@
import os
import sys

sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))

import streamlit as st
import phind
from gpt4free import you

phind.cf_clearance = ''
phind.user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'

def phind_get_answer(question: str) -> str:
    # set cf_clearance cookie
def get_answer(question: str) -> str:
    # Set cloudflare clearance cookie and get answer from GPT-4 model
    try:

        result = phind.Completion.create(
            model = 'gpt-4',
            prompt = question,
            results = phind.Search.create(question, actualSearch = True),
            creative = False,
            detailed = False,
            codeContext = '')
        return result.completion.choices[0].text
        result = you.Completion.create(prompt=question)

        return result.text

    except Exception as e:
        return 'An error occured, please make sure you are using a cf_clearance token and correct useragent | %s' % e
        # Return error message if an exception occurs
        return (
            f'An error occurred: {e}. Please make sure you are using a valid cloudflare clearance token and user agent.'
        )


# Set page configuration and add header
st.set_page_config(
    page_title="gpt4freeGUI",
    initial_sidebar_state="expanded",
@@ -27,22 +29,24 @@ st.set_page_config(
    menu_items={
        'Get Help': 'https://github.com/xtekky/gpt4free/blob/main/README.md',
        'Report a bug': "https://github.com/xtekky/gpt4free/issues",
        'About': "### gptfree GUI"
    }
        'About': "### gptfree GUI",
    },
)

st.header('GPT4free GUI')

# Add text area for user input and button to get answer
question_text_area = st.text_area('🤖 Ask Any Question :', placeholder='Explain quantum computing in 50 words')
if st.button('🧠 Think'):
    answer = phind_get_answer(question_text_area)
    answer = get_answer(question_text_area)
    escaped = answer.encode('utf-8').decode('unicode-escape')
    # Display answer
    st.caption("Answer :")
    st.markdown(answer)
    st.markdown(escaped)

# Hide Streamlit footer
hide_streamlit_style = """
<style>
footer {visibility: hidden;}
</style>
"""
st.markdown(hide_streamlit_style, unsafe_allow_html=True)
st.markdown(hide_streamlit_style, unsafe_allow_html=True)
118  gui/streamlit_chat_app.py  Normal file
@@ -0,0 +1,118 @@
import atexit
import os
import sys

sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir))

import streamlit as st
from streamlit_chat import message
from query_methods import query, avail_query_methods
import pickle

conversations_file = "conversations.pkl"


def load_conversations():
    try:
        with open(conversations_file, "rb") as f:
            return pickle.load(f)
    except FileNotFoundError:
        return []
    except EOFError:
        return []


def save_conversations(conversations, current_conversation):
    updated = False
    for idx, conversation in enumerate(conversations):
        if conversation == current_conversation:
            conversations[idx] = current_conversation
            updated = True
            break
    if not updated:
        conversations.append(current_conversation)

    temp_conversations_file = "temp_" + conversations_file
    with open(temp_conversations_file, "wb") as f:
        pickle.dump(conversations, f)

    os.replace(temp_conversations_file, conversations_file)


def exit_handler():
    print("Exiting, saving data...")
    # Perform cleanup operations here, like saving data or closing open files.
    save_conversations(st.session_state.conversations, st.session_state.current_conversation)


# Register the exit_handler function to be called when the program is closing.
atexit.register(exit_handler)

st.header("Chat Placeholder")

if 'conversations' not in st.session_state:
    st.session_state['conversations'] = load_conversations()

if 'input_text' not in st.session_state:
    st.session_state['input_text'] = ''

if 'selected_conversation' not in st.session_state:
    st.session_state['selected_conversation'] = None

if 'input_field_key' not in st.session_state:
    st.session_state['input_field_key'] = 0

if 'query_method' not in st.session_state:
    st.session_state['query_method'] = query

# Initialize new conversation
if 'current_conversation' not in st.session_state or st.session_state['current_conversation'] is None:
    st.session_state['current_conversation'] = {'user_inputs': [], 'generated_responses': []}

input_placeholder = st.empty()
user_input = input_placeholder.text_input(
    'You:', value=st.session_state['input_text'], key=f'input_text_{st.session_state["input_field_key"]}'
)
submit_button = st.button("Submit")

if (user_input and user_input != st.session_state['input_text']) or submit_button:
    output = query(user_input, st.session_state['query_method'])

    escaped_output = output.encode('utf-8').decode('unicode-escape')

    st.session_state.current_conversation['user_inputs'].append(user_input)
    st.session_state.current_conversation['generated_responses'].append(escaped_output)
    save_conversations(st.session_state.conversations, st.session_state.current_conversation)
    st.session_state['input_text'] = ''
    user_input = input_placeholder.text_input(
        'You:', value=st.session_state['input_text'], key=f'input_text_{st.session_state["input_field_key"]}'
    )  # Clear the input field

# Add a button to create a new conversation
if st.sidebar.button("New Conversation"):
    st.session_state['selected_conversation'] = None
    st.session_state['current_conversation'] = {'user_inputs': [], 'generated_responses': []}
    st.session_state['input_field_key'] += 1

st.session_state['query_method'] = st.sidebar.selectbox("Select API:", options=avail_query_methods, index=0)

# Proxy
st.session_state['proxy'] = st.sidebar.text_input("Proxy: ")

# Sidebar
st.sidebar.header("Conversation History")

for idx, conversation in enumerate(st.session_state.conversations):
    if st.sidebar.button(f"Conversation {idx + 1}: {conversation['user_inputs'][0]}", key=f"sidebar_btn_{idx}"):
        st.session_state['selected_conversation'] = idx
        st.session_state['current_conversation'] = st.session_state.conversations[idx]

if st.session_state['selected_conversation'] is not None:
    conversation_to_display = st.session_state.conversations[st.session_state['selected_conversation']]
else:
    conversation_to_display = st.session_state.current_conversation

if conversation_to_display['generated_responses']:
    for i in range(len(conversation_to_display['generated_responses']) - 1, -1, -1):
        message(conversation_to_display["generated_responses"][i], key=f"display_generated_{i}")
        message(conversation_to_display['user_inputs'][i], is_user=True, key=f"display_user_{i}")
@@ -1,34 +0,0 @@
### Example: `phind` (use like openai pypi package) <a name="example-phind"></a>

```python
import phind

# set cf_clearance cookie (needed again)
phind.cf_clearance = 'xx.xx-1682166681-0-160'
phind.user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'  # same as the one from browser you got cf_clearance from

prompt = 'who won the quatar world cup'

# help needed: not getting newlines from the stream, please submit a PR if you know how to fix this
# stream completion
for result in phind.StreamingCompletion.create(
        model = 'gpt-4',
        prompt = prompt,
        results = phind.Search.create(prompt, actualSearch = True),  # create search (set actualSearch to False to disable internet)
        creative = False,
        detailed = False,
        codeContext = ''):  # up to 3000 chars of code

    print(result.completion.choices[0].text, end='', flush=True)

# normal completion
result = phind.Completion.create(
    model = 'gpt-4',
    prompt = prompt,
    results = phind.Search.create(prompt, actualSearch = True),  # create search (set actualSearch to False to disable internet)
    creative = False,
    detailed = False,
    codeContext = '')  # up to 3000 chars of code

print(result.completion.choices[0].text)
```
@@ -1,291 +0,0 @@
from urllib.parse import quote
from time import time
from datetime import datetime
from queue import Queue, Empty
from threading import Thread
from re import findall

from curl_cffi.requests import post

cf_clearance = ''
user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'


class PhindResponse:

    class Completion:

        class Choices:
            def __init__(self, choice: dict) -> None:
                self.text = choice['text']
                self.content = self.text.encode()
                self.index = choice['index']
                self.logprobs = choice['logprobs']
                self.finish_reason = choice['finish_reason']

            def __repr__(self) -> str:
                return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''

        def __init__(self, choices: dict) -> None:
            self.choices = list(map(self.Choices, choices))

    class Usage:
        def __init__(self, usage_dict: dict) -> None:
            self.prompt_tokens = usage_dict['prompt_tokens']
            self.completion_tokens = usage_dict['completion_tokens']
            self.total_tokens = usage_dict['total_tokens']

        def __repr__(self):
            return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''

    def __init__(self, response_dict: dict) -> None:
        self.response_dict = response_dict
        self.id = response_dict['id']
        self.object = response_dict['object']
        self.created = response_dict['created']
        self.model = response_dict['model']
        self.completion = self.Completion(response_dict['choices'])
        self.usage = self.Usage(response_dict['usage'])

    def json(self) -> dict:
        return self.response_dict


class Search:
    def create(prompt: str, actualSearch: bool = True, language: str = 'en') -> dict:  # None = no search
        if user_agent == '':
            raise ValueError('user_agent must be set, refer to documentation')
        if cf_clearance == '':
            raise ValueError('cf_clearance must be set, refer to documentation')

        if not actualSearch:
            return {
                '_type': 'SearchResponse',
                'queryContext': {
                    'originalQuery': prompt
                },
                'webPages': {
                    'webSearchUrl': f'https://www.bing.com/search?q={quote(prompt)}',
                    'totalEstimatedMatches': 0,
                    'value': []
                },
                'rankingResponse': {
                    'mainline': {
                        'items': []
                    }
                }
            }

        headers = {
            'authority': 'www.phind.com',
            'accept': '*/*',
            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'cookie': f'cf_clearance={cf_clearance}',
            'origin': 'https://www.phind.com',
            'referer': 'https://www.phind.com/search?q=hi&c=&source=searchbox&init=true',
            'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'user-agent': user_agent
        }

        return post('https://www.phind.com/api/bing/search', headers=headers, json={
            'q': prompt,
            'userRankList': {},
            'browserLanguage': language}).json()['rawBingResults']


class Completion:
    def create(
            model='gpt-4',
            prompt: str = '',
            results: dict = None,
            creative: bool = False,
            detailed: bool = False,
            codeContext: str = '',
            language: str = 'en') -> PhindResponse:

        if user_agent == '':
            raise ValueError('user_agent must be set, refer to documentation')

        if cf_clearance == '':
            raise ValueError('cf_clearance must be set, refer to documentation')

        if results is None:
            results = Search.create(prompt, actualSearch=True)

        if len(codeContext) > 2999:
            raise ValueError('codeContext must be less than 3000 characters')

        models = {
            'gpt-4': 'expert',
            'gpt-3.5-turbo': 'intermediate',
            'gpt-3.5': 'intermediate',
        }

        json_data = {
            'question': prompt,
            'bingResults': results,  # response.json()['rawBingResults'],
            'codeContext': codeContext,
            'options': {
                'skill': models[model],
                'date': datetime.now().strftime("%d/%m/%Y"),
                'language': language,
                'detailed': detailed,
                'creative': creative
            }
        }

        headers = {
            'authority': 'www.phind.com',
            'accept': '*/*',
            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'content-type': 'application/json',
            'cookie': f'cf_clearance={cf_clearance}',
            'origin': 'https://www.phind.com',
            'referer': 'https://www.phind.com/search?q=hi&c=&source=searchbox&init=true',
            'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'user-agent': user_agent
        }

        completion = ''
        response = post('https://www.phind.com/api/infer/answer', headers=headers, json=json_data, timeout=99999, impersonate='chrome110')
        for line in response.text.split('\r\n\r\n'):
            completion += (line.replace('data: ', ''))

        return PhindResponse({
            'id': f'cmpl-1337-{int(time())}',
            'object': 'text_completion',
            'created': int(time()),
            'model': models[model],
            'choices': [{
                'text': completion,
                'index': 0,
                'logprobs': None,
                'finish_reason': 'stop'
            }],
            'usage': {
                'prompt_tokens': len(prompt),
                'completion_tokens': len(completion),
                'total_tokens': len(prompt) + len(completion)
            }
        })


class StreamingCompletion:
    message_queue = Queue()
    stream_completed = False

    def request(model, prompt, results, creative, detailed, codeContext, language) -> None:

        models = {
            'gpt-4': 'expert',
            'gpt-3.5-turbo': 'intermediate',
            'gpt-3.5': 'intermediate',
        }

        json_data = {
            'question': prompt,
            'bingResults': results,
            'codeContext': codeContext,
            'options': {
                'skill': models[model],
                'date': datetime.now().strftime("%d/%m/%Y"),
                'language': language,
                'detailed': detailed,
                'creative': creative
            }
        }

        headers = {
            'authority': 'www.phind.com',
            'accept': '*/*',
            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
            'content-type': 'application/json',
            'cookie': f'cf_clearance={cf_clearance}',
            'origin': 'https://www.phind.com',
            'referer': 'https://www.phind.com/search?q=hi&c=&source=searchbox&init=true',
            'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-origin',
            'user-agent': user_agent
        }

        response = post('https://www.phind.com/api/infer/answer',
                        headers=headers, json=json_data, timeout=99999, impersonate='chrome110', content_callback=StreamingCompletion.handle_stream_response)

        StreamingCompletion.stream_completed = True

    @staticmethod
    def create(
            model: str = 'gpt-4',
            prompt: str = '',
            results: dict = None,
            creative: bool = False,
            detailed: bool = False,
            codeContext: str = '',
            language: str = 'en'):

        if user_agent == '':
            raise ValueError('user_agent must be set, refer to documentation')
        if cf_clearance == '':
            raise ValueError('cf_clearance must be set, refer to documentation')

        if results is None:
            results = Search.create(prompt, actualSearch=True)

        if len(codeContext) > 2999:
            raise ValueError('codeContext must be less than 3000 characters')

        Thread(target=StreamingCompletion.request, args=[
            model, prompt, results, creative, detailed, codeContext, language]).start()

        while StreamingCompletion.stream_completed != True or not StreamingCompletion.message_queue.empty():
            try:
                chunk = StreamingCompletion.message_queue.get(timeout=0)

                if chunk == b'data: \r\ndata: \r\ndata: \r\n\r\n':
                    chunk = b'data: \n\n\r\n\r\n'

                chunk = chunk.decode()

                chunk = chunk.replace('data: \r\n\r\ndata: ', 'data: \n')
                chunk = chunk.replace('\r\ndata: \r\ndata: \r\n\r\n', '\n\n\r\n\r\n')
                chunk = chunk.replace('data: ', '').replace('\r\n\r\n', '')

                yield PhindResponse({
                    'id': f'cmpl-1337-{int(time())}',
                    'object': 'text_completion',
                    'created': int(time()),
                    'model': model,
                    'choices': [{
                        'text': chunk,
                        'index': 0,
                        'logprobs': None,
                        'finish_reason': 'stop'
                    }],
                    'usage': {
                        'prompt_tokens': len(prompt),
                        'completion_tokens': len(chunk),
                        'total_tokens': len(prompt) + len(chunk)
                    }
                })

            except Empty:
                pass

    @staticmethod
    def handle_stream_response(response):
        StreamingCompletion.message_queue.put(response)
Binary file not shown.
702  poetry.lock  generated  Normal file
@@ -0,0 +1,702 @@
|
||||
# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "async-generator"
|
||||
version = "1.10"
|
||||
description = "Async generators and context managers for Python 3.5+"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
files = [
|
||||
{file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"},
|
||||
{file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "attrs"
|
||||
version = "23.1.0"
|
||||
description = "Classes Without Boilerplate"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
|
||||
{file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
importlib-metadata = {version = "*", markers = "python_version < \"3.8\""}
|
||||
|
||||
[package.extras]
|
||||
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
|
||||
dev = ["attrs[docs,tests]", "pre-commit"]
|
||||
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
|
||||
tests = ["attrs[tests-no-zope]", "zope-interface"]
|
||||
tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2022.12.7"
|
||||
description = "Python package for providing Mozilla's CA Bundle."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
|
||||
{file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cffi"
|
||||
version = "1.15.1"
|
||||
description = "Foreign Function Interface for Python calling C code."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
|
||||
{file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
|
||||
{file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
|
||||
{file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
|
||||
{file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
|
||||
{file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
|
||||
{file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
|
||||
{file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
|
||||
{file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
|
||||
{file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
|
||||
{file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
|
||||
{file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
|
||||
{file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
|
||||
{file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pycparser = "*"
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "3.1.0"
|
||||
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7.0"
|
||||
files = [
|
||||
{file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"},
|
||||
{file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"},
|
||||
{file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"},
|
||||
{file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"},
|
||||
{file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"},
|
||||
{file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"},
|
||||
{file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "colorama"
|
||||
version = "0.4.6"
|
||||
description = "Cross-platform colored terminal text."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
|
||||
files = [
|
||||
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
|
||||
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "curl-cffi"
|
||||
version = "0.5.5"
|
||||
description = "libcurl ffi bindings for Python, with impersonation support"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "curl_cffi-0.5.5-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:4322f330167a5c87f6913d32b73eb7da9fe3e3dd86b28f137469f432b346d9bb"},
|
||||
{file = "curl_cffi-0.5.5-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:48b0dcc6e91d68694e6472fa47b7f3457d8bd24e42c91e15d6e2b650f0d9d206"},
|
||||
{file = "curl_cffi-0.5.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fda4c35f03ae593b7667d1a09bcd718d1399a5596b936cacb65dcd4bd705e95f"},
|
||||
{file = "curl_cffi-0.5.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660b2174b71d86bd7b136a6b91434a4c3edbcb1de718f3d337b688955872fbcc"},
|
||||
{file = "curl_cffi-0.5.5-cp37-abi3-win_amd64.whl", hash = "sha256:7adb44515cb165ac661a8e5453c41d75bc284f494921051b64f2889c2c518544"},
|
||||
{file = "curl_cffi-0.5.5.tar.gz", hash = "sha256:db94b8d0ad52f3b5c55d32225c29a8219a19592d882075965a78aa9e1a0dead1"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cffi = ">=1.12.0"
|
||||
|
||||
[package.extras]
|
||||
build = ["cibuildwheel", "wheel"]
|
||||
dev = ["autoflake (==1.4)", "black (==22.8.0)", "coverage (==6.4.1)", "cryptography (==38.0.3)", "flake8 (==6.0.0)", "flake8-bugbear (==22.7.1)", "flake8-pie (==0.15.0)", "httpx (==0.23.1)", "isort (==5.10.1)", "mypy (==0.971)", "pytest (==7.1.2)", "pytest-asyncio (==0.19.0)", "pytest-trio (==0.7.0)", "trio (==0.21.0)", "trio-typing (==0.7.0)", "trustme (==0.9.0)", "types-certifi (==2021.10.8.2)", "uvicorn (==0.18.3)"]
|
||||
test = ["cryptography (==38.0.3)", "httpx (==0.23.1)", "pytest (==7.1.2)", "pytest-asyncio (==0.19.0)", "pytest-trio (==0.7.0)", "trio (==0.21.0)", "trio-typing (==0.7.0)", "trustme (==0.9.0)", "types-certifi (==2021.10.8.2)", "uvicorn (==0.18.3)"]
|
||||
|
||||
[[package]]
|
||||
name = "exceptiongroup"
|
||||
version = "1.1.1"
|
||||
description = "Backport of PEP 654 (exception groups)"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"},
|
||||
{file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
test = ["pytest (>=6)"]
|
||||
|
||||
[[package]]
|
||||
name = "fake-useragent"
|
||||
version = "1.1.3"
|
||||
description = "Up-to-date simple useragent faker with real world database"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "fake-useragent-1.1.3.tar.gz", hash = "sha256:1c06f0aa7d6e4894b919b30b9c7ebd72ff497325191057fbb5df3d5db06b93fc"},
|
||||
{file = "fake_useragent-1.1.3-py3-none-any.whl", hash = "sha256:695d3b1bf7d11d04ab0f971fb73b0ca8de98b78bbadfbc8bacbc9a48423f7531"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
importlib-metadata = {version = ">=4.0,<5.0", markers = "python_version < \"3.8\""}
|
||||
importlib-resources = {version = ">=5.0", markers = "python_version < \"3.10\""}
|
||||
|
||||
[[package]]
|
||||
name = "h11"
|
||||
version = "0.14.0"
|
||||
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
|
||||
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing-extensions = {version = "*", markers = "python_version < \"3.8\""}
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.4"
|
||||
description = "Internationalized Domain Names in Applications (IDNA)"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
files = [
|
||||
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
|
||||
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "importlib-metadata"
|
||||
version = "4.13.0"
|
||||
description = "Read metadata from Python packages"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"},
|
||||
{file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
|
||||
zipp = ">=0.5"
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"]
|
||||
perf = ["ipython"]
|
||||
testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "importlib-resources"
|
||||
version = "5.12.0"
|
||||
description = "Read resources from Python packages"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"},
|
||||
{file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
|
||||
testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "names"
|
||||
version = "0.3.0"
|
||||
description = "Generate random names"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "names-0.3.0.tar.gz", hash = "sha256:726e46254f2ed03f1ffb5d941dae3bc67c35123941c29becd02d48d0caa2a671"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "outcome"
|
||||
version = "1.2.0"
|
||||
description = "Capture the outcome of Python function calls."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "outcome-1.2.0-py2.py3-none-any.whl", hash = "sha256:c4ab89a56575d6d38a05aa16daeaa333109c1f96167aba8901ab18b6b5e0f7f5"},
|
||||
{file = "outcome-1.2.0.tar.gz", hash = "sha256:6f82bd3de45da303cf1f771ecafa1633750a358436a8bb60e06a1ceb745d2672"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
attrs = ">=19.2.0"
|
||||
|
||||
[[package]]
|
||||
name = "pycparser"
|
||||
version = "2.21"
|
||||
description = "C parser in Python"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
files = [
|
||||
{file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
|
||||
{file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic"
|
||||
version = "1.10.7"
|
||||
description = "Data validation and settings management using python type hints"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"},
|
||||
{file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"},
|
||||
{file = "pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"},
|
||||
{file = "pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"},
|
||||
{file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"},
|
||||
{file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"},
|
||||
{file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"},
|
||||
{file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"},
|
||||
{file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"},
|
||||
{file = "pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"},
|
||||
{file = "pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"},
|
||||
{file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"},
|
||||
{file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"},
|
||||
{file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"},
|
||||
{file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"},
|
||||
{file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"},
|
||||
{file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"},
|
||||
{file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"},
|
||||
{file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"},
|
||||
{file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"},
|
||||
{file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"},
|
||||
{file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"},
|
||||
{file = "pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"},
|
||||
{file = "pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"},
|
||||
{file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"},
|
||||
{file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"},
|
||||
{file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"},
|
||||
{file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"},
|
||||
{file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"},
|
||||
{file = "pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"},
|
||||
{file = "pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"},
|
||||
{file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"},
|
||||
{file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"},
|
||||
{file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"},
|
||||
{file = "pydantic-1.10.7-py3-none-any.whl", hash = "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"},
|
||||
{file = "pydantic-1.10.7.tar.gz", hash = "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing-extensions = ">=4.2.0"
|
||||
|
||||
[package.extras]
|
||||
dotenv = ["python-dotenv (>=0.10.4)"]
|
||||
email = ["email-validator (>=1.0.3)"]
|
||||
|
||||
[[package]]
|
||||
name = "pydub"
|
||||
version = "0.25.1"
|
||||
description = "Manipulate audio with an simple and easy high level interface"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6"},
|
||||
{file = "pydub-0.25.1.tar.gz", hash = "sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pypasser"
|
||||
version = "0.0.5"
|
||||
description = "Bypassing reCaptcha V3 by sending HTTP requests & solving reCaptcha V2 using speech to text."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "PyPasser-0.0.5.tar.gz", hash = "sha256:72b0ded34edcfa885a13ecc825c5a058503b68521ab87294205d7ff5cd569515"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pydub = "0.25.1"
|
||||
PySocks = "1.7.1"
|
||||
requests = ">=2.25.1,<3.0"
|
||||
selenium = "*"
|
||||
SpeechRecognition = "3.8.1"
|
||||
|
||||
[[package]]
|
||||
name = "pysocks"
|
||||
version = "1.7.1"
|
||||
description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
||||
files = [
|
||||
{file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"},
|
||||
{file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"},
|
||||
{file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.29.0"
|
||||
description = "Python HTTP for Humans."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "requests-2.29.0-py3-none-any.whl", hash = "sha256:e8f3c9be120d3333921d213eef078af392fba3933ab7ed2d1cba3b56f2568c3b"},
|
||||
{file = "requests-2.29.0.tar.gz", hash = "sha256:f2e34a75f4749019bb0e3effb66683630e4ffeaf75819fb51bebef1bf5aef059"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2017.4.17"
|
||||
charset-normalizer = ">=2,<4"
|
||||
idna = ">=2.5,<4"
|
||||
urllib3 = ">=1.21.1,<1.27"
|
||||
|
||||
[package.extras]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
||||
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
||||
|
||||
[[package]]
|
||||
name = "selenium"
|
||||
version = "4.9.0"
|
||||
description = ""
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "selenium-4.9.0-py3-none-any.whl", hash = "sha256:4c19e6aac202719373108d53a5a8e9336ba8d2b25822ca32ae6ff37acbabbdbe"},
|
||||
{file = "selenium-4.9.0.tar.gz", hash = "sha256:478fae77cdfaec32adb1e68d59632c8c191f920535282abcaa2d1a3d98655624"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2021.10.8"
|
||||
trio = ">=0.17,<1.0"
|
||||
trio-websocket = ">=0.9,<1.0"
|
||||
urllib3 = {version = ">=1.26,<2.0", extras = ["socks"]}
|
||||
|
||||
[[package]]
|
||||
name = "sniffio"
|
||||
version = "1.3.0"
|
||||
description = "Sniff out which async library your code is running under"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
|
||||
{file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sortedcontainers"
|
||||
version = "2.4.0"
|
||||
description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
|
||||
{file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "speechrecognition"
|
||||
version = "3.8.1"
|
||||
description = "Library for performing speech recognition, with support for several engines and APIs, online and offline."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "SpeechRecognition-3.8.1-py2.py3-none-any.whl", hash = "sha256:4d8f73a0c05ec70331c3bacaa89ecc06dfa8d9aba0899276664cda06ab597e8e"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tls-client"
|
||||
version = "0.2"
|
||||
description = "Advanced Python HTTP Client."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "tls_client-0.2-py3-none-any.whl", hash = "sha256:26012084e753d7531d32960ec706f81188bb7d825586675fa300b2b44f791412"},
|
||||
{file = "tls_client-0.2.tar.gz", hash = "sha256:eef3860c6f186fa866dc782f1b9e43ae837e40e831f50831c3515cee7c84fd0f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "trio"
|
||||
version = "0.22.0"
|
||||
description = "A friendly Python library for async concurrency and I/O"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "trio-0.22.0-py3-none-any.whl", hash = "sha256:f1dd0780a89bfc880c7c7994519cb53f62aacb2c25ff487001c0052bd721cdf0"},
|
||||
{file = "trio-0.22.0.tar.gz", hash = "sha256:ce68f1c5400a47b137c5a4de72c7c901bd4e7a24fbdebfe9b41de8c6c04eaacf"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
async-generator = ">=1.9"
|
||||
attrs = ">=19.2.0"
|
||||
cffi = {version = ">=1.14", markers = "os_name == \"nt\" and implementation_name != \"pypy\""}
|
||||
exceptiongroup = {version = ">=1.0.0rc9", markers = "python_version < \"3.11\""}
|
||||
idna = "*"
|
||||
outcome = "*"
|
||||
sniffio = "*"
|
||||
sortedcontainers = "*"
|
||||
|
||||
[[package]]
|
||||
name = "trio-websocket"
|
||||
version = "0.10.2"
|
||||
description = "WebSocket library for Trio"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "trio-websocket-0.10.2.tar.gz", hash = "sha256:af13e9393f9051111300287947ec595d601758ce3d165328e7d36325135a8d62"},
|
||||
{file = "trio_websocket-0.10.2-py3-none-any.whl", hash = "sha256:0908435e4eecc49d830ae1c4d6c47b978a75f00594a2be2104d58b61a04cdb53"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
exceptiongroup = "*"
|
||||
trio = ">=0.11"
|
||||
wsproto = ">=0.14"
|
||||
|
||||
[[package]]
|
||||
name = "twocaptcha"
|
||||
version = "0.0.1"
|
||||
description = "2Captcha Python3 API Wrapper"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "TwoCaptcha-0.0.1.tar.gz", hash = "sha256:fd04127de71ca4bd31c22add84a5bcb7c683cf9ee5bf503ca14a8f372ac76a0e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
requests = "*"
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.5.0"
|
||||
description = "Backported and Experimental Type Hints for Python 3.7+"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
|
||||
{file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "1.26.15"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
|
||||
files = [
|
||||
{file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"},
|
||||
{file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7,<2.0", optional = true, markers = "extra == \"socks\""}
|
||||
|
||||
[package.extras]
|
||||
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
|
||||
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "websocket-client"
|
||||
version = "1.5.1"
|
||||
description = "WebSocket client for Python with low level API options"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "websocket-client-1.5.1.tar.gz", hash = "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40"},
|
||||
{file = "websocket_client-1.5.1-py3-none-any.whl", hash = "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"]
|
||||
optional = ["python-socks", "wsaccel"]
|
||||
test = ["websockets"]
|
||||
|
||||
[[package]]
|
||||
name = "wsproto"
|
||||
version = "1.2.0"
|
||||
description = "WebSockets state-machine based protocol implementation"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7.0"
|
||||
files = [
|
||||
{file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"},
|
||||
{file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
h11 = ">=0.9.0,<1"
|
||||
|
||||
[[package]]
|
||||
name = "zipp"
|
||||
version = "3.15.0"
|
||||
description = "Backport of pathlib-compatible object wrapper for zip files"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"},
|
||||
{file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
|
||||
testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"]
|
||||
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.7"
|
||||
content-hash = "d22aa72ee6bd554c0676a3b6b723090d156a720d03d3b05422a01aa9bf22dda2"
|
||||
28
pyproject.toml
Normal file
@@ -0,0 +1,28 @@
[tool.poetry]
name = "openai-rev"
version = "0.1.0"
description = ""
authors = []
license = "GPL-3.0"
readme = "README.md"
packages = [{ include = "gpt4free" }]
exclude = ["**/*.txt"]

[tool.poetry.dependencies]
python = "^3.7"
websocket-client = "^1.5.1"
requests = "2.29.0"
tls-client = "^0.2"
pypasser = "^0.0.5"
names = "^0.3.0"
colorama = "^0.4.6"
curl-cffi = "^0.5.5"
selenium = "^4.9.0"
fake-useragent = "^1.1.3"
twocaptcha = "^0.0.1"
pydantic = "^1.10.7"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
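The packages entry above means the Poetry build ships gpt4free as an importable package, with each provider exposed as a submodule. A minimal smoke test of that layout (a sketch only; it assumes the project has already been installed, e.g. via poetry install):

# Sketch: check the package layout declared in pyproject.toml.
# Assumes the project was installed first (e.g. `poetry install`).
import gpt4free
from gpt4free import Provider, forefront, quora, theb, you  # providers ship as submodules

print(Provider.You)  # Provider names the supported backends for gpt4free.Completion.create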
@@ -7,4 +7,8 @@ colorama
curl_cffi
streamlit==1.21.0
selenium
fake-useragent
fake-useragent
twocaptcha
https://github.com/AI-Yash/st-chat/archive/refs/pull/24/head.zip
pydantic
pymailtm
@@ -1,11 +1,9 @@
import forefront
from gpt4free import forefront

# create an account
token = forefront.Account.create(logging=True)
print(token)

# get a response
for response in forefront.StreamingCompletion.create(token = token,
prompt = 'hello world', model='gpt-4'):

print(response.completion.choices[0].text, end = '')
for response in forefront.StreamingCompletion.create(token=token, prompt='hello world', model='gpt-4'):
print(response.text, end='')

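The updated test streams chunks and prints each chunk's .text as it arrives. A sketch of collecting that same stream into one string (assuming, as in the updated test, that the generator yields chunk objects exposing .text):

# Sketch: collect the streamed forefront chunks into a single string.
# Assumes chunks expose .text, as in the updated test above.
from gpt4free import forefront

token = forefront.Account.create(logging=False)
full_text = ''.join(
    chunk.text  # each yielded chunk carries one piece of the reply
    for chunk in forefront.StreamingCompletion.create(token=token, prompt='hello world', model='gpt-4')
)
print(full_text)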
14
testing/openaihosted_test.py
Normal file
@@ -0,0 +1,14 @@
import openaihosted

messages = [{"role": "system", "content": "You are a helpful assistant."}]
while True:
    question = input("Question: ")
    if question == "!stop":
        break

    messages.append({"role": "user", "content": question})
    request = openaihosted.Completion.create(messages=messages)

    response = request["responses"]
    messages.append({"role": "assistant", "content": response})
    print(f"Answer: {response}")
@@ -1,32 +0,0 @@
import phind

# set cf_clearance cookie ( not needed at the moment)
phind.cf_clearance = 'MDzwnr3ZWk_ap8u.iwwMR5F3WccfOkhUy_zGNDpcF3s-1682497341-0-160'
phind.user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'

prompt = 'hello world'

# normal completion
result = phind.Completion.create(
model = 'gpt-4',
prompt = prompt,
results = phind.Search.create(prompt, actualSearch = False), # create search (set actualSearch to False to disable internet)
creative = False,
detailed = False,
codeContext = '') # up to 3000 chars of code

print(result.completion.choices[0].text)

prompt = 'who won the quatar world cup'

# help needed: not getting newlines from the stream, please submit a PR if you know how to fix this
# stream completion
for result in phind.StreamingCompletion.create(
model = 'gpt-4',
prompt = prompt,
results = phind.Search.create(prompt, actualSearch = True), # create search (set actualSearch to False to disable internet)
creative = False,
detailed = False,
codeContext = ''): # up to 3000 chars of code

print(result.completion.choices[0].text, end='', flush=True)
@@ -1,104 +1,109 @@
from requests import Session
from tls_client import Session as TLS
from json import dumps
from hashlib import md5
from time import sleep
from json import dumps
from re import findall
from pypasser import reCaptchaV3
from quora import extract_formkey
from quora.mail import Emailnator
from typing import Optional

from tls_client import Session as TLS
from twocaptcha import TwoCaptcha

from gpt4free.quora import extract_formkey
from gpt4free.quora.mail import Emailnator

solver = TwoCaptcha('72747bf24a9d89b4dcc1b24875efd358')


class Account:
def create(proxy: None or str = None, logging: bool = False, enable_bot_creation: bool = False):
client = TLS(client_identifier='chrome110')
client.proxies = {
'http': f'http://{proxy}',
'https': f'http://{proxy}'} if proxy else None
@staticmethod
def create(proxy: Optional[str] = None, logging: bool = False, enable_bot_creation: bool = False):
client = TLS(client_identifier='chrome110')
client.proxies = {'http': f'http://{proxy}', 'https': f'http://{proxy}'} if proxy else None

mail_client = Emailnator()
mail_address = mail_client.get_mail()
mail_client = Emailnator()
mail_address = mail_client.get_mail()

if logging: print('email', mail_address)
if logging:
print('email', mail_address)

client.headers = {
'authority' : 'poe.com',
'accept' : '*/*',
'authority': 'poe.com',
'accept': '*/*',
'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
'content-type' : 'application/json',
'origin' : 'https://poe.com',
'poe-formkey' : 'null',
'poe-tag-id' : 'null',
'poe-tchannel' : 'null',
'referer' : 'https://poe.com/login',
'sec-ch-ua' : '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
'sec-ch-ua-mobile' : '?0',
'content-type': 'application/json',
'origin': 'https://poe.com',
'poe-formkey': 'null',
'poe-tag-id': 'null',
'poe-tchannel': 'null',
'referer': 'https://poe.com/login',
'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"macOS"',
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'same-origin',
'user-agent' : 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
}

client.headers["poe-formkey"] = extract_formkey(client.get('https://poe.com/login').text)
client.headers["poe-formkey"] = extract_formkey(client.get('https://poe.com/login').text)
client.headers["poe-tchannel"] = client.get('https://poe.com/api/settings').json()['tchannelData']['channel']

#token = reCaptchaV3('https://www.recaptcha.net/recaptcha/enterprise/anchor?ar=1&k=6LflhEElAAAAAI_ewVwRWI9hsyV4mbZnYAslSvlG&co=aHR0cHM6Ly9wb2UuY29tOjQ0Mw..&hl=en&v=4PnKmGB9wRHh1i04o7YUICeI&size=invisible&cb=bi6ivxoskyal')
token = solver.recaptcha(sitekey='6LflhEElAAAAAI_ewVwRWI9hsyV4mbZnYAslSvlG',
url = 'https://poe.com/login?redirect_url=%2F',
version = 'v3',
enterprise = 1,
invisible = 1,
action = 'login',)['code']
# token = reCaptchaV3('https://www.recaptcha.net/recaptcha/enterprise/anchor?ar=1&k=6LflhEElAAAAAI_ewVwRWI9hsyV4mbZnYAslSvlG&co=aHR0cHM6Ly9wb2UuY29tOjQ0Mw..&hl=en&v=4PnKmGB9wRHh1i04o7YUICeI&size=invisible&cb=bi6ivxoskyal')
token = solver.recaptcha(
sitekey='6LflhEElAAAAAI_ewVwRWI9hsyV4mbZnYAslSvlG',
url='https://poe.com/login?redirect_url=%2F',
version='v3',
enterprise=1,
invisible=1,
action='login',
)['code']

payload = dumps(separators = (',', ':'), obj = {
'queryName': 'MainSignupLoginSection_sendVerificationCodeMutation_Mutation',
'variables': {
'emailAddress' : mail_address,
'phoneNumber' : None,
'recaptchaToken': token
payload = dumps(
separators=(',', ':'),
obj={
'queryName': 'MainSignupLoginSection_sendVerificationCodeMutation_Mutation',
'variables': {'emailAddress': mail_address, 'phoneNumber': None, 'recaptchaToken': token},
'query': 'mutation MainSignupLoginSection_sendVerificationCodeMutation_Mutation(\n $emailAddress: String\n $phoneNumber: String\n $recaptchaToken: String\n) {\n sendVerificationCode(verificationReason: login, emailAddress: $emailAddress, phoneNumber: $phoneNumber, recaptchaToken: $recaptchaToken) {\n status\n errorMessage\n }\n}\n',
},
'query': 'mutation MainSignupLoginSection_sendVerificationCodeMutation_Mutation(\n $emailAddress: String\n $phoneNumber: String\n $recaptchaToken: String\n) {\n sendVerificationCode(verificationReason: login, emailAddress: $emailAddress, phoneNumber: $phoneNumber, recaptchaToken: $recaptchaToken) {\n status\n errorMessage\n }\n}\n',
})
)

base_string = payload + client.headers["poe-formkey"] + 'WpuLMiXEKKE98j56k'
client.headers["poe-tag-id"] = md5(base_string.encode()).hexdigest()

client.headers["poe-tag-id"] = md5(base_string.encode()).hexdigest()

print(dumps(client.headers, indent=4))

response = client.post('https://poe.com/api/gql_POST', data=payload)

if 'automated_request_detected' in response.text:
print('please try using a proxy / wait for fix')

if 'Bad Request' in response.text:
if logging: print('bad request, retrying...' , response.json())
if logging:
print('bad request, retrying...', response.json())
quit()

if logging: print('send_code' ,response.json())

if logging:
print('send_code', response.json())

mail_content = mail_client.get_message()
mail_token = findall(r';">(\d{6,7})</div>', mail_content)[0]
mail_token = findall(r';">(\d{6,7})</div>', mail_content)[0]

if logging: print('code', mail_token)
if logging:
print('code', mail_token)

payload = dumps(separators = (',', ':'), obj={
"queryName": "SignupOrLoginWithCodeSection_signupWithVerificationCodeMutation_Mutation",
"variables": {
"verificationCode": str(mail_token),
"emailAddress": mail_address,
"phoneNumber": None
payload = dumps(
separators=(',', ':'),
obj={
"queryName": "SignupOrLoginWithCodeSection_signupWithVerificationCodeMutation_Mutation",
"variables": {"verificationCode": str(mail_token), "emailAddress": mail_address, "phoneNumber": None},
"query": "mutation SignupOrLoginWithCodeSection_signupWithVerificationCodeMutation_Mutation(\n $verificationCode: String!\n $emailAddress: String\n $phoneNumber: String\n) {\n signupWithVerificationCode(verificationCode: $verificationCode, emailAddress: $emailAddress, phoneNumber: $phoneNumber) {\n status\n errorMessage\n }\n}\n",
},
"query": "mutation SignupOrLoginWithCodeSection_signupWithVerificationCodeMutation_Mutation(\n $verificationCode: String!\n $emailAddress: String\n $phoneNumber: String\n) {\n signupWithVerificationCode(verificationCode: $verificationCode, emailAddress: $emailAddress, phoneNumber: $phoneNumber) {\n status\n errorMessage\n }\n}\n"
})
)

base_string = payload + client.headers["poe-formkey"] + 'WpuLMiXEKKE98j56k'
client.headers["poe-tag-id"] = md5(base_string.encode()).hexdigest()
client.headers["poe-tag-id"] = md5(base_string.encode()).hexdigest()

response = client.post('https://poe.com/api/gql_POST', data = payload)
if logging: print('verify_code', response.json())

response = client.post('https://poe.com/api/gql_POST', data=payload)
if logging:
print('verify_code', response.json())

Account.create(proxy = 'xtekky:wegwgwegwed_streaming-1@geo.iproyal.com:12321', logging = True)

Account.create(proxy='xtekky:wegwgwegwed_streaming-1@geo.iproyal.com:12321', logging=True)

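Both gql_POST requests above sign the payload the same way: poe-tag-id is the MD5 hex digest of the JSON payload, the scraped poe-formkey, and a fixed salt. That step in isolation, as a sketch (the helper name is ours, for illustration):

from hashlib import md5

def compute_poe_tag_id(payload: str, formkey: str) -> str:
    # poe-tag-id = MD5 over payload + formkey + the fixed salt used in the module above
    return md5((payload + formkey + 'WpuLMiXEKKE98j56k').encode()).hexdigest()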
@@ -1,13 +1,11 @@
import quora
from time import sleep

token = quora.Account.create(proxy = None,logging = True)
from gpt4free import quora

token = quora.Account.create(proxy=None, logging=True)
print('token', token)

sleep(2)

for response in quora.StreamingCompletion.create(model = 'gpt-3.5-turbo',
prompt = 'hello world',
token = token):

print(response.completion.choices[0].text, end="", flush=True)
for response in quora.StreamingCompletion.create(model='ChatGPT', prompt='hello world', token=token):
print(response.text, flush=True)

@@ -1,18 +1,12 @@
import quora
from gpt4free import quora

token = quora.Account.create(logging = True, enable_bot_creation=True)
token = quora.Account.create(logging=True, enable_bot_creation=True)

model = quora.Model.create(
token = token,
model = 'gpt-3.5-turbo', # or claude-instant-v1.0
system_prompt = 'you are ChatGPT a large language model ...'
token=token, model='ChatGPT', system_prompt='you are ChatGPT a large language model ...' # or claude-instant-v1.0
)

print(model.name)

for response in quora.StreamingCompletion.create(
custom_model = model.name,
prompt ='hello world',
token = token):

print(response.completion.choices[0].text)
for response in quora.StreamingCompletion.create(custom_model=model.name, prompt='hello world', token=token):
print(response.text)

@@ -1,7 +1,4 @@
import sqlchat

for response in sqlchat.StreamCompletion.create(
prompt = 'write python code to reverse a string',
messages = []):

print(response.completion.choices[0].text, end='')
for response in sqlchat.StreamCompletion.create(prompt='write python code to reverse a string', messages=[]):
print(response.completion.choices[0].text, end='')

@@ -1,7 +1,4 @@
import t3nsor

for response in t3nsor.StreamCompletion.create(
prompt = 'write python code to reverse a string',
messages = []):

for response in t3nsor.StreamCompletion.create(prompt='write python code to reverse a string', messages=[]):
print(response.completion.choices[0].text)

27
testing/test_main.py
Normal file
@@ -0,0 +1,27 @@
import gpt4free
from gpt4free import Provider, quora, forefront

# usage You
response = gpt4free.Completion.create(Provider.You, prompt='Write a poem on Lionel Messi')
print(response)

# usage Poe
token = quora.Account.create(logging=False)
response = gpt4free.Completion.create(Provider.Poe, prompt='Write a poem on Lionel Messi', token=token, model='ChatGPT')
print(response)

# usage forefront
token = forefront.Account.create(logging=False)
response = gpt4free.Completion.create(
    Provider.ForeFront, prompt='Write a poem on Lionel Messi', model='gpt-4', token=token
)
print(response)
print(f'END')

# usage theb
response = gpt4free.Completion.create(Provider.Theb, prompt='Write a poem on Lionel Messi')
print(response)

# usage cocalc
response = gpt4free.Completion.create(Provider.CoCalc, prompt='Write a poem on Lionel Messi', cookie_input='')
print(response)
5
testing/theb_test.py
Normal file
@@ -0,0 +1,5 @@
from gpt4free import theb

for token in theb.Completion.create('hello world'):
    print(token, end='', flush=True)
print('asdsos')
27
testing/useless_test.py
Normal file
@@ -0,0 +1,27 @@
from gpt4free import usesless

message_id = ""
while True:
    prompt = input("Question: ")
    if prompt == "!stop":
        break

    req = usesless.Completion.create(prompt=prompt, parentMessageId=message_id)

    print(f"Answer: {req['text']}")
    message_id = req["id"]


import gpt4free

message_id = ""
while True:
    prompt = input("Question: ")
    if prompt == "!stop":
        break

    req = gpt4free.Completion.create(provider = gpt4free.Provider.UseLess,
        prompt=prompt, parentMessageId=message_id)

    print(f"Answer: {req['text']}")
    message_id = req["id"]
13
testing/usesless_test.py
Normal file
@@ -0,0 +1,13 @@
import usesless

question1 = "Who won the world series in 2020?"
req = usesless.Completion.create(prompt=question1)
answer = req["text"]
message_id = req["parentMessageId"]

question2 = "Where was it played?"
req2 = usesless.Completion.create(prompt=question2, parentMessageId=message_id)
answer2 = req2["text"]

print(answer)
print(answer2)
@@ -2,48 +2,34 @@
import writesonic

# create account (3-4s)
account = writesonic.Account.create(logging = True)
account = writesonic.Account.create(logging=True)

# with loging:
# 2023-04-06 21:50:25 INFO __main__ -> register success : '{"id":"51aa0809-3053-44f7-922a...' (2s)
# 2023-04-06 21:50:25 INFO __main__ -> id : '51aa0809-3053-44f7-922a-2b85d8d07edf'
# 2023-04-06 21:50:25 INFO __main__ -> token : 'eyJhbGciOiJIUzI1NiIsInR5cCI6Ik...'
# 2023-04-06 21:50:28 INFO __main__ -> got key : '194158c4-d249-4be0-82c6-5049e869533c' (2s)
# with loging:
# 2023-04-06 21:50:25 INFO __main__ -> register success : '{"id":"51aa0809-3053-44f7-922a...' (2s)
# 2023-04-06 21:50:25 INFO __main__ -> id : '51aa0809-3053-44f7-922a-2b85d8d07edf'
# 2023-04-06 21:50:25 INFO __main__ -> token : 'eyJhbGciOiJIUzI1NiIsInR5cCI6Ik...'
# 2023-04-06 21:50:28 INFO __main__ -> got key : '194158c4-d249-4be0-82c6-5049e869533c' (2s)

# simple completion
response = writesonic.Completion.create(
api_key = account.key,
prompt = 'hello world'
)
response = writesonic.Completion.create(api_key=account.key, prompt='hello world')

print(response.completion.choices[0].text) # Hello! How may I assist you today?
print(response.completion.choices[0].text) # Hello! How may I assist you today?

# conversation

response = writesonic.Completion.create(
api_key = account.key,
prompt = 'what is my name ?',
enable_memory = True,
history_data = [
{
'is_sent': True,
'message': 'my name is Tekky'
},
{
'is_sent': False,
'message': 'hello Tekky'
}
]
api_key=account.key,
prompt='what is my name ?',
enable_memory=True,
history_data=[{'is_sent': True, 'message': 'my name is Tekky'}, {'is_sent': False, 'message': 'hello Tekky'}],
)

print(response.completion.choices[0].text) # Your name is Tekky.
print(response.completion.choices[0].text) # Your name is Tekky.

# enable internet

response = writesonic.Completion.create(
api_key = account.key,
prompt = 'who won the quatar world cup ?',
enable_google_results = True
api_key=account.key, prompt='who won the quatar world cup ?', enable_google_results=True
)

print(response.completion.choices[0].text) # Argentina won the 2022 FIFA World Cup tournament held in Qatar ...
print(response.completion.choices[0].text) # Argentina won the 2022 FIFA World Cup tournament held in Qatar ...

@@ -1,4 +1,4 @@
import you
from gpt4free import you

# simple request with links and details
response = you.Completion.create(prompt="hello world", detailed=True, include_links=True)
@@ -22,6 +22,6 @@ while True:

response = you.Completion.create(prompt=prompt, chat=chat)

print("Bot:", response["response"])
print("Bot:", response.text)

chat.append({"question": prompt, "answer": response["response"]})
chat.append({"question": prompt, "answer": response.text})

@@ -1,19 +1,19 @@
from json import dumps, loads
from os import getenv
from random import randint
from re import search
from urllib.parse import urlencode

from bard.typings import BardResponse
from dotenv import load_dotenv
from requests import Session

load_dotenv()
token = getenv('1psid')
proxy = getenv('proxy')

temperatures = {
    0: "Generate text strictly following known patterns, with no creativity.",
    0.1: "Produce text adhering closely to established patterns, allowing minimal creativity.",
    0.2: "Create text with modest deviations from familiar patterns, injecting a slight creative touch.",
    0.3: "Craft text with a mild level of creativity, deviating somewhat from common patterns.",
@@ -23,93 +23,71 @@ temperatures = {
    0.7: "Produce text favoring creativity over typical patterns for more original results.",
    0.8: "Create text heavily focused on creativity, with limited concern for familiar patterns.",
    0.9: "Craft text with a strong emphasis on unique and inventive ideas, largely ignoring established patterns.",
    1: "Generate text with maximum creativity, disregarding any constraints of known patterns or structures."
}


class Completion:
    # NOTE: a stateful __init__ (persistent Session, cached SNlM0e and
    # conversation ids) is kept commented out upstream; create() builds a
    # fresh Session on every call instead.
    @staticmethod
    def create(
            prompt: str = 'hello world',
            temperature: float = None,
            conversation_id: str = '',
            response_id: str = '',
            choice_id: str = '') -> BardResponse:

        if temperature:
            prompt = f'''settings: follow these settings for your response: [temperature: {temperature} - {temperatures[temperature]}] | prompt : {prompt}'''

        client = Session()
        client.proxies = {
            'http': f'http://{proxy}',
            'https': f'http://{proxy}'} if proxy else None

        client.headers = {
            'authority': 'bard.google.com',
            'content-type': 'application/x-www-form-urlencoded;charset=UTF-8',
            'origin': 'https://bard.google.com',
            'referer': 'https://bard.google.com/',
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
            'x-same-domain': '1',
            'cookie': f'__Secure-1PSID={token}'
        }

        snlm0e = search(r'SNlM0e\":\"(.*?)\"',
                        client.get('https://bard.google.com/').text).group(1)

        params = urlencode({
            'bl': 'boq_assistant-bard-web-server_20230326.21_p0',
            '_reqid': randint(1111, 9999),
            'rt': 'c',
        })

        response = client.post(
            f'https://bard.google.com/_/BardChatUi/data/assistant.lamda.BardFrontendService/StreamGenerate?{params}',
            data={
                'at': snlm0e,
                'f.req': dumps([None, dumps([
                    [prompt],
                    None,
                    [conversation_id, response_id, choice_id],
                ])])
            }
        )

        chat_data = loads(response.content.splitlines()[3])[0][2]
        if not chat_data:
            print('error, retrying')
            # was a bare recursive call; return its result so the retry is not discarded
            return Completion.create(prompt, temperature,
                                     conversation_id, response_id, choice_id)

        json_chat_data = loads(chat_data)
        results = {
            'content': json_chat_data[0][0],
            'conversation_id': json_chat_data[1][0],
            'response_id': json_chat_data[1][1],
            'factualityQueries': json_chat_data[3],
            'textQuery': json_chat_data[2][0] if json_chat_data[2] is not None else '',
            'choices': [{'id': i[0], 'content': i[1]} for i in json_chat_data[4]],
        }

        return BardResponse(results)
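A minimal usage sketch for the module above, assuming a .env file that provides the 1psid cookie value (and optionally proxy); the prompt is illustrative:

# Requires a valid __Secure-1PSID cookie exposed as `1psid` in .env.
response = Completion.create(prompt='name three uses for a brick', temperature=0.7)

print(response.content)          # answer text
print(response.conversation_id)  # pass back in to continue the conversation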
@@ -1,15 +1,54 @@
from typing import Dict, List, Union


class BardResponse:
    def __init__(self, json_dict: Dict[str, Union[str, List]]) -> None:
        """
        Initialize a BardResponse object.

        :param json_dict: A dictionary containing the JSON response data.
        """
        self.json = json_dict

        self.content = json_dict.get('content')
        self.conversation_id = json_dict.get('conversation_id')
        self.response_id = json_dict.get('response_id')
        self.factuality_queries = json_dict.get('factualityQueries', [])
        self.text_query = json_dict.get('textQuery', [])
        self.choices = [self.BardChoice(choice)
                        for choice in json_dict.get('choices', [])]

    def __repr__(self) -> str:
        """
        Return a string representation of the BardResponse object.

        :return: A string representation of the BardResponse object.
        """
        return f"BardResponse(conversation_id={self.conversation_id}, response_id={self.response_id}, content={self.content})"

    def filter_choices(self, keyword: str) -> List['BardChoice']:
        """
        Filter the choices based on a keyword.

        :param keyword: The keyword to filter choices by.
        :return: A list of filtered BardChoice objects.
        """
        return [choice for choice in self.choices if keyword.lower() in choice.content.lower()]

    class BardChoice:
        def __init__(self, choice_dict: Dict[str, str]) -> None:
            """
            Initialize a BardChoice object.

            :param choice_dict: A dictionary containing the choice data.
            """
            self.id = choice_dict.get('id')
            self.content = choice_dict.get('content')[0]

        def __repr__(self) -> str:
            """
            Return a string representation of the BardChoice object.

            :return: A string representation of the BardChoice object.
            """
            return f"BardChoice(id={self.id}, content={self.content})"
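A short sketch of the wrapper in use, with made-up response data shown only to illustrate the expected shape:

# Made-up payload mirroring the fields BardResponse reads.
data = {
    'content': 'Bricks can build walls, ovens, and garden paths.',
    'conversation_id': 'c_123',
    'response_id': 'r_456',
    'choices': [{'id': 'rc_1', 'content': ['Bricks can build walls.']},
                {'id': 'rc_2', 'content': ['A brick makes a fine bookend.']}],
}

response = BardResponse(data)
print(response)                            # via __repr__
print(response.filter_choices('bookend'))  # [BardChoice(id=rc_2, ...)]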
@@ -1,151 +1,108 @@
# Import necessary libraries
import asyncio
from json import dumps, loads
from random import randint
from ssl import create_default_context
from uuid import uuid4

import websockets
from browser_cookie3 import edge
from certifi import where
from requests import get

# Set up SSL context
ssl_context = create_default_context()
ssl_context.load_verify_locations(where())


def format(msg: dict) -> str:
    """Format message as JSON string with the \x1e record delimiter."""
    return dumps(msg) + '\x1e'


def get_token():
    """Retrieve token from browser cookies."""
    cookies = {c.name: c.value for c in edge(domain_name='bing.com')}
    return cookies['_U']


class AsyncCompletion:
    async def create(
            prompt: str = 'hello world',
            optionSets: list = [
                'deepleo',
                'enable_debug_commands',
                'disable_emoji_spoken_text',
                'enablemm',
                'h3relaxedimg'
            ],
            token: str = get_token()):
        """Create a connection to Bing AI and send the prompt."""

        # Send create request
        create = get('https://edgeservices.bing.com/edgesvc/turing/conversation/create',
                     headers={
                         'host': 'edgeservices.bing.com',
                         'authority': 'edgeservices.bing.com',
                         'cookie': f'_U={token}',
                         'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.69',
                     }
                     )

        # Extract conversation data
        conversationId = create.json()['conversationId']
        clientId = create.json()['clientId']
        conversationSignature = create.json()['conversationSignature']

        # Connect to WebSocket (full browser headers restored from the
        # pre-refactor version of this file, so the code stays runnable)
        wss = await websockets.connect('wss://sydney.bing.com/sydney/ChatHub', max_size=None, ssl=ssl_context,
                                       extra_headers={
                                           'accept': 'application/json',
                                           'accept-language': 'en-US,en;q=0.9',
                                           'content-type': 'application/json',
                                           'sec-ch-ua': '"Not_A Brand";v="99", "Microsoft Edge";v="110", "Chromium";v="110"',
                                           'sec-ch-ua-arch': '"x86"',
                                           'sec-ch-ua-bitness': '"64"',
                                           'sec-ch-ua-full-version': '"109.0.1518.78"',
                                           'sec-ch-ua-full-version-list': '"Chromium";v="110.0.5481.192", "Not A(Brand";v="24.0.0.0", "Microsoft Edge";v="110.0.1587.69"',
                                           'sec-ch-ua-mobile': '?0',
                                           'sec-ch-ua-model': "",
                                           'sec-ch-ua-platform': '"Windows"',
                                           'sec-ch-ua-platform-version': '"15.0.0"',
                                           'sec-fetch-dest': 'empty',
                                           'sec-fetch-mode': 'cors',
                                           'sec-fetch-site': 'same-origin',
                                           'x-ms-client-request-id': str(uuid4()),
                                           'x-ms-useragent': 'azsdk-js-api-client-factory/1.0.0-beta.1 core-rest-pipeline/1.10.0 OS/Win32',
                                           'Referer': 'https://www.bing.com/search?q=Bing+AI&showconv=1&FORM=hpcodx',
                                           'Referrer-Policy': 'origin-when-cross-origin',
                                           'x-forwarded-for': f'13.{randint(104, 107)}.{randint(0, 255)}.{randint(0, 255)}'
                                       }
                                       )

        # Send JSON protocol version
        await wss.send(format({'protocol': 'json', 'version': 1}))
        await wss.recv()

        # Define message structure
        struct = {
            'arguments': [
                {
                    'source': 'cib',
                    'optionsSets': optionSets,
                    'isStartOfSession': True,
                    'message': {
                        'author': 'user',
                        'inputMethod': 'Keyboard',
                        'text': prompt,
                        'messageType': 'Chat'
                    },
                    'conversationSignature': conversationSignature,
                    'participant': {
                        'id': clientId
                    },
                    'conversationId': conversationId
                }
            ],
            'invocationId': '0',
            'target': 'chat',
            'type': 4
        }

        # Send message
        await wss.send(format(struct))

        # Process responses
        base_string = ''

        final = False
        while not final:
            objects = str(await wss.recv()).split('\x1e')
            for obj in objects:
                if obj is None or obj == '':
                    continue

                response = loads(obj)
                if response.get('type') == 1 and response['arguments'][0].get('messages'):
                    response_text = response['arguments'][0]['messages'][0]['adaptiveCards'][0]['body'][0].get(
                        'text')

                    yield response_text.replace(base_string, '')
                    base_string = response_text

                elif response.get('type') == 2:
                    final = True

        await wss.close()


async def run():
    """Run the async completion and print the result."""
    async for value in AsyncCompletion.create(
            prompt='summarize cinderella with each word beginning with a consecutive letter of the alphabet, a-z',
            optionSets=[
                "galileo",
            ]
    ):
        print(value, end='', flush=True)


asyncio.run(run())
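A note on the streaming loop above: each type-1 frame carries the full text so far, so the code recovers the new delta by stripping the previously seen prefix. A tiny self-contained sketch of that pattern:

# Simulated cumulative frames, as the server would send them.
frames = ['Once', 'Once upon', 'Once upon a time']

base_string = ''
for frame in frames:
    print(frame.replace(base_string, ''), end='')  # only the new suffix
    base_string = frame
# prints: Once upon a time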
unfinished/chatpdf/__init__.py (new file)
@@ -0,0 +1,82 @@
import requests
import json

from queue import Queue, Empty
from threading import Thread
from json import loads
from re import findall


class Completion:
    # NOTE: this module is unfinished upstream. message_queue and
    # stream_completed are implied by the code below and added here so it
    # parses; regex, part1 and part2 are referenced but never defined.
    message_queue = Queue()
    stream_completed = False

    @staticmethod
    def request(prompt: str):
        '''TODO: some sort of authentication + upload PDF from URL or local file
        Then you should get the atoken and chat ID
        '''

        token = "your_token_here"
        chat_id = "your_chat_id_here"

        url = "https://chat-pr4yueoqha-ue.a.run.app/"

        payload = json.dumps({
            "v": 2,
            "chatSession": {
                "type": "join",
                "chatId": chat_id
            },
            "history": [
                {
                    "id": "VNsSyJIq_0",
                    "author": "p_if2GPSfyN8hjDoA7unYe",
                    "msg": "<start>",
                    "time": 1682672009270
                },
                {
                    "id": "Zk8DRUtx_6",
                    "author": "uplaceholder",
                    "msg": prompt,
                    "time": 1682672181339
                }
            ]
        })

        # TODO: fix headers, use random user-agent, streaming response, etc
        headers = {
            'authority': 'chat-pr4yueoqha-ue.a.run.app',
            'accept': '*/*',
            'accept-language': 'en-US,en;q=0.9',
            'atoken': token,
            'content-type': 'application/json',
            'origin': 'https://www.chatpdf.com',
            'referer': 'https://www.chatpdf.com/',
            'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"Windows"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'cross-site',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
        }

        response = requests.request(
            "POST", url, headers=headers, data=payload).text
        Completion.stream_completed = True
        return {'response': response}

    @staticmethod
    def create(prompt: str):
        Thread(target=Completion.request, args=[prompt]).start()

        while not Completion.stream_completed or not Completion.message_queue.empty():
            try:
                message = Completion.message_queue.get(timeout=0.01)
                for message in findall(Completion.regex, message):
                    yield loads(Completion.part1 + message + Completion.part2)['delta']

            except Empty:
                pass

    @staticmethod
    def handle_stream_response(response):
        Completion.message_queue.put(response.decode())
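Once the undefined pieces (regex, part1, part2 and the streaming callback wiring) exist, consuming the generator would presumably look like:

# Hypothetical: only works after the class's missing attributes are defined upstream.
for delta in Completion.create('summarize the uploaded PDF'):
    print(delta, end='', flush=True)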
@@ -1,31 +0,0 @@
import requests


class Completion:
    def create(prompt="What is the square root of pi",
               system_prompt="ASSUME I HAVE FULL ACCESS TO COCALC. ENCLOSE MATH IN $. INCLUDE THE LANGUAGE DIRECTLY AFTER THE TRIPLE BACKTICKS IN ALL MARKDOWN CODE BLOCKS. How can I do the following using CoCalc?") -> str:

        # Initialize a session
        session = requests.Session()

        # Set headers for the request
        headers = {
            'Accept': '*/*',
            'Accept-Language': 'en-US,en;q=0.5',
            'Origin': 'https://cocalc.com',
            'Referer': 'https://cocalc.com/api/v2/openai/chatgpt',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36',
        }
        session.headers.update(headers)

        # Set the data that will be submitted
        payload = {
            "input": prompt,
            "system": system_prompt,
            "tag": "next:index"
        }

        # Submit the request
        response = session.post("https://cocalc.com/api/v2/openai/chatgpt", json=payload).json()

        # Return the results
        return response
@@ -1,8 +0,0 @@
import cocalc


response = cocalc.Completion.create(
    prompt='hello world'
)

print(response)
@@ -1,31 +0,0 @@
from requests import get
from os import urandom
from json import loads

sessionId = urandom(10).hex()

headers = {
    'Accept': 'text/event-stream',
    'Accept-Language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive',
    'Pragma': 'no-cache',
    'Referer': 'http://easy-ai.ink/chat',
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
    'token': 'null',
}

while True:
    prompt = input('you: ')

    params = {
        'message': prompt,
        'sessionId': sessionId
    }

    for chunk in get('http://easy-ai.ink/easyapi/v1/chat/completions', params=params,
                     headers=headers, verify=False, stream=True).iter_lines():

        if b'content' in chunk:
            data = loads(chunk.decode('utf-8').split('data:')[1])
            print(data['content'], end='')
@@ -1,30 +1,46 @@
from json import dumps, loads

import websockets


# Define the asynchronous function to test the WebSocket connection


async def test():
    # Establish a WebSocket connection with the specified URL
    async with websockets.connect('wss://chatgpt.func.icu/conversation+ws') as wss:

        # Prepare the message payload as a JSON object
        payload = {
            'content_type': 'text',
            'engine': 'chat-gpt',
            'parts': ['hello world'],
            'options': {}
        }

        # Send the payload to the WebSocket server
        await wss.send(dumps(obj=payload, separators=(',', ':')))

        # Initialize a variable to track the end of the conversation
        ended = None

        # Continuously receive and process messages until the conversation ends
        while not ended:
            try:
                # Receive and parse the JSON response from the server
                response = await wss.recv()
                json_response = loads(response)

                # Print the entire JSON response
                print(json_response)

                # Check for the end of the conversation
                ended = json_response.get('eof')

                # If the conversation has not ended, print the received message
                if not ended:
                    print(json_response['content']['parts'][0])

            # Handle cases when the connection is closed by the server
            except websockets.ConnectionClosed:
                break
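The hunk does not show an entry point; running the coroutine would presumably be the standard:

import asyncio

asyncio.run(test())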
Some files were not shown because too many files have changed in this diff.