Merge pull request #2387 from hlohaus/info
Add image upload to Copilot provider, Add --cookie-browsers argument to cli
hlohaus authored Nov 19, 2024
2 parents 3aa497f + 2754043 commit de4f374
Showing 9 changed files with 84 additions and 84 deletions.
16 changes: 3 additions & 13 deletions g4f/Provider/Copilot.py
@@ -15,17 +15,13 @@
has_nodriver = True
except ImportError:
has_nodriver = False
try:
from platformdirs import user_config_dir
has_platformdirs = True
except ImportError:
has_platformdirs = False

from .base_provider import AbstractProvider, BaseConversation
from .helper import format_prompt
from ..typing import CreateResult, Messages, ImageType
from ..errors import MissingRequirementsError
from ..requests.raise_for_status import raise_for_status
from ..requests import get_nodriver
from ..image import to_bytes, is_accepted_format
from .. import debug

@@ -130,6 +126,7 @@ def create_completion(
except:
break
if msg.get("event") == "appendText":
is_started = True
yield msg.get("text")
elif msg.get("event") in ["done", "partCompleted"]:
break
@@ -138,14 +135,7 @@ def create_completion(

@classmethod
async def get_access_token_and_cookies(cls, proxy: str = None):
if not has_nodriver:
raise MissingRequirementsError('Install "nodriver" package | pip install -U nodriver')
user_data_dir = user_config_dir("g4f-nodriver") if has_platformdirs else None
debug.log(f"Copilot: Open nodriver with user_dir: {user_data_dir}")
browser = await nodriver.start(
user_data_dir=user_data_dir,
browser_args=None if proxy is None else [f"--proxy-server={proxy}"],
)
browser = await get_nodriver(proxy=proxy)
page = await browser.get(cls.url)
access_token = None
while access_token is None:
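The second hunk adds an is_started flag to the loop that streams Copilot's reply. A minimal sketch of that consumption pattern, using only the event names visible above; the messages iterable and the closing comment stand in for code outside this hunk:

    def consume_copilot_events(messages):
        """Yield text chunks until the service signals completion."""
        is_started = False
        for msg in messages:  # parsed JSON frames from the websocket
            if msg.get("event") == "appendText":
                is_started = True          # at least one chunk arrived
                yield msg.get("text")
            elif msg.get("event") in ("done", "partCompleted"):
                break
        # The flag presumably guards an error path that is not shown in this hunk.

In the provider this loop runs inside create_completion, so each appendText chunk reaches the caller as soon as it arrives.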
82 changes: 16 additions & 66 deletions g4f/Provider/You.py
@@ -2,15 +2,15 @@

import re
import json
import base64
import uuid

from ..typing import AsyncResult, Messages, ImageType, Cookies
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from .helper import format_prompt
from ..image import ImageResponse, ImagePreview, EXTENSIONS_MAP, to_bytes, is_accepted_format
from ..requests import StreamSession, FormData, raise_for_status
from .you.har_file import get_telemetry_ids
from ..requests import StreamSession, FormData, raise_for_status, get_nodriver
from ..cookies import get_cookies
from ..errors import MissingRequirementsError
from .. import debug

class You(AsyncGeneratorProvider, ProviderModelMixin):
@@ -57,6 +57,7 @@ async def create_async_generator(
proxy: str = None,
timeout: int = 240,
chat_mode: str = "default",
cookies: Cookies = None,
**kwargs,
) -> AsyncResult:
if image is not None or model == cls.default_vision_model:
@@ -69,12 +70,22 @@
else:
chat_mode = "custom"
model = cls.get_model(model)
if cookies is None and chat_mode != "default":
try:
cookies = get_cookies(".you.com")
except MissingRequirementsError:
browser = await get_nodriver(proxy=proxy)
page = await browser.get(cls.url)
await page.wait_for('[data-testid="user-profile-button"]', timeout=900)
cookies = {}
for c in await page.send(nodriver.cdp.network.get_cookies([cls.url])):
cookies[c.name] = c.value
await page.close()
async with StreamSession(
proxy=proxy,
impersonate="chrome",
timeout=(30, timeout)
) as session:
cookies = await cls.get_cookies(session) if chat_mode != "default" else None
upload = ""
if image is not None:
upload_file = await cls.upload_file(
@@ -156,65 +167,4 @@ async def upload_file(cls, client: StreamSession, cookies: Cookies, file: bytes,
result = await response.json()
result["user_filename"] = filename
result["size"] = len(file)
return result

@classmethod
async def get_cookies(cls, client: StreamSession) -> Cookies:
if not cls._cookies or cls._cookies_used >= 5:
cls._cookies = await cls.create_cookies(client)
cls._cookies_used = 0
cls._cookies_used += 1
return cls._cookies

@classmethod
def get_sdk(cls) -> str:
return base64.standard_b64encode(json.dumps({
"event_id":f"event-id-{str(uuid.uuid4())}",
"app_session_id":f"app-session-id-{str(uuid.uuid4())}",
"persistent_id":f"persistent-id-{uuid.uuid4()}",
"client_sent_at":"","timezone":"",
"stytch_user_id":f"user-live-{uuid.uuid4()}",
"stytch_session_id":f"session-live-{uuid.uuid4()}",
"app":{"identifier":"you.com"},
"sdk":{"identifier":"Stytch.js Javascript SDK","version":"3.3.0"
}}).encode()).decode()

def get_auth() -> str:
auth_uuid = "507a52ad-7e69-496b-aee0-1c9863c7c819"
auth_token = f"public-token-live-{auth_uuid}:public-token-live-{auth_uuid}"
auth = base64.standard_b64encode(auth_token.encode()).decode()
return f"Basic {auth}"

@classmethod
async def create_cookies(cls, client: StreamSession) -> Cookies:
if not cls._telemetry_ids:
cls._telemetry_ids = await get_telemetry_ids()
user_uuid = str(uuid.uuid4())
telemetry_id = cls._telemetry_ids.pop()
if debug.logging:
print(f"Use telemetry_id: {telemetry_id}")
async with client.post(
"https://web.stytch.com/sdk/v1/passwords",
headers={
"Authorization": cls.get_auth(),
"X-SDK-Client": cls.get_sdk(),
"X-SDK-Parent-Host": cls.url,
"Origin": "https://you.com",
"Referer": "https://you.com/"
},
json={
"dfp_telemetry_id": telemetry_id,
"email": f"{user_uuid}@gmail.com",
"password": f"{user_uuid}#{user_uuid}",
"session_duration_minutes": 129600
}
) as response:
await raise_for_status(response)
session = (await response.json())["data"]

return {
"stytch_session": session["session_token"],
'stytch_session_jwt': session["session_jwt"],
'ydc_stytch_session': session["session_token"],
'ydc_stytch_session_jwt': session["session_jwt"],
}
return result
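The block added at the top of create_async_generator gives You.com three cookie sources: cookies passed by the caller, cookies read from a local browser profile via get_cookies(".you.com"), and, as a last resort, an interactive nodriver session. A condensed sketch of that fallback chain, assuming nodriver is installed for the fallback path; the selector and timeout come from the hunk, and the standalone wrapper is added only for illustration:

    import nodriver

    from g4f.cookies import get_cookies
    from g4f.errors import MissingRequirementsError
    from g4f.requests import get_nodriver

    async def resolve_you_cookies(url: str = "https://you.com", proxy: str = None) -> dict:
        try:
            # Fast path: reuse cookies from an installed browser.
            return get_cookies(".you.com")
        except MissingRequirementsError:
            # Fallback: open a real browser window and wait for the user to sign in.
            browser = await get_nodriver(proxy=proxy)
            page = await browser.get(url)
            await page.wait_for('[data-testid="user-profile-button"]', timeout=900)
            cookies = {c.name: c.value
                       for c in await page.send(nodriver.cdp.network.get_cookies([url]))}
            await page.close()
            return cookies

The dict comprehension mirrors the cookies[c.name] = c.value loop above; the 900-second timeout leaves time for a manual login.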
2 changes: 1 addition & 1 deletion g4f/Provider/needs_auth/DeepInfraImage.py
@@ -73,7 +73,7 @@ async def create_async(
async with session.post(f"{api_base.rstrip('/')}/{model}", json=data) as response:
await raise_for_status(response)
data = await response.json()
images = data["output"] if "output" in data else data["images"]
images = data.get("output", data.get("images"))
if not images:
raise RuntimeError(f"Response: {data}")
images = images[0] if len(images) == 1 else images
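The one-line change above replaces an explicit key check with chained dict.get lookups. Behaviour is identical when either key is present; when both are missing, the old expression raised KeyError, while the new one returns None, which the existing guard converts into the more informative RuntimeError. A quick illustration of the surrounding logic, with pick_images as a hypothetical wrapper around the lines shown:

    def pick_images(data: dict):
        images = data.get("output", data.get("images"))  # prefer "output", fall back to "images"
        if not images:
            raise RuntimeError(f"Response: {data}")
        return images[0] if len(images) == 1 else images

    assert pick_images({"output": ["a.png"]}) == "a.png"
    assert pick_images({"images": ["a.png", "b.png"]}) == ["a.png", "b.png"]
    # An error payload with neither key now reaches the RuntimeError instead of a bare KeyError.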
4 changes: 4 additions & 0 deletions g4f/cli.py
@@ -4,6 +4,7 @@

from g4f import Provider
from g4f.gui.run import gui_parser, run_gui_args
import g4f.cookies

def main():
parser = argparse.ArgumentParser(description="Run gpt4free")
@@ -23,6 +24,8 @@ def main():
api_parser.add_argument("--g4f-api-key", type=str, default=None, help="Sets an authentication key for your API. (incompatible with --reload and --workers)")
api_parser.add_argument("--ignored-providers", nargs="+", choices=[provider.__name__ for provider in Provider.__providers__ if provider.working],
default=[], help="List of providers to ignore when processing request. (incompatible with --reload and --workers)")
api_parser.add_argument("--cookie-browsers", nargs="+", choices=[browser.__name__ for browser in g4f.cookies.browsers],
default=[], help="List of browsers to access or retrieve cookies from. (incompatible with --reload and --workers)")
api_parser.add_argument("--reload", action="store_true", help="Enable reloading.")
subparsers.add_parser("gui", parents=[gui_parser()], add_help=False)

@@ -47,6 +50,7 @@ def run_api_args(args):
proxy=args.proxy,
model=args.model
)
g4f.cookies.browsers = [g4f.cookies[browser] for browser in args.cookie_browsers]
run_api(
bind=args.bind,
debug=args.debug,
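The new --cookie-browsers flag limits which cookie loaders g4f consults, selected by function name (the same names the argparse choices list is built from). The assignment in run_api_args stores the selection back into g4f.cookies.browsers; the helper below is an illustrative sketch of that name-to-loader mapping, not code from the commit, and it assumes browser-cookie3 is installed so that chrome and firefox appear in the list:

    import g4f.cookies

    def select_cookie_browsers(names: list) -> list:
        """Keep only the loaders whose __name__ was requested on the command line."""
        by_name = {browser.__name__: browser for browser in g4f.cookies.browsers}
        return [by_name[name] for name in names]

    # e.g. running the API with --cookie-browsers chrome firefox narrows the list to two loaders:
    g4f.cookies.browsers = select_cookie_browsers(["chrome", "firefox"])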
25 changes: 24 additions & 1 deletion g4f/cookies.py
@@ -15,9 +15,32 @@
brave, edge, vivaldi, firefox,
_LinuxPasswordManager, BrowserCookieError
)

def _g4f(domain_name: str) -> list:
"""
Load cookies from the 'g4f' browser (if exists).
Args:
domain_name (str): The domain for which to load cookies.
Returns:
list: List of cookies.
"""
if not has_platformdirs:
return []
user_data_dir = user_config_dir("g4f")
cookie_file = os.path.join(user_data_dir, "Default", "Cookies")
return [] if not os.path.exists(cookie_file) else chrome(cookie_file, domain_name)

browsers = [
_g4f,
chrome, chromium, opera, opera_gx,
brave, edge, vivaldi, firefox,
]
has_browser_cookie3 = True
except ImportError:
has_browser_cookie3 = False
browsers = []

from .typing import Dict, Cookies
from .errors import MissingRequirementsError
@@ -114,7 +137,7 @@ def get_domain(v: dict) -> str:

harFiles = []
cookieFiles = []
for root, dirs, files in os.walk(CookiesConfig.cookies_dir if dirPath is None else dirPath):
for root, _, files in os.walk(CookiesConfig.cookies_dir if dirPath is None else dirPath):
for file in files:
if file.endswith(".har"):
harFiles.append(os.path.join(root, file))
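With this change the _g4f loader sits alongside the regular browser-cookie3 readers in browsers, so cookies saved by g4f's own nodriver profile are picked up like any other browser's. A small usage sketch: the path layout comes from the function above, and it assumes get_cookies consults the browsers list (and that browser-cookie3 or a g4f profile is available):

    import os
    from platformdirs import user_config_dir

    from g4f.cookies import get_cookies

    # Where the _g4f loader expects a Chromium-style cookie store.
    cookie_file = os.path.join(user_config_dir("g4f"), "Default", "Cookies")
    print("g4f profile cookie store:", cookie_file, os.path.exists(cookie_file))

    # Loads a name -> value mapping for the domain, consulting _g4f as well.
    print(sorted(get_cookies(".you.com")))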
14 changes: 12 additions & 2 deletions g4f/gui/client/static/css/style.css
@@ -500,14 +500,24 @@ body {
animation: show_popup 0.4s;
}

.toolbar .regenerate {
left: 50%;
transform: translateX(-50%);
right: auto;
}

.toolbar .regenerate span {
display: none;
}

@media only screen and (min-width: 40em) {
.stop_generating {
left: 50%;
transform: translateX(-50%);
right: auto;
}
.toolbar .regenerate {
right: 5px;
.toolbar .regenerate span {
display: block;
}
}

4 changes: 4 additions & 0 deletions g4f/gui/gui_parser.py
@@ -1,9 +1,13 @@
from argparse import ArgumentParser

from ..cookies import browsers

def gui_parser():
parser = ArgumentParser(description="Run the GUI")
parser.add_argument("--host", type=str, default="0.0.0.0", help="hostname")
parser.add_argument("--port", "-p", type=int, default=8080, help="port")
parser.add_argument("--debug", "-d", "-debug", action="store_true", help="debug mode")
parser.add_argument("--ignore-cookie-files", action="store_true", help="Don't read .har and cookie files.")
parser.add_argument("--cookie-browsers", nargs="+", choices=[browser.__name__ for browser in browsers],
default=[], help="List of browsers to access or retrieve cookies from.")
return parser
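Because the GUI parser now carries the same option, the selection can be exercised directly through argparse (assuming browser-cookie3 is installed, so the choices list is non-empty):

    from g4f.gui.gui_parser import gui_parser

    args = gui_parser().parse_args(["--port", "8081", "--cookie-browsers", "chrome", "firefox"])
    print(args.port)             # 8081
    print(args.cookie_browsers)  # ['chrome', 'firefox']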
2 changes: 2 additions & 0 deletions g4f/gui/run.py
@@ -1,5 +1,6 @@
from .gui_parser import gui_parser
from ..cookies import read_cookie_files
import g4f.cookies
import g4f.debug

def run_gui_args(args):
@@ -11,6 +12,7 @@ def run_gui_args(args):
host = args.host
port = args.port
debug = args.debug
g4f.cookies.browsers = [g4f.cookies[browser] for browser in args.cookie_browsers]
run_gui(host, port, debug)

if __name__ == "__main__":
19 changes: 18 additions & 1 deletion g4f/requests/__init__.py
@@ -20,9 +20,15 @@
try:
import nodriver
from nodriver.cdp.network import CookieParam
from nodriver import Browser
has_nodriver = True
except ImportError:
has_nodriver = False
try:
from platformdirs import user_config_dir
has_platformdirs = True
except ImportError:
has_platformdirs = False

from .. import debug
from .raise_for_status import raise_for_status
@@ -165,4 +171,15 @@ def merge_cookies(cookies: Iterator[Morsel], response: Response) -> Cookies:
if cookies is None:
cookies = {}
for cookie in response.cookies.jar:
cookies[cookie.name] = cookie.value
cookies[cookie.name] = cookie.value

async def get_nodriver(proxy: str = None, **kwargs)-> Browser:
if not has_nodriver:
raise MissingRequirementsError('Install "nodriver" package | pip install -U nodriver')
user_data_dir = user_config_dir("g4f-nodriver") if has_platformdirs else None
debug.log(f"Copilot: Open nodriver with user_dir: {user_data_dir}")
return await nodriver.start(
user_data_dir=user_data_dir,
browser_args=None if proxy is None else [f"--proxy-server={proxy}"],
**kwargs
)
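get_nodriver centralises the startup code that Copilot.get_access_token_and_cookies previously carried inline: a persistent g4f-nodriver profile when platformdirs is available, plus an optional --proxy-server argument. A short usage sketch, assuming nodriver and platformdirs are installed; the target URL is chosen only as an example:

    import asyncio

    from g4f.requests import get_nodriver

    async def main():
        browser = await get_nodriver(proxy=None)          # or proxy="http://127.0.0.1:8080"
        page = await browser.get("https://copilot.microsoft.com")
        await page.wait_for("body")                       # wait until the document renders
        await page.close()
        # As in the providers above, the browser process itself is left to nodriver.

    asyncio.run(main())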
