Commit
cs-fix
huangzhhui committed Feb 6, 2025
1 parent 11a7612 commit 5ed4af2
Showing 77 changed files with 723 additions and 450 deletions.
3 changes: 2 additions & 1 deletion .vscode/settings.json
@@ -1,3 +1,4 @@
{
"php.version": "8.2"
"php.version": "8.2",
"php-cs-fixer.executablePath": "php-cs-fixer"
}
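
The added php-cs-fixer.executablePath setting points VS Code at the same fixer that produced the style changes in the rest of this commit. As a rough, illustrative sketch only — this commit does not show the project's actual rule set, and the paths and rules below are assumptions — a .php-cs-fixer.php along these lines would produce the kinds of fixes seen here (added method visibility, single quotes, alphabetized imports, declare(strict_types=1)):

<?php
// Illustrative .php-cs-fixer.php sketch; the project's real configuration may differ.
$finder = PhpCsFixer\Finder::create()
    ->in(__DIR__ . '/bin')
    ->in(__DIR__ . '/src');

$config = new PhpCsFixer\Config();
return $config
    ->setRiskyAllowed(true) // declare_strict_types is a "risky" fixer
    ->setRules([
        '@PSR12' => true,
        'declare_strict_types' => true,
        'ordered_imports' => ['sort_algorithm' => 'alpha'],
        'no_unused_imports' => true,
        'single_quote' => true,
        'visibility_required' => ['elements' => ['method', 'property']],
    ])
    ->setFinder($finder);

With a file like this at the repository root, running php-cs-fixer fix applies the same rules that the new editor setting exposes inside VS Code.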
44 changes: 26 additions & 18 deletions bin/code_optimize.php
@@ -1,5 +1,15 @@
<?php

declare(strict_types=1);
/**
* This file is part of Hyperf.
*
* @link https://www.hyperf.io
* @document https://hyperf.wiki
* @contact [email protected]
* @license https://github.com/hyperf/hyperf/blob/master/LICENSE
*/
use Hyperf\Di\ClassLoader;
use Hyperf\Odin\Apis\AzureOpenAI\AzureOpenAI;
use Hyperf\Odin\Apis\AzureOpenAI\AzureOpenAIConfig;
use Hyperf\Odin\Apis\AzureOpenAI\Client as AzureOpenAIClient;
@@ -8,75 +18,73 @@
use Hyperf\Odin\Apis\OpenAI\OpenAIConfig;
use Hyperf\Odin\Message\SystemMessage;
use Hyperf\Odin\Message\UserMessage;
use function Hyperf\Support\env as env;

use function Hyperf\Support\env;

! defined('BASE_PATH') && define('BASE_PATH', dirname(__DIR__, 1));

require_once dirname(dirname(__FILE__)) . '/vendor/autoload.php';

\Hyperf\Di\ClassLoader::init();
ClassLoader::init();

class LLM
{

public string $model = 'gpt-3.5-turbo';

public function chat(array $messages, float $temperature = 0.9,): string
public function chat(array $messages, float $temperature = 0.9): string
{
$client = $this->getAzureOpenAIClient();
$client->setDebug(false);
return $client->chat($messages, $this->model, $temperature);
}

function getOpenAIClient(): OpenAIClient
public function getOpenAIClient(): OpenAIClient
{
$openAI = new OpenAI();
$config = new OpenAIConfig(env('OPENAI_API_KEY'),);
$config = new OpenAIConfig(env('OPENAI_API_KEY'));
return $openAI->getClient($config);
}

function getAzureOpenAIClient(): AzureOpenAIClient
public function getAzureOpenAIClient(): AzureOpenAIClient
{
$openAI = new AzureOpenAI();
$config = new AzureOpenAIConfig(apiKey: env('AZURE_OPENAI_API_KEY'), baseUrl: env('AZURE_OPENAI_API_BASE'), apiVersion: env('AZURE_OPENAI_API_VERSION'), deploymentName: env('AZURE_OPENAI_DEPLOYMENT_NAME'),);
$config = new AzureOpenAIConfig(apiKey: env('AZURE_OPENAI_API_KEY'), baseUrl: env('AZURE_OPENAI_API_BASE'), apiVersion: env('AZURE_OPENAI_API_VERSION'), deploymentName: env('AZURE_OPENAI_DEPLOYMENT_NAME'));
return $openAI->getClient($config);
}
}

function chat(string $message): string
{
$prefixPrompt = <<<PROMPT
$prefixPrompt = <<<'PROMPT'
你是一个低代码平台的代码生成器,项目使用 Hyperf 3.0 框架作为代码实现,你需要尽可能详细的分析流程,代码内容不能省略必须完成实现可运行的具体的代码,结果必须根据格式要求返回。
PROMPT;

$llm = new LLM();
$result = $llm->chat([
'system' => new SystemMessage('你是一个由 Hyperf 组织开发的低代码生成器,你必须严格按照格式要求返回内容'),
'user' => new UserMessage($prefixPrompt . PHP_EOL . $message),
], temperature: 0) . PHP_EOL;
'system' => new SystemMessage('你是一个由 Hyperf 组织开发的低代码生成器,你必须严格按照格式要求返回内容'),
'user' => new UserMessage($prefixPrompt . PHP_EOL . $message),
], temperature: 0) . PHP_EOL;
echo '[AI]: ' . $result;
return $result;
}

$userMessage = "根据代码逻辑和注释要求,改成 TypeScript 代码,每个方法以 JsDoc 的形式生成注释,实现方法中具体的业务逻辑,不能只输出注释,必须完成所有的 TODO,必须是具体的、完整的、可运行的代码";
$userMessage = '根据代码逻辑和注释要求,改成 TypeScript 代码,每个方法以 JsDoc 的形式生成注释,实现方法中具体的业务逻辑,不能只输出注释,必须完成所有的 TODO,必须是具体的、完整的、可运行的代码';

$outputDir = BASE_PATH . '/output';
$sourceCodeFilePath = $outputDir . '/service.js';
$sourceCode = file_get_contents($sourceCodeFilePath);
$generate = <<<PROMPT
用户需求:$userMessage
用户需求:{$userMessage}
原始代码:
$sourceCode
{$sourceCode}
要求:你需要根据上面的代码结构和用户的需求,对提供的代码进行修改,不需要输出其他类的代码,代码结构必须符合 Javascript 的规范,使用强类型代码实现,不需要任何解释和多余的换行,直接输出代码即可。
返回结果:
PROMPT;
var_dump($generate);
exit();
exit;
$result = chat($generate);
$code = trim($result);
// 解析 ```php ``` 之间的代码
preg_match('/```php(.*)```/s', $code, $matches);
$code = trim($matches[1] ?? '');
file_put_contents(str_replace('service.js', 'service.ts', $sourceCodeFilePath), $code);
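
A recurring fix in this file (and the ones below) is that heredocs containing no variables become nowdocs (<<<'PROMPT'), while heredocs that do interpolate keep the plain syntax but wrap variables in braces ({$userMessage}). A minimal standalone illustration of the difference — the variable and labels here are made up:

<?php
$name = 'Hyperf';

// Heredoc: parsed like a double-quoted string, so {$name} is interpolated.
$heredoc = <<<TEXT
Hello, {$name}!
TEXT;

// Nowdoc: parsed like a single-quoted string, so $name stays literal.
$nowdoc = <<<'TEXT'
Hello, $name!
TEXT;

echo $heredoc . PHP_EOL; // Hello, Hyperf!
echo $nowdoc . PHP_EOL;  // Hello, $name!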


12 changes: 6 additions & 6 deletions bin/codegen.php
@@ -1,7 +1,6 @@
<?php

declare(strict_types=1);

/**
* This file is part of Hyperf.
*
@@ -10,7 +9,7 @@
* @contact [email protected]
* @license https://github.com/hyperf/hyperf/blob/master/LICENSE
*/

use Hyperf\Di\ClassLoader;
use Hyperf\Odin\Apis\AzureOpenAI\AzureOpenAI;
use Hyperf\Odin\Apis\AzureOpenAI\AzureOpenAIConfig;
use Hyperf\Odin\Apis\AzureOpenAI\Client as AzureOpenAIClient;
@@ -19,13 +18,14 @@
use Hyperf\Odin\Apis\OpenAI\OpenAIConfig;
use Hyperf\Odin\Message\SystemMessage;
use Hyperf\Odin\Message\UserMessage;

use function Hyperf\Support\env;

! defined('BASE_PATH') && define('BASE_PATH', dirname(__DIR__, 1));

require_once dirname(dirname(__FILE__)) . '/vendor/autoload.php';

\Hyperf\Di\ClassLoader::init();
ClassLoader::init();

class LLM
{
@@ -61,9 +61,9 @@ function chat(string $message): string

$llm = new LLM();
$result = $llm->chat([
'system' => new SystemMessage('You are a low-code generator developed by Hyperf. Follow the format requirements to return content.'),
'user' => new UserMessage($prefixPrompt . PHP_EOL . $message),
], temperature: 0) . PHP_EOL;
'system' => new SystemMessage('You are a low-code generator developed by Hyperf. Follow the format requirements to return content.'),
'user' => new UserMessage($prefixPrompt . PHP_EOL . $message),
], temperature: 0) . PHP_EOL;
echo '[AI]: ' . $result;
return $result;
}
34 changes: 22 additions & 12 deletions bin/data.php
@@ -1,5 +1,15 @@
<?php

declare(strict_types=1);
/**
* This file is part of Hyperf.
*
* @link https://www.hyperf.io
* @document https://hyperf.wiki
* @contact [email protected]
* @license https://github.com/hyperf/hyperf/blob/master/LICENSE
*/
use Hyperf\Di\ClassLoader;
use Hyperf\Odin\Apis\AzureOpenAI\AzureOpenAI;
use Hyperf\Odin\Apis\AzureOpenAI\AzureOpenAIConfig;
use Hyperf\Odin\Apis\AzureOpenAI\Client as AzureOpenAIClient;
@@ -8,37 +18,37 @@
use Hyperf\Odin\Apis\OpenAI\OpenAIConfig;
use Hyperf\Odin\Message\SystemMessage;
use Hyperf\Odin\Message\UserMessage;
use function Hyperf\Support\env as env;

use function Hyperf\Support\env;

! defined('BASE_PATH') && define('BASE_PATH', dirname(__DIR__, 1));

require_once dirname(dirname(__FILE__)) . '/vendor/autoload.php';

\Hyperf\Di\ClassLoader::init();
ClassLoader::init();

class LLM
{

public string $model = 'gpt-3.5-turbo';

public function chat(array $messages, float $temperature = 0,): string
public function chat(array $messages, float $temperature = 0): string
{
$client = $this->getAzureOpenAIClient();
$client->setDebug(true);
return $client->chat($messages, $this->model, $temperature);
}

function getOpenAIClient(): OpenAIClient
public function getOpenAIClient(): OpenAIClient
{
$openAI = new OpenAI();
$config = new OpenAIConfig(env('OPENAI_API_KEY'),);
$config = new OpenAIConfig(env('OPENAI_API_KEY'));
return $openAI->getClient($config);
}

function getAzureOpenAIClient(): AzureOpenAIClient
public function getAzureOpenAIClient(): AzureOpenAIClient
{
$openAI = new AzureOpenAI();
$config = new AzureOpenAIConfig(apiKey: env('AZURE_OPENAI_API_KEY'), baseUrl: env('AZURE_OPENAI_API_BASE'), apiVersion: env('AZURE_OPENAI_API_VERSION'), deploymentName: env('AZURE_OPENAI_DEPLOYMENT_NAME'),);
$config = new AzureOpenAIConfig(apiKey: env('AZURE_OPENAI_API_KEY'), baseUrl: env('AZURE_OPENAI_API_BASE'), apiVersion: env('AZURE_OPENAI_API_VERSION'), deploymentName: env('AZURE_OPENAI_DEPLOYMENT_NAME'));
return $openAI->getClient($config);
}
}
@@ -49,7 +59,7 @@ function getAzureOpenAIClient(): AzureOpenAIClient
你是一个专业的数据分析师,你需要根据下面的数据进行分析,根据用户问题以结论性的内容简洁的输出你的分析结果,尽量不要输出空白行:
数据:
$data
{$data}
数据计算逻辑:单杯利润=价格-费用合计,毛利率=毛利/价格,毛利=价格-物料成本,费用合计=运营费用+营销费用+其它成本+折旧+管理费用+税项
要求:严格基于上面的数据和数据计算逻辑,一步一步推理全过程回答下面的问题
@@ -59,6 +69,6 @@ function getAzureOpenAIClient(): AzureOpenAIClient

$llm = new LLM();
echo '[AI]: ' . $llm->chat([
'system' => new SystemMessage('你是一个由 Hyperf 组织开发的专业的数据分析机器人,你必须严格按照格式要求返回内容'),
'user' => new UserMessage($prompt),
]) . PHP_EOL;
'system' => new SystemMessage('你是一个由 Hyperf 组织开发的专业的数据分析机器人,你必须严格按照格式要求返回内容'),
'user' => new UserMessage($prompt),
]) . PHP_EOL;
11 changes: 10 additions & 1 deletion bin/init.php
@@ -1,5 +1,14 @@
<?php

declare(strict_types=1);
/**
* This file is part of Hyperf.
*
* @link https://www.hyperf.io
* @document https://hyperf.wiki
* @contact [email protected]
* @license https://github.com/hyperf/hyperf/blob/master/LICENSE
*/
use Hyperf\Context\ApplicationContext;
use Hyperf\Di\ClassLoader;
use Hyperf\Di\Container;
@@ -10,4 +19,4 @@
require_once dirname(dirname(__FILE__)) . '/vendor/autoload.php';

ClassLoader::init();
return $container = ApplicationContext::setContainer(new Container((new DefinitionSourceFactory())()));
return $container = ApplicationContext::setContainer(new Container((new DefinitionSourceFactory())()));
5 changes: 2 additions & 3 deletions bin/interactive.php
@@ -9,7 +9,6 @@
* @contact [email protected]
* @license https://github.com/hyperf/hyperf/blob/master/LICENSE
*/

use Hyperf\Odin\Agent\ToolsAgent;
use Hyperf\Odin\Knowledge\Knowledge;
use Hyperf\Odin\Loader\Loader;
@@ -35,7 +34,7 @@

/** @var ModelMapper $modelMapper */
$modelMapper = $container->get(ModelMapper::class);
#$modelName = 'qwen:32b-chat';
# $modelName = 'qwen:32b-chat';
$modelName = 'gpt-4-turbo';
$llm = $modelMapper->getModel($modelName);
$embeddingModel = $modelMapper->getModel($modelName);
@@ -82,7 +81,7 @@
while (true) {
echo 'Human: ';
// 如果 $defaultInputs 有值,就用 $defaultInputs 的值,否则就读取用户输入
$input = array_shift($defaultInputs) ? : trim(fgets(STDIN));
$input = array_shift($defaultInputs) ?: trim(fgets(STDIN));
$isCommand = false;
switch ($input) {
case 'dump-messages':
8 changes: 3 additions & 5 deletions bin/interpreter.php
@@ -1,7 +1,6 @@
<?php

declare(strict_types=1);

/**
* This file is part of Hyperf.
*
@@ -10,7 +9,6 @@
* @contact [email protected]
* @license https://github.com/hyperf/hyperf/blob/master/LICENSE
*/

use Hyperf\Odin\Conversation\Conversation;
use Hyperf\Odin\Conversation\Option;
use Hyperf\Odin\Interpreter\CodeRunner;
@@ -34,8 +32,8 @@
echo 'Human: ';
$input = trim(fgets(STDIN, 1024));
$response = $conversation->chat(messages: [
$systemPrompt,
new UserMessage($input),
], model: 'gpt-4', option: new Option(temperature: 0, maxTokens: 3000, functions: [CodeRunner::toFunctionCallDefinition()]), conversationId: $conversationId);
$systemPrompt,
new UserMessage($input),
], model: 'gpt-4', option: new Option(temperature: 0, maxTokens: 3000, functions: [CodeRunner::toFunctionCallDefinition()]), conversationId: $conversationId);
echo 'AI: ' . $response . PHP_EOL;
}
28 changes: 19 additions & 9 deletions bin/json2code.php
@@ -1,33 +1,43 @@
<?php

declare(strict_types=1);
/**
* This file is part of Hyperf.
*
* @link https://www.hyperf.io
* @document https://hyperf.wiki
* @contact [email protected]
* @license https://github.com/hyperf/hyperf/blob/master/LICENSE
*/
use Hyperf\Context\ApplicationContext;
use Hyperf\Di\ClassLoader;
use Hyperf\Di\Container;
use Hyperf\Di\Definition\DefinitionSourceFactory;
use Hyperf\Odin\LLM;
use Hyperf\Odin\Message\SystemMessage;
use Hyperf\Odin\Message\UserMessage;

! defined('BASE_PATH') && define('BASE_PATH', dirname(__DIR__, 1));

require_once dirname(dirname(__FILE__)) . '/vendor/autoload.php';

\Hyperf\Di\ClassLoader::init();
ClassLoader::init();

$container = ApplicationContext::setContainer(new Container((new DefinitionSourceFactory())()));

function chat(string $message): string
{
$container = ApplicationContext::getContainer();
$llm = $container->get(\Hyperf\Odin\LLM::class);
$llm = $container->get(LLM::class);
$result = $llm->chat([
'system' => new SystemMessage('You are a low-code generator developed by Hyperf. Follow the format requirements to return content.'),
'user' => new UserMessage($message),
], temperature: 0) . PHP_EOL;
'system' => new SystemMessage('You are a low-code generator developed by Hyperf. Follow the format requirements to return content.'),
'user' => new UserMessage($message),
], temperature: 0) . PHP_EOL;
echo '[AI]: ' . $result;
return $result;
}


$json = <<<ARRAY
$json = <<<'ARRAY'
[
'name' => 'run_code',
'description' => 'Executes code and returns the output.',
@@ -61,12 +71,12 @@ function chat(string $message): string
Requirements: Transform PHP Array / JSON to PHP object, the object should be an clean object without any property value, should not includes the value, all object class should includes setter and getter, and strong type, output the PHP object code directly, no need to provide the use cases.
ClassName: FunctionCallDefinition, FunctionCallParameters, FunctionCallParameter
Array: ```array
$json
{$json}
```
Output Format: Put the code into ```php ``` tag, and output the code directly.
Output:
PROMPT;

$result = chat($prompt);
var_dump($result);
exit();
exit;