From 4d886618d8746f46e628555e1519c8ecd774467d Mon Sep 17 00:00:00 2001
From: loks0n <22452787+loks0n@users.noreply.github.com>
Date: Fri, 18 Aug 2023 09:40:13 +0100
Subject: [PATCH 1/6] feat: prompt-chatgpt python
---
python/prompt_chatgpt/.gitignore | 160 ++++++++++++++++++++++++
python/prompt_chatgpt/README.md | 85 +++++++++++++
python/prompt_chatgpt/requirements.txt | 1 +
python/prompt_chatgpt/src/main.py | 35 ++++++
python/prompt_chatgpt/src/utils.py | 27 ++++
python/prompt_chatgpt/static/index.html | 91 ++++++++++++++
6 files changed, 399 insertions(+)
create mode 100644 python/prompt_chatgpt/.gitignore
create mode 100644 python/prompt_chatgpt/README.md
create mode 100644 python/prompt_chatgpt/requirements.txt
create mode 100644 python/prompt_chatgpt/src/main.py
create mode 100644 python/prompt_chatgpt/src/utils.py
create mode 100644 python/prompt_chatgpt/static/index.html
diff --git a/python/prompt_chatgpt/.gitignore b/python/prompt_chatgpt/.gitignore
new file mode 100644
index 00000000..68bc17f9
--- /dev/null
+++ b/python/prompt_chatgpt/.gitignore
@@ -0,0 +1,160 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
diff --git a/python/prompt_chatgpt/README.md b/python/prompt_chatgpt/README.md
new file mode 100644
index 00000000..c384b636
--- /dev/null
+++ b/python/prompt_chatgpt/README.md
@@ -0,0 +1,85 @@
+# 🤖 Python Prompt ChatGPT Function
+
+Ask a question, and let OpenAI GPT-3.5-turbo answer.
+
+## 🧰 Usage
+
+### `GET`
+
+HTML form for interacting with the function.
+
+### `POST`
+
+Query the model for a completion.
+
+**Parameters**
+
+| Name | Description | Location | Type | Sample Value |
+| ------------ | ------------------------------------ | -------- | ------------------ | ----------------------------- |
+| Content-Type | The content type of the request body | Header | `application/json` | N/A |
+| prompt | Text to prompt the model | Body | String | `Write a haiku about Mondays` |
+
+Sample `200` Response:
+
+Response from the model.
+
+```json
+{
+ "ok": true,
+ "completion": "Monday's heavy weight, Dawning with a sigh of grey, Hopeful hearts await."
+}
+```
+
+Sample `400` Response:
+
+Response when the request body is missing.
+
+```json
+{
+ "ok": false,
+ "error": "Missing body with a prompt."
+}
+```
+
+Sample `500` Response:
+
+Response when the model fails to respond.
+
+```json
+{
+ "ok": false,
+ "error": "Failed to query model."
+}
+```
+
+## ⚙️ Configuration
+
+| Setting | Value |
+| ----------------- | --------------------------------- |
+| Runtime | Python (3.9) |
+| Entrypoint | `src/main.py` |
+| Build Commands | `pip install -r requirements.txt` |
+| Permissions | `any` |
+| Timeout (Seconds) | 15 |
+
+## 🔒 Environment Variables
+
+### OPENAI_API_KEY
+
+A unique key used to authenticate with the OpenAI API. Please note that this is a paid service and you will be charged for each request made to the API. For more information, see the [OpenAI pricing page](https://openai.com/pricing/).
+
+| Question | Answer |
+| ------------- | --------------------------------------------------------------------------- |
+| Required | Yes |
+| Sample Value | `sk-wzG...vcy` |
+| Documentation | [OpenAI Docs](https://platform.openai.com/docs/quickstart/add-your-api-key) |
+
+### OPENAI_MAX_TOKENS
+
+The maximum number of tokens that the OpenAI response should contain. Be aware that OpenAI models read and write a maximum number of tokens per API call, which varies depending on the model. For GPT-3.5-turbo, the limit is 4096 tokens.
+
+| Question | Answer |
+| ------------- | ------------------------------------------------------------------------------------------------------------- |
+| Required | No |
+| Sample Value | `512` |
+| Documentation | [OpenAI: What are tokens?](https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them) |
diff --git a/python/prompt_chatgpt/requirements.txt b/python/prompt_chatgpt/requirements.txt
new file mode 100644
index 00000000..f0dd0aec
--- /dev/null
+++ b/python/prompt_chatgpt/requirements.txt
@@ -0,0 +1 @@
+openai
\ No newline at end of file
diff --git a/python/prompt_chatgpt/src/main.py b/python/prompt_chatgpt/src/main.py
new file mode 100644
index 00000000..b2574f3e
--- /dev/null
+++ b/python/prompt_chatgpt/src/main.py
@@ -0,0 +1,35 @@
+import openai
+from utils import get_static_file, throw_if_missing
+import os
+
+
+def main(context):
+ throw_if_missing(os.environ, ["OPENAI_API_KEY"])
+
+ if context.req.method == "GET":
+ return context.res.send(
+ get_static_file("index.html"),
+ 200,
+ {
+ "Content-Type": "text/html; charset=utf-8",
+ },
+ )
+
+    try:
+        throw_if_missing(context.req.body, ["prompt"])
+    except ValueError as err:
+        return context.res.json({"ok": False, "error": str(err)}, 400)
+
+ openai.api_key = os.environ["OPENAI_API_KEY"]
+
+ response = openai.ChatCompletion.create(
+ model="gpt-3.5-turbo",
+ max_tokens=int(os.environ.get("OPENAI_MAX_TOKENS", "512")),
+ messages=[{"role": "user", "content": context.req.body["prompt"]}],
+ )
+
+ completion = response.choices[0].message.content
+ if not completion:
+ return context.res.json({"ok": False, "error": "Failed to query model."}, 500)
+
+ return context.res.json({"ok": True, "completion": completion}, 200)
diff --git a/python/prompt_chatgpt/src/utils.py b/python/prompt_chatgpt/src/utils.py
new file mode 100644
index 00000000..dc43b664
--- /dev/null
+++ b/python/prompt_chatgpt/src/utils.py
@@ -0,0 +1,27 @@
+import os
+
+__dirname = os.path.dirname(os.path.abspath(__file__))
+static_folder = os.path.join(__dirname, "../static")
+
+
+def get_static_file(file_name):
+ """
+ Returns the contents of a file in the static folder
+ :param file_name: Name of the file to read
+ :return: Contents of static/{file_name}
+ """
+ file_path = os.path.join(static_folder, file_name)
+ with open(file_path, "r") as file:
+ return file.read()
+
+
+def throw_if_missing(obj, keys):
+ """
+ Throws an error if any of the keys are missing from the object
+ :param obj: Object to validate
+ :param keys: List of keys to check
+ :raises ValueError: If any keys are missing
+ """
+ missing = [key for key in keys if key not in obj or not obj[key]]
+ if missing:
+ raise ValueError(f"Missing required fields: {', '.join(missing)}")
diff --git a/python/prompt_chatgpt/static/index.html b/python/prompt_chatgpt/static/index.html
new file mode 100644
index 00000000..5e5e5d2e
--- /dev/null
+++ b/python/prompt_chatgpt/static/index.html
@@ -0,0 +1,91 @@
+
+
+
+
+
+
+ Prompt ChatGPT Demo
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Prompt ChatGPT Demo
+
+
+
+ Use this page to test your implementation with OpenAI ChatGPT. Enter
+ text and receive the model output as a response.
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
ChatGPT:
+
+
+
+
+
+
+
+
+
+
+
+
+
From 5501ea0541d8d4da872d3dcee85acc09be7c8df7 Mon Sep 17 00:00:00 2001
From: loks0n <22452787+loks0n@users.noreply.github.com>
Date: Fri, 18 Aug 2023 10:16:34 +0100
Subject: [PATCH 2/6] feat: prompt_chatgpt dart
---
dart/prompt_chatgpt/.gitignore | 27 +++++++
dart/prompt_chatgpt/README.md | 85 +++++++++++++++++++++++
dart/prompt_chatgpt/analysis_options.yaml | 1 +
dart/prompt_chatgpt/lib/main.dart | 40 +++++++++++
dart/prompt_chatgpt/lib/utils.dart | 27 +++++++
dart/prompt_chatgpt/pubspec.yaml | 11 +++
6 files changed, 191 insertions(+)
create mode 100644 dart/prompt_chatgpt/.gitignore
create mode 100644 dart/prompt_chatgpt/README.md
create mode 100644 dart/prompt_chatgpt/analysis_options.yaml
create mode 100644 dart/prompt_chatgpt/lib/main.dart
create mode 100644 dart/prompt_chatgpt/lib/utils.dart
create mode 100644 dart/prompt_chatgpt/pubspec.yaml
diff --git a/dart/prompt_chatgpt/.gitignore b/dart/prompt_chatgpt/.gitignore
new file mode 100644
index 00000000..dc259f50
--- /dev/null
+++ b/dart/prompt_chatgpt/.gitignore
@@ -0,0 +1,27 @@
+# See https://www.dartlang.org/guides/libraries/private-files
+
+# Files and directories created by pub
+.dart_tool/
+.packages
+build/
+# If you're building an application, you may want to check-in your pubspec.lock
+pubspec.lock
+
+# Directory created by dartdoc
+# If you don't generate documentation locally you can remove this line.
+doc/api/
+
+# dotenv environment variables file
+.env*
+
+# Avoid committing generated Javascript files:
+*.dart.js
+*.info.json # Produced by the --dump-info flag.
+*.js # When generated by dart2js. Don't specify *.js if your
+ # project includes source files written in JavaScript.
+*.js_
+*.js.deps
+*.js.map
+
+.flutter-plugins
+.flutter-plugins-dependencies
\ No newline at end of file
diff --git a/dart/prompt_chatgpt/README.md b/dart/prompt_chatgpt/README.md
new file mode 100644
index 00000000..1dc21b37
--- /dev/null
+++ b/dart/prompt_chatgpt/README.md
@@ -0,0 +1,85 @@
+# 🤖 Dart Prompt ChatGPT Function
+
+Ask a question, and let OpenAI GPT-3.5-turbo answer.
+
+## 🧰 Usage
+
+### `GET`
+
+HTML form for interacting with the function.
+
+### `POST`
+
+Query the model for a completion.
+
+**Parameters**
+
+| Name | Description | Location | Type | Sample Value |
+| ------------ | ------------------------------------ | -------- | ------------------ | ----------------------------- |
+| Content-Type | The content type of the request body | Header | `application/json` | N/A |
+| prompt | Text to prompt the model | Body | String | `Write a haiku about Mondays` |
+
+Sample `200` Response:
+
+Response from the model.
+
+```json
+{
+ "ok": true,
+ "completion": "Monday's heavy weight, Dawning with a sigh of grey, Hopeful hearts await."
+}
+```
+
+Sample `400` Response:
+
+Response when the request body is missing.
+
+```json
+{
+ "ok": false,
+ "error": "Missing body with a prompt."
+}
+```
+
+Sample `500` Response:
+
+Response when the model fails to respond.
+
+```json
+{
+ "ok": false,
+ "error": "Failed to query model."
+}
+```
+
+## ⚙️ Configuration
+
+| Setting | Value |
+| ----------------- | --------------- |
+| Runtime | Dart (2.17) |
+| Entrypoint | `lib/main.dart` |
+| Build Commands | `dart pub get` |
+| Permissions | `any` |
+| Timeout (Seconds) | 15 |
+
+## 🔒 Environment Variables
+
+### OPENAI_API_KEY
+
+A unique key used to authenticate with the OpenAI API. Please note that this is a paid service and you will be charged for each request made to the API. For more information, see the [OpenAI pricing page](https://openai.com/pricing/).
+
+| Question | Answer |
+| ------------- | --------------------------------------------------------------------------- |
+| Required | Yes |
+| Sample Value | `sk-wzG...vcy` |
+| Documentation | [OpenAI Docs](https://platform.openai.com/docs/quickstart/add-your-api-key) |
+
+### OPENAI_MAX_TOKENS
+
+The maximum number of tokens that the OpenAI response should contain. Be aware that OpenAI models read and write a maximum number of tokens per API call, which varies depending on the model. For GPT-3.5-turbo, the limit is 4096 tokens.
+
+| Question | Answer |
+| ------------- | ------------------------------------------------------------------------------------------------------------- |
+| Required | No |
+| Sample Value | `512` |
+| Documentation | [OpenAI: What are tokens?](https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them) |
diff --git a/dart/prompt_chatgpt/analysis_options.yaml b/dart/prompt_chatgpt/analysis_options.yaml
new file mode 100644
index 00000000..ea2c9e94
--- /dev/null
+++ b/dart/prompt_chatgpt/analysis_options.yaml
@@ -0,0 +1 @@
+include: package:lints/recommended.yaml
\ No newline at end of file
diff --git a/dart/prompt_chatgpt/lib/main.dart b/dart/prompt_chatgpt/lib/main.dart
new file mode 100644
index 00000000..6cb37109
--- /dev/null
+++ b/dart/prompt_chatgpt/lib/main.dart
@@ -0,0 +1,40 @@
+import 'dart:io';
+import 'dart:async';
+import 'package:dart_openai/dart_openai.dart';
+import 'utils.dart';
+
+Future main(final context) async {
+ throwIfMissing(Platform.environment, ['OPENAI_API_KEY']);
+
+ if (context.req.method == 'GET') {
+ return context.res.send(getStaticFile('index.html'), 200,
+ {'Content-Type': 'text/html; charset=utf-8'});
+ }
+
+  try {
+    throwIfMissing(context.req.body, ['prompt']);
+  } catch (err) {
+    return context.res.json({'ok': false, 'error': err.toString()}, 400);
+  }
+
+ OpenAI.apiKey = Platform.environment['OPENAI_API_KEY']!;
+
+ try {
+ final response = await OpenAI.instance.chat.create(
+ model: 'gpt-3.5-turbo',
+ maxTokens: int.parse(Platform.environment['OPENAI_MAX_TOKENS'] ?? '512'),
+ messages: [
+ OpenAIChatCompletionChoiceMessageModel(
+ content: context.req.body['prompt'],
+ role: OpenAIChatMessageRole.user)
+ ],
+ );
+
+ final completion = response.choices[0].message.content;
+
+ return context.res.json({'ok': true, 'completion': completion}, 200);
+ } on RequestFailedException {
+ return context.res
+ .json({'ok': false, 'error': 'Failed to query model.'}, 500);
+ }
+}
diff --git a/dart/prompt_chatgpt/lib/utils.dart b/dart/prompt_chatgpt/lib/utils.dart
new file mode 100644
index 00000000..881cea78
--- /dev/null
+++ b/dart/prompt_chatgpt/lib/utils.dart
@@ -0,0 +1,27 @@
+import 'dart:io';
+
+/// Throws an error if any of the keys are missing from the object
+/// @param obj - The object to check
+/// @param keys - The list of keys to check for
+/// @throws Exception
+void throwIfMissing(Map obj, List keys) {
+ final missing = [];
+ for (var key in keys) {
+ if (!obj.containsKey(key) || obj[key] == null) {
+ missing.add(key);
+ }
+ }
+ if (missing.isNotEmpty) {
+ throw Exception('Missing required fields: ${missing.join(', ')}');
+ }
+}
+
+final String _dirname = Platform.script.toFilePath();
+final String staticFolder = '${Uri.file(_dirname).resolve('../static')}';
+
+/// Returns the contents of a file in the static folder
+/// @param fileName - The name of the file to read
+/// @returns Contents of static/{fileName}
+String getStaticFile(String fileName) {
+ return File('${staticFolder}/$fileName').readAsStringSync();
+}
diff --git a/dart/prompt_chatgpt/pubspec.yaml b/dart/prompt_chatgpt/pubspec.yaml
new file mode 100644
index 00000000..0a7ba8f4
--- /dev/null
+++ b/dart/prompt_chatgpt/pubspec.yaml
@@ -0,0 +1,11 @@
+name: prompt_chatgpt
+version: 1.0.0
+
+environment:
+ sdk: ^2.17.0
+
+dependencies:
+ dart_openai: ^3.0.0
+
+dev_dependencies:
+ lints: ^2.0.0
From 6393953850866c142e8d5334ea37da81c02ab04f Mon Sep 17 00:00:00 2001
From: loks0n <22452787+loks0n@users.noreply.github.com>
Date: Fri, 18 Aug 2023 11:29:15 +0100
Subject: [PATCH 3/6] feat: prompt chatgpt php
---
php/prompt-chatgpt/.gitignore | 470 ++++++++++++++++++++++++++++++
php/prompt-chatgpt/README.md | 85 ++++++
php/prompt-chatgpt/composer.json | 15 +
php/prompt-chatgpt/src/index.php | 38 +++
php/prompt-chatgpt/src/utils.php | 34 +++
python/prompt_chatgpt/src/main.py | 18 +-
6 files changed, 651 insertions(+), 9 deletions(-)
create mode 100644 php/prompt-chatgpt/.gitignore
create mode 100644 php/prompt-chatgpt/README.md
create mode 100644 php/prompt-chatgpt/composer.json
create mode 100644 php/prompt-chatgpt/src/index.php
create mode 100644 php/prompt-chatgpt/src/utils.php
diff --git a/php/prompt-chatgpt/.gitignore b/php/prompt-chatgpt/.gitignore
new file mode 100644
index 00000000..65a21c66
--- /dev/null
+++ b/php/prompt-chatgpt/.gitignore
@@ -0,0 +1,470 @@
+##### Windows
+# Windows thumbnail cache files
+Thumbs.db
+Thumbs.db:encryptable
+ehthumbs.db
+ehthumbs_vista.db
+
+# Dump file
+*.stackdump
+
+# Folder config file
+[Dd]esktop.ini
+
+# Recycle Bin used on file shares
+$RECYCLE.BIN/
+
+# Windows Installer files
+*.cab
+*.msi
+*.msix
+*.msm
+*.msp
+
+# Windows shortcuts
+*.lnk
+
+##### Linux
+*~
+
+# temporary files which can be created if a process still has a handle open of a deleted file
+.fuse_hidden*
+
+# KDE directory preferences
+.directory
+
+# Linux trash folder which might appear on any partition or disk
+.Trash-*
+
+# .nfs files are created when an open file is removed but is still being accessed
+.nfs*
+
+##### MacOS
+# General
+.DS_Store
+.AppleDouble
+.LSOverride
+
+# Icon must end with two \r
+Icon
+
+# Thumbnails
+._*
+
+# Files that might appear in the root of a volume
+.DocumentRevisions-V100
+.fseventsd
+.Spotlight-V100
+.TemporaryItems
+.Trashes
+.VolumeIcon.icns
+.com.apple.timemachine.donotpresent
+
+# Directories potentially created on remote AFP share
+.AppleDB
+.AppleDesktop
+Network Trash Folder
+Temporary Items
+.apdisk
+
+##### Backup
+*.bak
+*.gho
+*.ori
+*.orig
+*.tmp
+
+##### GPG
+secring.*
+
+##### Dropbox
+# Dropbox settings and caches
+.dropbox
+.dropbox.attr
+.dropbox.cache
+
+##### SynopsysVCS
+# Waveform formats
+*.vcd
+*.vpd
+*.evcd
+*.fsdb
+
+# Default name of the simulation executable. A different name can be
+# specified with this switch (the associated daidir database name is
+# also taken from here): -o /
+simv
+
+# Generated for Verilog and VHDL top configs
+simv.daidir/
+simv.db.dir/
+
+# Infrastructure necessary to co-simulate SystemC models with
+# Verilog/VHDL models. An alternate directory may be specified with this
+# switch: -Mdir=
+csrc/
+
+# Log file - the following switch allows to specify the file that will be
+# used to write all messages from simulation: -l
+*.log
+
+# Coverage results (generated with urg) and database location. The
+# following switch can also be used: urg -dir .vdb
+simv.vdb/
+urgReport/
+
+# DVE and UCLI related files.
+DVEfiles/
+ucli.key
+
+# When the design is elaborated for DirectC, the following file is created
+# with declarations for C/C++ functions.
+vc_hdrs.h
+
+##### SVN
+.svn/
+
+##### Mercurial
+.hg/
+.hgignore
+.hgsigs
+.hgsub
+.hgsubstate
+.hgtags
+
+##### Bazaar
+.bzr/
+.bzrignore
+
+##### CVS
+/CVS/*
+**/CVS/*
+.cvsignore
+*/.cvsignore
+
+##### TortoiseGit
+# Project-level settings
+/.tgitconfig
+
+##### PuTTY
+# Private key
+*.ppk
+
+##### Vim
+# Swap
+[._]*.s[a-v][a-z]
+!*.svg # comment out if you don't need vector files
+[._]*.sw[a-p]
+[._]s[a-rt-v][a-z]
+[._]ss[a-gi-z]
+[._]sw[a-p]
+
+# Session
+Session.vim
+Sessionx.vim
+
+# Temporary
+.netrwhist
+*~
+# Auto-generated tag files
+tags
+# Persistent undo
+[._]*.un~
+
+##### Emacs
+# -*- mode: gitignore; -*-
+*~
+\#*\#
+/.emacs.desktop
+/.emacs.desktop.lock
+*.elc
+auto-save-list
+tramp
+.\#*
+
+# Org-mode
+.org-id-locations
+*_archive
+
+# flymake-mode
+*_flymake.*
+
+# eshell files
+/eshell/history
+/eshell/lastdir
+
+# elpa packages
+/elpa/
+
+# reftex files
+*.rel
+
+# AUCTeX auto folder
+/auto/
+
+# cask packages
+.cask/
+dist/
+
+# Flycheck
+flycheck_*.el
+
+# server auth directory
+/server/
+
+# projectiles files
+.projectile
+
+# directory configuration
+.dir-locals.el
+
+# network security
+/network-security.data
+
+##### SublimeText
+# Cache files for Sublime Text
+*.tmlanguage.cache
+*.tmPreferences.cache
+*.stTheme.cache
+
+# Workspace files are user-specific
+*.sublime-workspace
+
+# Project files should be checked into the repository, unless a significant
+# proportion of contributors will probably not be using Sublime Text
+# *.sublime-project
+
+# SFTP configuration file
+sftp-config.json
+sftp-config-alt*.json
+
+# Package control specific files
+Package Control.last-run
+Package Control.ca-list
+Package Control.ca-bundle
+Package Control.system-ca-bundle
+Package Control.cache/
+Package Control.ca-certs/
+Package Control.merged-ca-bundle
+Package Control.user-ca-bundle
+oscrypto-ca-bundle.crt
+bh_unicode_properties.cache
+
+# Sublime-github package stores a github token in this file
+# https://packagecontrol.io/packages/sublime-github
+GitHub.sublime-settings
+
+##### Notepad++
+# Notepad++ backups #
+*.bak
+
+##### TextMate
+*.tmproj
+*.tmproject
+tmtags
+
+##### VisualStudioCode
+.vscode/*
+!.vscode/settings.json
+!.vscode/tasks.json
+!.vscode/launch.json
+!.vscode/extensions.json
+*.code-workspace
+
+# Local History for Visual Studio Code
+.history/
+
+##### NetBeans
+**/nbproject/private/
+**/nbproject/Makefile-*.mk
+**/nbproject/Package-*.bash
+build/
+nbbuild/
+dist/
+nbdist/
+.nb-gradle/
+
+##### JetBrains
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+
+# User-specific stuff
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/**/usage.statistics.xml
+.idea/**/dictionaries
+.idea/**/shelf
+
+# Generated files
+.idea/**/contentModel.xml
+
+# Sensitive or high-churn files
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+.idea/**/dbnavigator.xml
+
+# Gradle
+.idea/**/gradle.xml
+.idea/**/libraries
+
+# Gradle and Maven with auto-import
+# When using Gradle or Maven with auto-import, you should exclude module files,
+# since they will be recreated, and may cause churn. Uncomment if using
+# auto-import.
+# .idea/artifacts
+# .idea/compiler.xml
+# .idea/jarRepositories.xml
+# .idea/modules.xml
+# .idea/*.iml
+# .idea/modules
+# *.iml
+# *.ipr
+
+# CMake
+cmake-build-*/
+
+# Mongo Explorer plugin
+.idea/**/mongoSettings.xml
+
+# File-based project format
+*.iws
+
+# IntelliJ
+out/
+
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+
+# JIRA plugin
+atlassian-ide-plugin.xml
+
+# Cursive Clojure plugin
+.idea/replstate.xml
+
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+
+# Editor-based Rest Client
+.idea/httpRequests
+
+# Android studio 3.1+ serialized cache file
+.idea/caches/build_file_checksums.ser
+
+##### Eclipse
+.metadata
+bin/
+tmp/
+*.tmp
+*.bak
+*.swp
+*~.nib
+local.properties
+.settings/
+.loadpath
+.recommenders
+
+# External tool builders
+.externalToolBuilders/
+
+# Locally stored "Eclipse launch configurations"
+*.launch
+
+# PyDev specific (Python IDE for Eclipse)
+*.pydevproject
+
+# CDT-specific (C/C++ Development Tooling)
+.cproject
+
+# CDT- autotools
+.autotools
+
+# Java annotation processor (APT)
+.factorypath
+
+# PDT-specific (PHP Development Tools)
+.buildpath
+
+# sbteclipse plugin
+.target
+
+# Tern plugin
+.tern-project
+
+# TeXlipse plugin
+.texlipse
+
+# STS (Spring Tool Suite)
+.springBeans
+
+# Code Recommenders
+.recommenders/
+
+# Annotation Processing
+.apt_generated/
+.apt_generated_test/
+
+# Scala IDE specific (Scala & Java development for Eclipse)
+.cache-main
+.scala_dependencies
+.worksheet
+
+# Uncomment this line if you wish to ignore the project description file.
+# Typically, this file would be tracked if it contains build/dependency configurations:
+#.project
+
+##### Dreamweaver
+# DW Dreamweaver added files
+_notes
+_compareTemp
+configs/
+dwsync.xml
+dw_php_codehinting.config
+*.mno
+
+##### CodeKit
+# General CodeKit files to ignore
+config.codekit
+config.codekit3
+/min
+
+##### Gradle
+.gradle
+**/build/
+!src/**/build/
+
+# Ignore Gradle GUI config
+gradle-app.setting
+
+# Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored)
+!gradle-wrapper.jar
+
+# Cache of project
+.gradletasknamecache
+
+# # Work around https://youtrack.jetbrains.com/issue/IDEA-116898
+# gradle/wrapper/gradle-wrapper.properties
+
+##### Composer
+composer.phar
+/vendor/
+
+##### PHP CodeSniffer
+# gitignore for the PHP Codesniffer framework
+# website: https://github.com/squizlabs/PHP_CodeSniffer
+#
+# Recommended template: PHP.gitignore
+
+/wpcs/*
+
+##### SASS
+.sass-cache/
+*.css.map
+*.sass.map
+*.scss.map
\ No newline at end of file
diff --git a/php/prompt-chatgpt/README.md b/php/prompt-chatgpt/README.md
new file mode 100644
index 00000000..a2134ccb
--- /dev/null
+++ b/php/prompt-chatgpt/README.md
@@ -0,0 +1,85 @@
+# 🤖 PHP Prompt ChatGPT Function
+
+Ask a question, and let OpenAI GPT-3.5-turbo answer.
+
+## 🧰 Usage
+
+### `GET`
+
+HTML form for interacting with the function.
+
+### `POST`
+
+Query the model for a completion.
+
+**Parameters**
+
+| Name | Description | Location | Type | Sample Value |
+| ------------ | ------------------------------------ | -------- | ------------------ | ----------------------------- |
+| Content-Type | The content type of the request body | Header | `application/json` | N/A |
+| prompt | Text to prompt the model | Body | String | `Write a haiku about Mondays` |
+
+Sample `200` Response:
+
+Response from the model.
+
+```json
+{
+ "ok": true,
+ "completion": "Monday's heavy weight, Dawning with a sigh of grey, Hopeful hearts await."
+}
+```
+
+Sample `400` Response:
+
+Response when the request body is missing.
+
+```json
+{
+ "ok": false,
+ "error": "Missing body with a prompt."
+}
+```
+
+Sample `500` Response:
+
+Response when the model fails to respond.
+
+```json
+{
+ "ok": false,
+ "error": "Failed to query model."
+}
+```
+
+## ⚙️ Configuration
+
+| Setting | Value |
+| ----------------- | ------------------ |
+| Runtime | PHP (8.0) |
+| Entrypoint | `src/index.php` |
+| Build Commands | `composer install` |
+| Permissions | `any` |
+| Timeout (Seconds) | 15 |
+
+## 🔒 Environment Variables
+
+### OPENAI_API_KEY
+
+A unique key used to authenticate with the OpenAI API. Please note that this is a paid service and you will be charged for each request made to the API. For more information, see the [OpenAI pricing page](https://openai.com/pricing/).
+
+| Question | Answer |
+| ------------- | --------------------------------------------------------------------------- |
+| Required | Yes |
+| Sample Value | `sk-wzG...vcy` |
+| Documentation | [OpenAI Docs](https://platform.openai.com/docs/quickstart/add-your-api-key) |
+
+### OPENAI_MAX_TOKENS
+
+The maximum number of tokens that the OpenAI response should contain. Be aware that OpenAI models read and write a maximum number of tokens per API call, which varies depending on the model. For GPT-3.5-turbo, the limit is 4096 tokens.
+
+| Question | Answer |
+| ------------- | ------------------------------------------------------------------------------------------------------------- |
+| Required | No |
+| Sample Value | `512` |
+| Documentation | [OpenAI: What are tokens?](https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them) |
diff --git a/php/prompt-chatgpt/composer.json b/php/prompt-chatgpt/composer.json
new file mode 100644
index 00000000..bd71c353
--- /dev/null
+++ b/php/prompt-chatgpt/composer.json
@@ -0,0 +1,15 @@
+{
+ "name": "templates/prompt-chatgpt",
+ "type": "library",
+ "require": {
+ "php": ">=8.0.0",
+ "openai-php/client": "^0.6.4",
+ "symfony/http-client": "^6.3",
+ "nyholm/psr7": "^1.8"
+ },
+ "config": {
+ "allow-plugins": {
+ "php-http/discovery": true
+ }
+ }
+}
diff --git a/php/prompt-chatgpt/src/index.php b/php/prompt-chatgpt/src/index.php
new file mode 100644
index 00000000..7fc4f6ab
--- /dev/null
+++ b/php/prompt-chatgpt/src/index.php
@@ -0,0 +1,38 @@
+req->method === 'GET') {
+ return $context->res->send(get_static_file('index.html'), 200, [
+ 'Content-Type' => 'text/html',
+ ]);
+ }
+
+ try {
+ throw_if_missing($context->req->body, ['prompt']);
+ } catch (Exception $e) {
+ return $context->res->json(['ok' => False, 'error' => $e->getMessage()], 400);
+ }
+
+ $openai = OpenAI::client($_ENV['OPENAI_API_KEY']);
+
+ try {
+        $response = $openai->chat()->create([
+            'model' => 'gpt-3.5-turbo',
+            'max_tokens' => (int) ($_ENV['OPENAI_MAX_TOKENS'] ?? '512'),
+            'messages' => [
+                ['role' => 'user', 'content' => $context->req->body['prompt']]
+            ],
+        ]);
+
+ $completion = $response['choices'][0]['message']['content'];
+ return $context->res->json(['ok' => True, 'completion' => $completion], 200, [
+ 'Content-Type' => 'text/plain',
+ ]);
+ } catch (Exception $e) {
+ return $context->res->json(['ok' => False, 'error' => 'Failed to query model.'], 500);
+ }
+};
diff --git a/php/prompt-chatgpt/src/utils.php b/php/prompt-chatgpt/src/utils.php
new file mode 100644
index 00000000..48105156
--- /dev/null
+++ b/php/prompt-chatgpt/src/utils.php
@@ -0,0 +1,34 @@
+ 0) {
+ throw new Exception('Missing required fields: ' . implode(', ', $missing));
+ }
+}
diff --git a/python/prompt_chatgpt/src/main.py b/python/prompt_chatgpt/src/main.py
index b2574f3e..f4966e23 100644
--- a/python/prompt_chatgpt/src/main.py
+++ b/python/prompt_chatgpt/src/main.py
@@ -22,14 +22,14 @@ def main(context):
openai.api_key = os.environ["OPENAI_API_KEY"]
- response = openai.ChatCompletion.create(
- model="gpt-3.5-turbo",
- max_tokens=int(os.environ.get("OPENAI_MAX_TOKENS", "512")),
- messages=[{"role": "user", "content": context.req.body["prompt"]}],
- )
+ try:
+ response = openai.ChatCompletion.create(
+ model="gpt-3.5-turbo",
+ max_tokens=int(os.environ.get("OPENAI_MAX_TOKENS", "512")),
+ messages=[{"role": "user", "content": context.req.body["prompt"]}],
+ )
+ completion = response.choices[0].message.content
+ return context.res.json({"ok": True, "completion": completion}, 200)
- completion = response.choices[0].message.content
- if not completion:
+ except Exception:
return context.res.json({"ok": False, "error": "Failed to query model."}, 500)
-
- return context.res.json({"ok": True, "completion": completion}, 200)
From 311ae7d302d84ee39b539b36da03ba687d2a7573 Mon Sep 17 00:00:00 2001
From: loks0n <22452787+loks0n@users.noreply.github.com>
Date: Sun, 20 Aug 2023 14:35:02 +0100
Subject: [PATCH 4/6] chore: matej review
---
dart/prompt_chatgpt/lib/utils.dart | 8 +--
dart/prompt_chatgpt/static/index.html | 91 +++++++++++++++++++++++++++
php/prompt-chatgpt/src/index.php | 11 ++--
php/prompt-chatgpt/static/index.html | 91 +++++++++++++++++++++++++++
4 files changed, 191 insertions(+), 10 deletions(-)
create mode 100644 dart/prompt_chatgpt/static/index.html
create mode 100644 php/prompt-chatgpt/static/index.html
diff --git a/dart/prompt_chatgpt/lib/utils.dart b/dart/prompt_chatgpt/lib/utils.dart
index 881cea78..aeada1f0 100644
--- a/dart/prompt_chatgpt/lib/utils.dart
+++ b/dart/prompt_chatgpt/lib/utils.dart
@@ -1,5 +1,8 @@
import 'dart:io';
+final String _dirname = Platform.script.toFilePath();
+final String staticFolder = '${Uri.file(_dirname).resolve('../static')}';
+
/// Throws an error if any of the keys are missing from the object
/// @param obj - The object to check
/// @param keys - The list of keys to check for
@@ -16,12 +19,9 @@ void throwIfMissing(Map obj, List keys) {
}
}
-final String _dirname = Platform.script.toFilePath();
-final String staticFolder = '${Uri.file(_dirname).resolve('../static')}';
-
/// Returns the contents of a file in the static folder
/// @param fileName - The name of the file to read
/// @returns Contents of static/{fileName}
String getStaticFile(String fileName) {
- return File('${staticFolder}/$fileName').readAsStringSync();
+ return File('$staticFolder/$fileName').readAsStringSync();
}
diff --git a/dart/prompt_chatgpt/static/index.html b/dart/prompt_chatgpt/static/index.html
new file mode 100644
index 00000000..5e5e5d2e
--- /dev/null
+++ b/dart/prompt_chatgpt/static/index.html
@@ -0,0 +1,91 @@
+
+
+
+
+
+
+ Prompt ChatGPT Demo
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Prompt ChatGPT Demo
+
+
+
+ Use this page to test your implementation with OpenAI ChatGPT. Enter
+ text and receive the model output as a response.
+