Mirror of https://github.com/tymur999/braintok.git, synced 2025-08-05 16:20:37 +00:00
Switched from fine-tuning to simply sending the file
This commit is contained in:
.gitignore (vendored): 5 changes
@@ -34,3 +34,8 @@ yarn-error.log*
 # typescript
 *.tsbuildinfo
 next-env.d.ts
+config.py
+__pycache__/
+.idea/braintok-2.iml
+*.pdf
+.idea/
main.py: 34 changes
@@ -1,22 +1,34 @@
 from config import API_KEY
 from openai import OpenAI
 from fileFormatConverter import convert_file
-client = OpenAI(api_key=API_KEY)
 
 
 def main():
-    inputFileName = input("enter input file name with extension (supported types: pdf, docx): ")
-    outputFileName = input("enter output file name without extension:")
-    convert_file(inputFileName, outputFileName)
-    with open(f'{outputFileName}.jsonl', 'rb') as file:
-        file_response = client.files.create(
-            file=file,
-            purpose="fine-tune"
-        )
+    #inputFileName = input("enter input file name with extension (supported types: pdf, docx): ")
+    # outputFileName = input("enter output file name without extension:")
+
+    inputFileName = "HW07.pdf"
+    outputFileName = "demo"
+    convert_file(inputFileName, outputFileName)
+    client = OpenAI(api_key=API_KEY)
+
+    with open(f'{outputFileName}.jsonl', 'r') as file:
+        # file_response = client.files.create(
+        #     file=file,
+        #     purpose="fine-tune"
+        # )
+        # fine_tuned_model = client.fine_tuning.jobs.create(
+        #     training_file=file_response.id,
+        #     model="gpt-4o-mini-2024-07-18"
+        # )
+        homework = file.read()
 
     stream = client.chat.completions.create(
-        model="gpt-4o",
-        messages=[{"role": "user", "content": "Give me an estimation how long this homework will take me"}],
+        model="gpt-4o-mini-2024-07-18",
+        messages=[{"role": "user", "content": f"""
+        This is my homework: {homework}
+        If this homework consists of several separate problems, split them.
+        For each problem give me problem number, problem name, and problem text"""}],
         stream=True,
     )
     for chunk in stream:
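Note: the hunk ends at "for chunk in stream:", so the loop body is not part of this diff. A minimal sketch of how such a chat-completions stream is typically consumed with the OpenAI Python client is shown below; this is a hypothetical continuation, not the repository's actual code.

    # Hypothetical loop body; the real one lies outside this hunk.
    for chunk in stream:
        delta = chunk.choices[0].delta.content  # streamed text fragment, may be None
        if delta:
            print(delta, end="", flush=True)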