
Commit 422e732: ai commands

1 parent 233bdad

3 files changed: +50 -28 lines

bigfun/bigfunctions.py (+21)

@@ -200,6 +200,15 @@ def doc(self):
         self.config['use_case'] = open(self.use_case_filename, encoding='utf-8').read()
         return BIGFUNCTION_DOC_TEMPLATE.render(**self.config)

+    def generate_description_improvement(self):
+        prompt = '\n\n'.join([
+            'Improve the description of this bigquery function. Return only the description.',
+            'FUNCTION YAML DEFINITION:',
+            open(self.filename, encoding='utf-8').read(),
+        ])
+        description = generate_content(prompt)
+        print(description)
+
     def generate_use_case(self, overwrite_if_exists=False):
         if os.path.isfile(self.use_case_filename):
             if not overwrite_if_exists:
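The new method builds a single text prompt from the function's YAML definition and passes it to `generate_content`. A minimal usage sketch, assuming `bigfun.bigfunctions` is importable as `bf` (as in `cli.py` below) and using a purely illustrative function name:

```python
# Hedged usage sketch: the module alias `bf` mirrors cli.py, and the function
# name 'is_email_valid' is illustrative only and may not exist in the catalog.
import bigfun.bigfunctions as bf

bigfunction = bf.BigFunction('is_email_valid')
bigfunction.generate_description_improvement()  # prints the model-suggested description
```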
@@ -320,6 +329,18 @@ def parse_readme(self):

         return title, readme, frontmatter

+    def generate_readme(self):
+        prompt = '''
+            BigFunctions is a catalog of BigQuery functions. Functions are categorized in folders. Generate the description of the folder containing the following functions:
+
+
+        '''.replace('\n    ', '')
+        prompt += '\n\n--------------------\n\n'.join(['FUNCTION `' + b.name + '`: ' + b.config['description'] for b in self.bigfunctions])
+        print(prompt)
+        readme = generate_content(prompt)
+        with open(f'{self.path}/README.md', 'w', encoding='utf-8') as file:
+            file.write(readme)
+
     @property
     def doc(self):
         return DOC_FOLDER_TEMPLATE.render(**self.dict)
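For context, here is a simplified sketch of how `generate_readme` assembles the folder prompt before calling the model; the function names and descriptions below are made up:

```python
# Simplified reproduction of the prompt assembly in Folder.generate_readme,
# using hypothetical function names and descriptions.
functions = [
    {'name': 'example_function_a', 'description': 'Does A.'},  # hypothetical
    {'name': 'example_function_b', 'description': 'Does B.'},  # hypothetical
]

prompt = (
    'BigFunctions is a catalog of BigQuery functions. '
    'Functions are categorized in folders. '
    'Generate the description of the folder containing the following functions:\n\n'
)
prompt += '\n\n--------------------\n\n'.join(
    'FUNCTION `' + f['name'] + '`: ' + f['description'] for f in functions
)
print(prompt)
```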

bigfun/cli.py (+24)

@@ -178,6 +178,30 @@ def generate_use_case(bigfunction):
     bigfunction.generate_use_case()


+@docs.command()
+@click.argument('bigfunction')
+def generate_description_improvement(bigfunction):
+    """
+    Generate an improved description for bigfunctions from their yaml files
+    """
+    bigfunctions = [bigfunction.strip() for bigfunction in bigfunction.split(',')]
+    if bigfunction == 'ALL':
+        bigfunctions = bf.BIGFUNCTIONS
+    for name in bigfunctions:
+        bigfunction = bf.BigFunction(name)
+        bigfunction.generate_description_improvement()
+
+
+@docs.command()
+@click.argument('folder_path')
+def generate_folder_readme(folder_path):
+    """
+    Generate README.md for bigfunctions folder
+    """
+    folder = bf.Folder(folder_path)
+    folder.generate_readme()
+
+
 @docs.command()
 @click.option('--config', default='config.yaml', help='Path to the config file')
 @click.option('--port', default='8000', help='Port to serve docs on')
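Both commands are registered on the existing `docs` group. The first one accepts either a comma-separated list of function names or the literal `ALL`; a small sketch of that resolution logic, where `resolve_names` is a hypothetical helper introduced only for illustration and the sample names are made up:

```python
# Sketch of how the `bigfunction` CLI argument is resolved to a list of names.
# `resolve_names` is a hypothetical helper, not part of the committed code.
def resolve_names(argument: str, all_names: list[str]) -> list[str]:
    names = [name.strip() for name in argument.split(',')]
    if argument == 'ALL':
        names = all_names
    return names

print(resolve_names('my_function_a, my_function_b', ['x', 'y']))  # ['my_function_a', 'my_function_b']
print(resolve_names('ALL', ['x', 'y']))                           # ['x', 'y']
```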

bigfun/utils.py (+5, -28)

@@ -377,32 +377,9 @@ def download(url, destination_filename):


 def generate_content(prompt):
-    import vertexai
-    from vertexai.generative_models import GenerativeModel, SafetySetting
-
-    safety_settings = [
-        SafetySetting(
-            category=SafetySetting.HarmCategory.HARM_CATEGORY_HATE_SPEECH,
-            threshold=SafetySetting.HarmBlockThreshold.OFF
-        ),
-        SafetySetting(
-            category=SafetySetting.HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT,
-            threshold=SafetySetting.HarmBlockThreshold.OFF
-        ),
-        SafetySetting(
-            category=SafetySetting.HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT,
-            threshold=SafetySetting.HarmBlockThreshold.OFF
-        ),
-        SafetySetting(
-            category=SafetySetting.HarmCategory.HARM_CATEGORY_HARASSMENT,
-            threshold=SafetySetting.HarmBlockThreshold.OFF
-        ),
-    ]
-
-    vertexai.init()
-    model = GenerativeModel("gemini-1.5-pro-002")
-    response = model.generate_content(
-        prompt,
-        safety_settings=safety_settings,
-    )
+    import google.auth
+    import google.genai
+    _, project = google.auth.default()
+    genai = google.genai.Client(vertexai=True, project=project, location='europe-west1')
+    response = genai.models.generate_content(model='gemini-2.0-flash-001', contents=prompt)
     return response.text
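The Vertex AI SDK call (with explicit safety settings and gemini-1.5-pro-002) is replaced by the lighter google-genai client pointed at Vertex AI. A standalone sketch of the new path, assuming the google-auth and google-genai packages are installed and Application Default Credentials are configured; the prompt text is illustrative:

```python
# Standalone sketch of the new generate_content path. Requires the google-auth
# and google-genai packages plus Application Default Credentials
# (e.g. `gcloud auth application-default login`). Model and location mirror
# the committed code; adjust them for your project.
import google.auth
import google.genai

_, project = google.auth.default()
client = google.genai.Client(vertexai=True, project=project, location='europe-west1')
response = client.models.generate_content(
    model='gemini-2.0-flash-001',
    contents='Improve this description: parses a user agent string.',  # illustrative prompt
)
print(response.text)
```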
